Compare commits
4 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| a476a4b734 | |||
| 11b205ddaf | |||
| 71c60ade8a | |||
| bf3dc635d9 |
@@ -79,8 +79,8 @@ android {
|
|||||||
applicationId "com.ariacockpit"
|
applicationId "com.ariacockpit"
|
||||||
minSdkVersion rootProject.ext.minSdkVersion
|
minSdkVersion rootProject.ext.minSdkVersion
|
||||||
targetSdkVersion rootProject.ext.targetSdkVersion
|
targetSdkVersion rootProject.ext.targetSdkVersion
|
||||||
versionCode 10406
|
versionCode 10408
|
||||||
versionName "0.1.4.6"
|
versionName "0.1.4.8"
|
||||||
// Fallback fuer Libraries mit Product Flavors
|
// Fallback fuer Libraries mit Product Flavors
|
||||||
missingDimensionStrategy 'react-native-camera', 'general'
|
missingDimensionStrategy 'react-native-camera', 'general'
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "aria-cockpit",
|
"name": "aria-cockpit",
|
||||||
"version": "0.1.4.6",
|
"version": "0.1.4.8",
|
||||||
"private": true,
|
"private": true,
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"android": "react-native run-android",
|
"android": "react-native run-android",
|
||||||
|
|||||||
@@ -686,23 +686,26 @@ const ChatScreen: React.FC = () => {
|
|||||||
// gesetzt UND text leer/Placeholder)
|
// gesetzt UND text leer/Placeholder)
|
||||||
// - User-Bubbles deren clientMsgId der Server noch nicht kennt:
|
// - User-Bubbles deren clientMsgId der Server noch nicht kennt:
|
||||||
// z.B. waehrend Reconnect-Race oder solange flushQueuedMessages
|
// z.B. waehrend Reconnect-Race oder solange flushQueuedMessages
|
||||||
// noch laeuft. ABER: wenn der Server eine textgleiche Bubble
|
// noch laeuft. ABER: wenn der Server eine textgleiche User-
|
||||||
// im gleichen 5-Min-Fenster hat (Alter Backup-Eintrag ohne
|
// Bubble hat (egal mit welcher cmid oder ohne — z.B. wenn die
|
||||||
// clientMsgId, vor dem Bridge-Patch geschrieben), werten wir
|
// Bubble aus einer Bridge-Version vor dem clientMsgId-Patch
|
||||||
// das als Treffer und verwerfen die lokale Kopie — sonst
|
// stammt oder wenn die ts kaputt sind), werten wir das als
|
||||||
// Doppelpost: einmal als Server-Bubble (delivered) und einmal
|
// Treffer und verwerfen die lokale Kopie. Sonst Doppelpost:
|
||||||
// als lokale failed/queued mit Retry-Knopf.
|
// einmal als Server-Bubble (delivered) und einmal als lokale
|
||||||
const FIVE_MIN = 5 * 60 * 1000;
|
// failed/queued mit Retry-Knopf.
|
||||||
|
const serverUserTexts = new Set(
|
||||||
|
fromServer.filter(s => s.sender === 'user').map(s => s.text || '')
|
||||||
|
);
|
||||||
const localOnly = prev.filter(m => {
|
const localOnly = prev.filter(m => {
|
||||||
if (m.skillCreated || m.triggerCreated || m.memorySaved) return true;
|
if (m.skillCreated || m.triggerCreated || m.memorySaved) return true;
|
||||||
if (m.audioRequestId && (!m.text || m.text === '🎙 Aufnahme...' || m.text === 'Aufnahme...')) return true;
|
if (m.audioRequestId && (!m.text || m.text === '🎙 Aufnahme...' || m.text === 'Aufnahme...')) return true;
|
||||||
if (m.sender === 'user' && m.clientMsgId && !serverCmids.has(m.clientMsgId)) {
|
if (m.sender === 'user' && m.clientMsgId && !serverCmids.has(m.clientMsgId)) {
|
||||||
const serverHasIt = fromServer.some(s =>
|
// Text-Match-Fallback: wenn der Server irgendwo eine textgleiche
|
||||||
s.sender === 'user' &&
|
// User-Bubble hat, ist es dieselbe Nachricht (vor cmid-Aera, ts
|
||||||
s.text === m.text &&
|
// kaputt etc.) — wir verwerfen die lokale Kopie. Leerer Text
|
||||||
Math.abs((s.timestamp || 0) - (m.timestamp || 0)) < FIVE_MIN,
|
// (z.B. nur Anhang) faellt nicht in den Vergleich.
|
||||||
);
|
const text = m.text || '';
|
||||||
if (serverHasIt) return false;
|
if (text && serverUserTexts.has(text)) return false;
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
return false;
|
return false;
|
||||||
@@ -1024,9 +1027,12 @@ const ChatScreen: React.FC = () => {
|
|||||||
const tool = (message.payload.tool as string) || '';
|
const tool = (message.payload.tool as string) || '';
|
||||||
setAgentActivity({ activity, tool });
|
setAgentActivity({ activity, tool });
|
||||||
// In den Gedanken-Stream einfuegen. Dedup gegen identische Folge-
|
// In den Gedanken-Stream einfuegen. Dedup gegen identische Folge-
|
||||||
// Events (z.B. zwei mal 'thinking' direkt hintereinander).
|
// Events (z.B. zwei mal 'thinking' direkt hintereinander). Tool-
|
||||||
|
// Events NIE dedupen — wenn ARIA dreimal Bash hintereinander ruft,
|
||||||
|
// sollen alle drei sichtbar sein.
|
||||||
const key = `${activity}|${tool}`;
|
const key = `${activity}|${tool}`;
|
||||||
if (key !== lastThoughtKeyRef.current) {
|
const isTool = activity === 'tool';
|
||||||
|
if (isTool || key !== lastThoughtKeyRef.current) {
|
||||||
lastThoughtKeyRef.current = key;
|
lastThoughtKeyRef.current = key;
|
||||||
setThoughts(prev => {
|
setThoughts(prev => {
|
||||||
const next = [...prev, { ts: Date.now(), activity, tool }];
|
const next = [...prev, { ts: Date.now(), activity, tool }];
|
||||||
|
|||||||
+33
-5
@@ -997,8 +997,13 @@ class ARIABridge:
|
|||||||
"""Schreibt eine Zeile in /shared/config/chat_backup.jsonl.
|
"""Schreibt eine Zeile in /shared/config/chat_backup.jsonl.
|
||||||
Wird von Diagnostic + App als History-Quelle gelesen.
|
Wird von Diagnostic + App als History-Quelle gelesen.
|
||||||
entry braucht mindestens {role, text}; ts wird ergaenzt.
|
entry braucht mindestens {role, text}; ts wird ergaenzt.
|
||||||
Returns den ts (auch fuer Bubble-Loeschen-Tracking)."""
|
Returns den ts (auch fuer Bubble-Loeschen-Tracking).
|
||||||
ts = int(asyncio.get_event_loop().time() * 1000)
|
|
||||||
|
WICHTIG: ts ist UNIX-ms (time.time()*1000), NICHT loop-time.
|
||||||
|
Loop-time ist Container-monotonic — bei jedem Restart wieder 0.
|
||||||
|
Das brach die App-History-Sortierung weil App-side Date.now()
|
||||||
|
(echtes UNIX-ms) mit Bridge-Container-Uptime gemischt wurde."""
|
||||||
|
ts = int(time.time() * 1000)
|
||||||
try:
|
try:
|
||||||
line = {"ts": ts}
|
line = {"ts": ts}
|
||||||
line.update(entry)
|
line.update(entry)
|
||||||
@@ -2519,17 +2524,22 @@ class ARIABridge:
|
|||||||
status = await asyncio.get_event_loop().run_in_executor(None, _do_request)
|
status = await asyncio.get_event_loop().run_in_executor(None, _do_request)
|
||||||
logger.info("[cancel] Diagnostic /api/cancel: %s", status)
|
logger.info("[cancel] Diagnostic /api/cancel: %s", status)
|
||||||
|
|
||||||
async def _emit_activity(self, activity: str, tool: str = "") -> None:
|
async def _emit_activity(self, activity: str, tool: str = "", force: bool = False) -> None:
|
||||||
"""Sendet agent_activity an die App — nur wenn sich der State geaendert hat.
|
"""Sendet agent_activity an die App — nur wenn sich der State geaendert hat.
|
||||||
|
|
||||||
Trailing Agent-Events nach chat:final werden 3s lang unterdrueckt
|
Trailing Agent-Events nach chat:final werden 3s lang unterdrueckt
|
||||||
(nur 'idle' kommt immer durch)."""
|
(nur 'idle' kommt immer durch).
|
||||||
|
|
||||||
|
force=True: kein State-Dedup — wird vom Proxy-Tool-Hook genutzt
|
||||||
|
damit auch wiederholte gleiche Tool-Aufrufe (z.B. 3x Bash
|
||||||
|
hintereinander) im Gedanken-Stream als eigene Eintraege sichtbar
|
||||||
|
bleiben."""
|
||||||
if activity != "idle" and self._last_chat_final_at > 0:
|
if activity != "idle" and self._last_chat_final_at > 0:
|
||||||
since_final = asyncio.get_event_loop().time() - self._last_chat_final_at
|
since_final = asyncio.get_event_loop().time() - self._last_chat_final_at
|
||||||
if since_final < 3.0:
|
if since_final < 3.0:
|
||||||
return
|
return
|
||||||
state = (activity, tool)
|
state = (activity, tool)
|
||||||
if state == self._last_activity_state:
|
if not force and state == self._last_activity_state:
|
||||||
return
|
return
|
||||||
self._last_activity_state = state
|
self._last_activity_state = state
|
||||||
await self._send_to_rvs({
|
await self._send_to_rvs({
|
||||||
@@ -2677,6 +2687,24 @@ class ARIABridge:
|
|||||||
self._handle_trigger_fired(reply, trigger_name, ttype, events)
|
self._handle_trigger_fired(reply, trigger_name, ttype, events)
|
||||||
)
|
)
|
||||||
await _send_response(writer, 200, {"ok": True})
|
await _send_response(writer, 200, {"ok": True})
|
||||||
|
elif method == "POST" and path == "/internal/agent-activity":
|
||||||
|
# Vom Proxy gefeuert bei jedem Claude-Code-tool_use-Event
|
||||||
|
# (Bash, Read, Edit, Grep, ...). Wir spiegeln das als
|
||||||
|
# RVS agent_activity an App+Diagnostic damit der Gedanken-
|
||||||
|
# Stream live mitlaufen kann.
|
||||||
|
try:
|
||||||
|
data = json.loads(body.decode("utf-8", "ignore"))
|
||||||
|
except Exception as exc:
|
||||||
|
await _send_response(writer, 400, {"error": f"bad json: {exc}"})
|
||||||
|
return
|
||||||
|
tool = (data.get("tool") or "").strip()
|
||||||
|
if not tool:
|
||||||
|
await _send_response(writer, 400, {"error": "tool erforderlich"})
|
||||||
|
return
|
||||||
|
# Force-emit (kein Dedup): User soll JEDEN Tool-Call sehen
|
||||||
|
# selbst wenn derselbe Name zweimal in Folge kommt.
|
||||||
|
asyncio.create_task(self._emit_activity("tool", tool, force=True))
|
||||||
|
await _send_response(writer, 200, {"ok": True})
|
||||||
elif method == "POST" and path == "/internal/delete-chat-message":
|
elif method == "POST" and path == "/internal/delete-chat-message":
|
||||||
try:
|
try:
|
||||||
data = json.loads(body.decode("utf-8", "ignore"))
|
data = json.loads(body.decode("utf-8", "ignore"))
|
||||||
|
|||||||
@@ -2252,9 +2252,11 @@
|
|||||||
}
|
}
|
||||||
|
|
||||||
function pushThought(activity, tool) {
|
function pushThought(activity, tool) {
|
||||||
// Dedup gegen direkt aufeinanderfolgende identische Events
|
// Dedup gegen direkt aufeinanderfolgende identische Events. Tool-
|
||||||
|
// Events NIE dedupen — drei Bash-Calls in Folge sollen drei Eintraege
|
||||||
|
// ergeben, nicht einen.
|
||||||
const key = `${activity}|${tool || ''}`;
|
const key = `${activity}|${tool || ''}`;
|
||||||
if (key === lastThoughtKey) return;
|
if (activity !== 'tool' && key === lastThoughtKey) return;
|
||||||
lastThoughtKey = key;
|
lastThoughtKey = key;
|
||||||
thoughtStream.push({ ts: Date.now(), activity, tool: tool || '' });
|
thoughtStream.push({ ts: Date.now(), activity, tool: tool || '' });
|
||||||
if (thoughtStream.length > MAX_THOUGHTS) thoughtStream = thoughtStream.slice(-MAX_THOUGHTS);
|
if (thoughtStream.length > MAX_THOUGHTS) thoughtStream = thoughtStream.slice(-MAX_THOUGHTS);
|
||||||
|
|||||||
@@ -15,6 +15,7 @@ services:
|
|||||||
sed -i 's/const DEFAULT_TIMEOUT = 300000;/const DEFAULT_TIMEOUT = 1200000;/' $$DIST/subprocess/manager.js &&
|
sed -i 's/const DEFAULT_TIMEOUT = 300000;/const DEFAULT_TIMEOUT = 1200000;/' $$DIST/subprocess/manager.js &&
|
||||||
cp /proxy-patches/openai-to-cli.js $$DIST/adapter/openai-to-cli.js &&
|
cp /proxy-patches/openai-to-cli.js $$DIST/adapter/openai-to-cli.js &&
|
||||||
cp /proxy-patches/cli-to-openai.js $$DIST/adapter/cli-to-openai.js &&
|
cp /proxy-patches/cli-to-openai.js $$DIST/adapter/cli-to-openai.js &&
|
||||||
|
cp /proxy-patches/routes.js $$DIST/server/routes.js &&
|
||||||
claude-max-api"
|
claude-max-api"
|
||||||
volumes:
|
volumes:
|
||||||
- ~/.claude:/root/.claude # Claude CLI Auth (Credentials in /root/.claude/.credentials.json)
|
- ~/.claude:/root/.claude # Claude CLI Auth (Credentials in /root/.claude/.credentials.json)
|
||||||
|
|||||||
@@ -0,0 +1,309 @@
|
|||||||
|
/**
|
||||||
|
* ARIA-patched API Route Handlers
|
||||||
|
*
|
||||||
|
* Erweiterung der npm-Version von claude-max-api-proxy:
|
||||||
|
* - Bei jedem Claude-CLI-`assistant`-Event mit tool_use-Block (Bash, Read,
|
||||||
|
* Edit, Grep, …) wird ein HTTP-POST an die Bridge gefeuert
|
||||||
|
* (ARIA_TOOL_HOOK_URL, default http://aria-bridge:8090/internal/agent-activity).
|
||||||
|
* Bridge spiegelt das als RVS `agent_activity` an App+Diagnostic →
|
||||||
|
* Gedanken-Stream zeigt live was ARIA gerade tool-maessig macht.
|
||||||
|
* - Fire-and-forget, fail-open. Wenn die Bridge nicht antwortet, bricht
|
||||||
|
* der Brain-Call NICHT ab.
|
||||||
|
*
|
||||||
|
* Wird zur Container-Startzeit ueber die npm-Version geschrieben
|
||||||
|
* (siehe docker-compose.yml proxy-Block).
|
||||||
|
*/
|
||||||
|
import { v4 as uuidv4 } from "uuid";
|
||||||
|
import http from "http";
|
||||||
|
import { ClaudeSubprocess } from "../subprocess/manager.js";
|
||||||
|
import { openaiToCli } from "../adapter/openai-to-cli.js";
|
||||||
|
import { cliResultToOpenai, createDoneChunk, } from "../adapter/cli-to-openai.js";
|
||||||
|
|
||||||
|
const TOOL_HOOK_URL = process.env.ARIA_TOOL_HOOK_URL
    || "http://aria-bridge:8090/internal/agent-activity";

/**
 * Fire-and-forget POST of a single tool-use event to the bridge.
 *
 * Never throws and never blocks the caller: all transport failures
 * (bad URL, DNS, connection refused, timeout) are swallowed on purpose
 * so an unreachable bridge can never abort a brain call.
 */
function _emitToolEvent(toolName) {
    if (!toolName) return;
    try {
        const target = new URL(TOOL_HOOK_URL);
        const payload = JSON.stringify({ tool: String(toolName) });
        const options = {
            method: "POST",
            hostname: target.hostname,
            port: target.port || 80,
            path: target.pathname,
            headers: {
                "Content-Type": "application/json",
                "Content-Length": Buffer.byteLength(payload),
            },
            timeout: 2000,
        };
        const request = http.request(options, (response) => { response.resume(); });
        request.on("error", () => {});
        request.on("timeout", () => request.destroy());
        request.write(payload);
        request.end();
    } catch (_) { /* never propagate */ }
}

/**
 * Subscribes to the subprocess's `assistant` events and forwards every
 * tool_use content block (Bash, Read, Edit, Grep, ...) to the bridge via
 * _emitToolEvent. Fail-open: malformed messages are silently ignored.
 */
function _attachToolHook(subprocess) {
    subprocess.on("assistant", (message) => {
        try {
            for (const block of message?.message?.content || []) {
                if (block && block.type === "tool_use" && block.name) {
                    _emitToolEvent(block.name);
                }
            }
        } catch (_) { /* fail-open */ }
    });
}
|
||||||
|
/**
 * Handle POST /v1/chat/completions.
 *
 * Validates the OpenAI-style request body, converts it to Claude CLI
 * input, spawns a subprocess, attaches the ARIA tool-use hook, and then
 * delegates to the streaming (SSE) or non-streaming response handler.
 */
export async function handleChatCompletions(req, res) {
    const requestId = uuidv4().replace(/-/g, "").slice(0, 24);
    const body = req.body;
    const wantsStream = body.stream === true;
    try {
        // Reject anything that lacks a non-empty messages array.
        if (!Array.isArray(body.messages) || body.messages.length === 0) {
            res.status(400).json({
                error: {
                    message: "messages is required and must be a non-empty array",
                    type: "invalid_request_error",
                    code: "invalid_messages",
                },
            });
            return;
        }

        // Translate the OpenAI-shaped request into Claude CLI input.
        const cliInput = openaiToCli(body);
        const subprocess = new ClaudeSubprocess();

        // ARIA patch: mirror tool_use events to the bridge.
        // Applies to both the streaming and the non-streaming branch.
        _attachToolHook(subprocess);

        if (wantsStream) {
            await handleStreamingResponse(req, res, subprocess, cliInput, requestId);
        } else {
            await handleNonStreamingResponse(res, subprocess, cliInput, requestId);
        }
    } catch (error) {
        const message = error instanceof Error ? error.message : "Unknown error";
        console.error("[handleChatCompletions] Error:", message);
        if (!res.headersSent) {
            res.status(500).json({
                error: { message, type: "server_error", code: null },
            });
        }
    }
}
|
||||||
|
/**
 * Handle streaming response (SSE).
 *
 * Registers all subprocess event handlers first, then starts the CLI
 * subprocess. The returned promise settles when the stream finishes:
 * normal result, subprocess error/exit, or client disconnect.
 *
 * IMPORTANT: The Express req.on("close") event fires when the request body
 * is fully received, NOT when the client disconnects. For SSE connections,
 * we use res.on("close") to detect actual client disconnection.
 */
async function handleStreamingResponse(req, res, subprocess, cliInput, requestId) {
    // Set SSE headers
    res.setHeader("Content-Type", "text/event-stream");
    res.setHeader("Cache-Control", "no-cache");
    res.setHeader("Connection", "keep-alive");
    res.setHeader("X-Request-Id", requestId);
    // CRITICAL: Flush headers immediately to establish SSE connection.
    // Without this, headers are buffered and the client times out waiting.
    res.flushHeaders();
    // Send initial comment to confirm connection is alive
    res.write(":ok\n\n");
    return new Promise((resolve, reject) => {
        let isFirst = true;                    // first delta carries the assistant role
        let lastModel = "claude-sonnet-4";     // fallback until an assistant event names the model
        let isComplete = false;                // set once a "result" event arrived
        // Handle actual client disconnect (response stream closed)
        res.on("close", () => {
            if (!isComplete) {
                // Client disconnected before response completed - kill subprocess
                subprocess.kill();
            }
            resolve();
        });
        // Handle streaming content deltas: each delta becomes one OpenAI-style chunk
        subprocess.on("content_delta", (event) => {
            const text = event.event.delta?.text || "";
            if (text && !res.writableEnded) {
                const chunk = {
                    id: `chatcmpl-${requestId}`,
                    object: "chat.completion.chunk",
                    created: Math.floor(Date.now() / 1000),
                    model: lastModel,
                    choices: [{
                            index: 0,
                            delta: {
                                // role only on the first chunk, per OpenAI chunk format
                                role: isFirst ? "assistant" : undefined,
                                content: text,
                            },
                            finish_reason: null,
                        }],
                };
                res.write(`data: ${JSON.stringify(chunk)}\n\n`);
                isFirst = false;
            }
        });
        // Handle final assistant message (for model name)
        subprocess.on("assistant", (message) => {
            lastModel = message.message.model;
        });
        // Normal completion: emit the finish_reason chunk, then [DONE]
        subprocess.on("result", (_result) => {
            isComplete = true;
            if (!res.writableEnded) {
                // Send final done chunk with finish_reason
                const doneChunk = createDoneChunk(requestId, lastModel);
                res.write(`data: ${JSON.stringify(doneChunk)}\n\n`);
                res.write("data: [DONE]\n\n");
                res.end();
            }
            resolve();
        });
        // Subprocess-level error: report it as an SSE data event and close
        subprocess.on("error", (error) => {
            console.error("[Streaming] Error:", error.message);
            if (!res.writableEnded) {
                res.write(`data: ${JSON.stringify({
                    error: { message: error.message, type: "server_error", code: null },
                })}\n\n`);
                res.end();
            }
            resolve();
        });
        subprocess.on("close", (code) => {
            // Subprocess exited - ensure response is closed
            if (!res.writableEnded) {
                if (code !== 0 && !isComplete) {
                    // Abnormal exit without result - send error
                    res.write(`data: ${JSON.stringify({
                        error: { message: `Process exited with code ${code}`, type: "server_error", code: null },
                    })}\n\n`);
                }
                res.write("data: [DONE]\n\n");
                res.end();
            }
            resolve();
        });
        // Start the subprocess (handlers above are already registered)
        subprocess.start(cliInput.prompt, {
            model: cliInput.model,
            sessionId: cliInput.sessionId,
        }).catch((err) => {
            console.error("[Streaming] Subprocess start error:", err);
            reject(err);
        });
    });
}
|
||||||
|
/**
 * Handle non-streaming response.
 *
 * Buffers the subprocess's final "result" event and sends a single
 * OpenAI-style JSON response when the subprocess closes. The returned
 * promise resolves in every path (success, error, abnormal exit).
 */
async function handleNonStreamingResponse(res, subprocess, cliInput, requestId) {
    return new Promise((resolve) => {
        let finalResult = null;
        // Remember the final result; it is sent only once "close" fires.
        subprocess.on("result", (result) => {
            finalResult = result;
        });
        subprocess.on("error", (error) => {
            console.error("[NonStreaming] Error:", error.message);
            res.status(500).json({
                error: {
                    message: error.message,
                    type: "server_error",
                    code: null,
                },
            });
            resolve();
        });
        subprocess.on("close", (code) => {
            if (finalResult) {
                res.json(cliResultToOpenai(finalResult, requestId));
            }
            // headersSent guard: the "error" handler may already have responded
            else if (!res.headersSent) {
                res.status(500).json({
                    error: {
                        message: `Claude CLI exited with code ${code} without response`,
                        type: "server_error",
                        code: null,
                    },
                });
            }
            resolve();
        });
        // Start the subprocess
        subprocess
            .start(cliInput.prompt, {
            model: cliInput.model,
            sessionId: cliInput.sessionId,
        })
            .catch((error) => {
            res.status(500).json({
                error: {
                    message: error.message,
                    type: "server_error",
                    code: null,
                },
            });
            resolve();
        });
    });
}
|
||||||
|
/**
 * Handle GET /v1/models.
 *
 * Returns the static list of Claude models this proxy exposes, in
 * OpenAI list format.
 */
export function handleModels(_req, res) {
    const created = Math.floor(Date.now() / 1000);
    const data = ["claude-opus-4", "claude-sonnet-4", "claude-haiku-4"].map((id) => ({
        id,
        object: "model",
        owned_by: "anthropic",
        created,
    }));
    res.json({ object: "list", data });
}
|
||||||
|
/**
 * Handle GET /health.
 *
 * Liveness probe: reports status, the backing provider, and the current
 * server time.
 */
export function handleHealth(_req, res) {
    const payload = {
        status: "ok",
        provider: "claude-code-cli",
        timestamp: new Date().toISOString(),
    };
    res.json(payload);
}
|
||||||
|
//# sourceMappingURL=routes.js.map
|
||||||
@@ -0,0 +1,93 @@
|
|||||||
|
#!/usr/bin/env python3
"""Migration: rewrite chat_backup.jsonl `ts` values from container-uptime-ms to UNIX-ms.

Background: before the fix, _append_chat_backup() used
`asyncio.get_event_loop().time()`, which is container-monotonic time (back to 0
on every restart). Mixed with app-side `Date.now()` (real UNIX-ms) this broke
the app's history sorting.

Strategy: entries with ts < 1e12 (not plausible UNIX-ms) get a synthetic
timestamp anchored at the file's mtime, stepping 60 seconds backwards per
entry. File order is preserved (append-only order was chronologically correct;
only the ts values were garbage). Entries already carrying real UNIX-ms
(file_deleted markers, entries written after the bridge fix) are left alone.

Idempotent: running twice is safe -- the second run finds only UNIX-ms values
and touches nothing.

Backup: writes chat_backup.jsonl.bak first, then replaces atomically.
"""

import json
import os
import shutil
import sys
import time
from pathlib import Path

UNIX_MS_THRESHOLD = 10 ** 12  # below ~2001 in ms -> cannot be real UNIX time
GAP_SECONDS = 60              # synthetic spacing: one entry per minute back from mtime


def migrate(path: Path) -> None:
    """Rewrite non-UNIX-ms `ts` values in the JSONL file at `path` in place."""
    # Hard abort when the backup file is missing (script context: exit code 1).
    if not path.exists():
        print(f"Datei nicht da: {path}")
        sys.exit(1)

    entries = []
    for raw_line in path.read_text(encoding="utf-8").splitlines():
        stripped = raw_line.strip()
        if not stripped:
            continue
        try:
            entries.append(json.loads(stripped))
        except Exception as e:
            # Broken lines are reported and skipped, never fatal.
            print(f"  ueberspringe kaputte Zeile: {e}")

    if not entries:
        print("Datei leer")
        return

    anchor_ms = int(os.path.getmtime(path) * 1000)
    total = len(entries)
    fixed = 0

    # Assign replacement timestamps only where ts is missing, non-numeric, or
    # below the UNIX-ms threshold; genuine UNIX-ms entries stay untouched.
    for index, entry in enumerate(entries):
        ts = entry.get("ts", 0)
        if not isinstance(ts, (int, float)) or ts < UNIX_MS_THRESHOLD:
            # Newest entry lands on the mtime; each older one steps back GAP_SECONDS.
            entry["ts"] = anchor_ms - (total - 1 - index) * GAP_SECONDS * 1000
            fixed += 1

    if fixed == 0:
        print(f"Nichts zu migrieren ({total} Eintraege, alle ts schon UNIX-ms)")
        return

    # Keep a copy before touching the original.
    backup_path = path.with_suffix(path.suffix + ".bak")
    shutil.copy2(path, backup_path)
    print(f"Backup: {backup_path}")

    # Write a temp file and swap it in atomically.
    tmp_path = path.with_suffix(path.suffix + ".tmp")
    with open(tmp_path, "w", encoding="utf-8") as f:
        for entry in entries:
            f.write(json.dumps(entry, ensure_ascii=False) + "\n")
    tmp_path.replace(path)

    print(f"Migration fertig: {fixed}/{total} ts umgeschrieben")
    print(f"  aelteste neu : {time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(entries[0]['ts'] / 1000))}")
    print(f"  neueste neu : {time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(entries[-1]['ts'] / 1000))}")


if __name__ == "__main__":
    default = Path("/var/lib/docker/volumes/aria-agent_aria-shared/_data/config/chat_backup.jsonl")
    path = Path(sys.argv[1]) if len(sys.argv) > 1 else default
    migrate(path)
|
||||||
Reference in New Issue
Block a user