diff --git a/CHANGELOG.md b/CHANGELOG.md
index 55e35cc..439e6cf 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,29 @@ All project changes. Format: [Keep a Changelog](https://keepachangelog.c
---
+## [0.0.0.5] — 2026-03-13
+
+### Added
+
+**Diagnostic: Pipeline tab**
+- New "Pipeline" tab in the log area: shows the full message flow when a chat message is sent via the diagnostic UI
+- Tracks every step: send → gateway ACK → streaming deltas → final response (or error)
+- Timing: each step shows the elapsed time since pipeline start
+- Color coding: blue (steps), green (success), red (errors)
+- 60s timeout: marks the pipeline as failed if no response arrives
+- Works for both gateway-direct and RVS messages
+
+### Changed
+
+**OpenClaw config: custom provider format**
+- `openclaw.json` uses `models.providers` (an object, not an array) with `api: "openai-completions"`
+- Model entries need both an `id` and a `name` field
+- `aria-setup.sh` writes the correct config via a heredoc pattern (`'"'"'INNEREOF'"'"'`); see the quoting sketch below
+- `DEFAULT_MODEL=proxy/claude-sonnet-4`: the provider prefix is required for custom providers
+- `OPENAI_BASE_URL` and `OPENAI_API_KEY` removed; OpenClaw ignores these env vars and only reads the `models.providers` config
+
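+The `'"'"'INNEREOF'"'"'` quoting is easy to misread, so here is a minimal sketch of how it resolves (paths as in `aria-setup.sh`):
+
+```sh
+# Each '"'"' closes the single-quoted string, appends a double-quoted ', and reopens it,
+# so the shell inside the container sees a *quoted* heredoc delimiter:
+cat > /home/node/.openclaw/openclaw.json << 'INNEREOF'
+{ "note": "a quoted delimiter disables $VAR expansion in this body" }
+INNEREOF
+```
+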
+---
+
## [0.0.0.4] — 2026-03-11 / 2026-03-12
### Added
diff --git a/aria-setup.sh b/aria-setup.sh
index fcf5424..f00d189 100755
--- a/aria-setup.sh
+++ b/aria-setup.sh
@@ -18,22 +18,64 @@ echo " aria-core is running."
# Fix permissions: the Docker volume is owned by root, OpenClaw runs as node
echo ""
-echo "[2/4] Fixing permissions on /home/node/.openclaw..."
+echo "[2/3] Fixing permissions on /home/node/.openclaw..."
docker exec -u root aria-core chown -R node:node /home/node/.openclaw
echo " Permissions OK."
-# Switch the model to the openai proxy
+# Write the OpenClaw config: custom provider for claude-max-api-proxy
echo ""
-echo "[3/4] Setting the model to openai/claude-sonnet-4-6 (via proxy)..."
-docker exec aria-core openclaw models set openai/claude-sonnet-4-6
-echo " Model set."
+echo "[3/3] Writing openclaw.json (proxy provider + model)..."
+docker exec aria-core sh -c 'cat > /home/node/.openclaw/openclaw.json << '"'"'INNEREOF'"'"'
+{
+  "meta": {
+    "lastTouchedVersion": "2026.3.8"
+  },
+  "agents": {
+    "defaults": {
+      "model": {
+        "primary": "proxy/claude-sonnet-4"
+      },
+      "compaction": {
+        "mode": "safeguard"
+      },
+      "maxConcurrent": 4,
+      "subagents": {
+        "maxConcurrent": 8
+      }
+    }
+  },
+  "models": {
+    "providers": {
+      "proxy": {
+        "api": "openai-completions",
+        "baseUrl": "http://proxy:3456/v1",
+        "apiKey": "not-needed",
+        "models": [
+          { "id": "claude-sonnet-4", "name": "claude-sonnet-4" },
+          { "id": "claude-opus-4", "name": "claude-opus-4" }
+        ]
+      }
+    }
+  },
+  "messages": {
+    "ackReactionScope": "group-mentions"
+  },
+  "commands": {
+    "native": "auto",
+    "nativeSkills": "auto",
+    "restart": true,
+    "ownerDisplay": "raw"
+  }
+}
+INNEREOF'
+echo " Config geschrieben."
-# Restart the gateway so the change takes effect
+# Restart so the gateway picks up the new config
echo ""
-echo "[4/4] Restarting aria-core so the gateway loads the new model..."
+echo "Restarting aria-core..."
docker restart aria-core
echo ""
echo "=== Setup fertig ==="
echo "Teste mit: docker logs aria-core --tail 20"
-echo "Erwartete Zeile: 'agent model: openai/claude-sonnet-4-6'"
+echo "Erwartete Zeile: 'agent model: proxy/claude-sonnet-4'"
diff --git a/bridge/aria_bridge.py b/bridge/aria_bridge.py
index 5da437f..7f7b6a1 100644
--- a/bridge/aria_bridge.py
+++ b/bridge/aria_bridge.py
@@ -603,6 +603,9 @@ class ARIABridge:
return
frame_type = message.get("type", "")
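+        # Log every inbound gateway frame (type/event/method plus the first 200 raw chars)
+        # so a pipeline run can be traced end to end in the bridge log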
+        logger.info("[core] <<< Frame: type=%s event=%s method=%s | %s",
+                    frame_type, message.get("event", "-"), message.get("method", "-"),
+                    raw_message[:200])
# ── Responses to our own requests (e.g. the chat.send ack) ──
if frame_type == "res":
diff --git a/diagnostic/index.html b/diagnostic/index.html
index 22a6033..85ab718 100644
--- a/diagnostic/index.html
+++ b/diagnostic/index.html
@@ -57,6 +57,10 @@
.log-entry.warn { color: #FFD60A; }
.log-entry.info { color: #AAB; }
.log-entry.debug { color: #555570; }
+ .log-entry.pipeline-step { color: #0096FF; border-left: 2px solid #0096FF; padding-left: 6px; margin: 2px 0; }
+ .log-entry.pipeline-ok { color: #34C759; border-left: 2px solid #34C759; padding-left: 6px; margin: 2px 0; }
+ .log-entry.pipeline-err { color: #FF3B30; border-left: 2px solid #FF3B30; padding-left: 6px; margin: 2px 0; }
+ .log-entry.pipeline-sep { color: #333; margin: 6px 0 2px; }
.chat-box { background: #080810; border: 1px solid #1E1E2E; border-radius: 6px;
min-height: 120px; max-height: 250px; overflow-y: auto; padding: 8px; margin-bottom: 8px; }
@@ -166,6 +170,7 @@
+
@@ -206,8 +212,8 @@
let ws;
let activeTab = 'all';
const DOCKER_TABS = ['gateway', 'proxy', 'bridge'];
- const autoScroll = { all: true, gateway: true, rvs: true, proxy: true, bridge: true, server: true };
- const logCounts = { all: 0, gateway: 0, rvs: 0, proxy: 0, bridge: 0, server: 0 };
+ const autoScroll = { all: true, gateway: true, rvs: true, proxy: true, bridge: true, server: true, pipeline: true };
+ const logCounts = { all: 0, gateway: 0, rvs: 0, proxy: 0, bridge: 0, server: 0, pipeline: 0 };
const logBoxes = {
all: document.getElementById('log-all'),
@@ -216,6 +222,7 @@
proxy: document.getElementById('log-proxy'),
bridge: document.getElementById('log-bridge'),
server: document.getElementById('log-server'),
+ pipeline: document.getElementById('log-pipeline'),
};
// Scroll pause per active tab
@@ -269,6 +276,7 @@
if (source === 'proxy') return 'proxy';
if (source === 'bridge') return 'bridge';
if (source === 'server' || source === 'browser') return 'server';
+ if (source === 'pipeline') return 'pipeline';
return null;
}
@@ -544,6 +552,15 @@
const time = ts ? new Date(ts).toLocaleTimeString('de-DE') : new Date().toLocaleTimeString('de-DE');
const line = `${time} [${source}] ${message}`;
+      // Pipeline entries go only to the Pipeline tab (not into "All")
+      if (source === 'pipeline') {
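+        // an "info" line containing ">>>" is the success line emitted by pipelineEnd() in server.js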
+        const pipeLevel = level === 'error' ? 'pipeline-err' : level === 'info' && message.includes('>>>') ? 'pipeline-ok' : 'pipeline-step';
+        appendToLog('pipeline', pipeLevel, `${time} ${message}`);
+        logCounts.pipeline++;
+        document.getElementById('count-pipeline').textContent = logCounts.pipeline;
+        return;
+      }
+
// In "Alle" und in den passenden Tab schreiben
appendToLog('all', level, line);
const tab = mapSourceToTab(source);
diff --git a/diagnostic/server.js b/diagnostic/server.js
index df9019a..daf6d28 100644
--- a/diagnostic/server.js
+++ b/diagnostic/server.js
@@ -41,6 +41,47 @@ let rvsWs = null;
let reqIdCounter = 0;
const browserClients = new Set();
+// ── Pipeline Tracking ──────────────────────────────────
+let pipelineActive = false;
+let pipelineStartTime = 0;
+
+function plog(message, level) {
+  const elapsed = pipelineActive ? `+${Date.now() - pipelineStartTime}ms` : "";
+  const entry = { ts: new Date().toISOString(), level: level || "info", source: "pipeline", message: `${elapsed ? `[${elapsed}] ` : ""}${message}` };
+  logs.push(entry);
+  if (logs.length > 500) logs.shift();
+  console.log(`[PIPELINE] ${entry.message}`);
+  broadcast({ type: "log", entry });
+}
+
+let pipelineTimeout = null;
+
+function pipelineStart(method, text) {
+  // If a pipeline is still running, close it out first
+  if (pipelineActive) pipelineEnd(false, "Aborted (new message)");
+  pipelineActive = true;
+  pipelineStartTime = Date.now();
+  if (pipelineTimeout) clearTimeout(pipelineTimeout);
+  pipelineTimeout = setTimeout(() => {
+    if (pipelineActive) pipelineEnd(false, "Timeout: no response after 60s");
+  }, 60000);
+  plog(`━━━ Pipeline Start: ${method} ━━━`);
+  plog(`Message: "${text}"`);
+}
+
+function pipelineEnd(ok, detail) {
+  if (!pipelineActive) return;
+  if (pipelineTimeout) { clearTimeout(pipelineTimeout); pipelineTimeout = null; }
+  const elapsed = Date.now() - pipelineStartTime;
+  if (ok) {
+    plog(`>>> Done (${elapsed}ms): ${detail}`);
+  } else {
+    plog(`>>> ERROR (${elapsed}ms): ${detail}`, "error");
+  }
+  plog(`━━━ Pipeline End ━━━`);
+  pipelineActive = false;
+}
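+
+// Illustrative Pipeline-tab output for a successful gateway run
+// (clock-time prefix omitted, timings invented):
+//   [+0ms] ━━━ Pipeline Start: Gateway ━━━
+//   [+0ms] Message: "ping"
+//   [+2ms] chat.send [diag-1] sent to the gateway, waiting for ACK...
+//   [+38ms] Gateway ACK [diag-1]: message accepted
+//   [+912ms] Streaming Delta: "pong"
+//   [+1204ms] >>> Done (1204ms): "pong"
+//   [+1204ms] ━━━ Pipeline End ━━━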
+
function nextReqId() {
return `diag-${++reqIdCounter}`;
}
@@ -202,6 +243,10 @@ function handleGatewayMessage(msg) {
if (msg.type === "res") {
    const status = msg.ok ? "OK" : `ERROR: ${JSON.stringify(msg.error).slice(0, 100)}`;
log("info", "gateway", `Response [${msg.id}]: ${status}`);
+    if (pipelineActive) {
+      if (msg.ok) plog(`Gateway ACK [${msg.id}]: message accepted`);
+      else plog(`Gateway NACK [${msg.id}]: ${JSON.stringify(msg.error).slice(0, 100)}`, "error");
+    }
broadcast({ type: "response", msg });
return;
}
@@ -214,6 +259,7 @@ function handleGatewayMessage(msg) {
const delta = payload.delta || payload.text || "";
if (delta) {
log("info", "gateway", `Delta: "${delta.slice(0, 60)}"`);
+ if (pipelineActive) plog(`Streaming Delta: "${delta.slice(0, 80)}"`);
broadcast({ type: "chat_delta", delta, payload });
}
return;
@@ -222,6 +268,7 @@ function handleGatewayMessage(msg) {
if (event === "chat:final") {
const text = payload.text || payload.message || "";
log("info", "gateway", `ANTWORT: "${text.slice(0, 200)}"`);
+ if (pipelineActive) pipelineEnd(true, `"${text.slice(0, 120)}"`);
broadcast({ type: "chat_final", text, payload });
return;
}
@@ -229,6 +276,7 @@ function handleGatewayMessage(msg) {
if (event === "chat:error") {
    const error = payload.error || payload.message || "Unknown";
    log("error", "gateway", `Chat error: ${error}`);
+ if (pipelineActive) pipelineEnd(false, error);
broadcast({ type: "chat_error", error, payload });
return;
}
@@ -238,9 +286,10 @@ function handleGatewayMessage(msg) {
}
}
-function sendToGateway(text) {
+function sendToGateway(text, isPipeline) {
if (!gatewayWs || gatewayWs.readyState !== WebSocket.OPEN) {
log("error", "gateway", "Nicht verbunden — kann nicht senden");
+ if (isPipeline) pipelineEnd(false, "Gateway nicht verbunden");
return false;
}
@@ -258,6 +307,7 @@ function sendToGateway(text) {
gatewayWs.send(JSON.stringify(msg));
log("info", "gateway", `chat.send [${reqId}]: "${text}"`);
+  if (isPipeline) plog(`chat.send [${reqId}] sent to the gateway, waiting for ACK...`);
return true;
}
@@ -294,7 +344,11 @@ function connectRVS(forcePlain) {
try {
const msg = JSON.parse(raw.toString());
if (msg.type === "chat" && msg.payload) {
-        log("info", "rvs", `Chat from ${msg.payload.sender || "?"}: "${(msg.payload.text || "").slice(0, 100)}"`);
+        const sender = msg.payload.sender || "?";
+        log("info", "rvs", `Chat from ${sender}: "${(msg.payload.text || "").slice(0, 100)}"`);
+        if (pipelineActive && sender !== "diagnostic") {
+          pipelineEnd(true, `Response via RVS from ${sender}: "${(msg.payload.text || "").slice(0, 120)}"`);
+        }
broadcast({ type: "rvs_chat", msg });
} else if (msg.type === "heartbeat") {
        // ignore
@@ -327,9 +381,10 @@ function connectRVS(forcePlain) {
});
}
-function sendToRVS(text) {
+function sendToRVS(text, isPipeline) {
if (!rvsWs || rvsWs.readyState !== WebSocket.OPEN) {
log("error", "rvs", "Nicht verbunden");
+ if (isPipeline) pipelineEnd(false, "RVS nicht verbunden");
return false;
}
@@ -339,6 +394,7 @@ function sendToRVS(text) {
timestamp: Date.now(),
}));
log("info", "rvs", `Gesendet via RVS: "${text}"`);
+ if (isPipeline) plog(`Nachricht an RVS gesendet — warte auf Antwort via RVS...`);
return true;
}
@@ -792,9 +848,11 @@ wss.on("connection", (ws) => {
const msg = JSON.parse(raw.toString());
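+      // Frames from the diagnostic page look like {"action": "test_gateway", "text": "..."}
+      // (same shape for "test_rvs"); both actions now start a pipeline run before sending.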
if (msg.action === "test_gateway") {
-        sendToGateway(msg.text || "aria, are you still alive?");
+        pipelineStart("Gateway", msg.text || "aria, are you still alive?");
+        sendToGateway(msg.text || "aria, are you still alive?", true);
      } else if (msg.action === "test_rvs") {
-        sendToRVS(msg.text || "aria, are you still alive?");
+        pipelineStart("RVS", msg.text || "aria, are you still alive?");
+        sendToRVS(msg.text || "aria, are you still alive?", true);
} else if (msg.action === "reconnect_gateway") {
connectGateway();
} else if (msg.action === "reconnect_rvs") {
diff --git a/docker-compose.yml b/docker-compose.yml
index 942a678..76abfb7 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -30,9 +30,7 @@ services:
environment:
- CANVAS_HOST=127.0.0.1
- OPENCLAW_GATEWAY_TOKEN=${ARIA_AUTH_TOKEN}
- - OPENAI_API_KEY=not-needed
- - OPENAI_BASE_URL=http://proxy:3456/v1
- - DEFAULT_MODEL=openai/claude-sonnet-4-6
+ - DEFAULT_MODEL=proxy/claude-sonnet-4
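+      # The "proxy/" prefix must match the provider key under models.providers in openclaw.json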
- RATE_LIMIT_PER_USER=30
- DISPLAY=:0
volumes: