diff --git a/diagnostic/index.html b/diagnostic/index.html
index 233f393..3ce566c 100644
--- a/diagnostic/index.html
+++ b/diagnostic/index.html
@@ -102,7 +102,12 @@
             Nicht getestet
-          </div>
+          </div>
+          <div id="proxy-models" style="display:none">
+            <div>Verfuegbare Modelle:</div>
+            <div id="proxy-models-list"></div>
+            <div id="proxy-models-hint"></div>
+          </div>
@@ -275,6 +280,7 @@
} else {
addChat('error', msg.error, 'Claude Proxy Fehler');
}
+ if (msg.models && msg.models.length) showProxyModels(msg.models);
return;
}
if (msg.type === 'docker_logs') {
@@ -334,6 +340,7 @@
document.getElementById('proxy-status').textContent = STATUS_LABELS[proxy.status] || proxy.status;
document.getElementById('proxy-error').textContent = proxy.lastError || '';
document.getElementById('btn-proxy-test').disabled = proxy.status === 'testing';
+ if (proxy.models && proxy.models.length) showProxyModels(proxy.models);
// Buttons
document.getElementById('btn-gw').disabled = gw.status !== 'connected';
@@ -403,6 +410,19 @@
autoScroll[tab] = true;
}
+    // Render the model list the proxy reported: one chip per model id,
+    // plus a hint line with the docker-compose DEFAULT_MODEL candidates.
+    function showProxyModels(models) {
+      const container = document.getElementById('proxy-models');
+      const list = document.getElementById('proxy-models-list');
+      const hint = document.getElementById('proxy-models-hint');
+      container.style.display = 'block';
+      list.innerHTML = models.map(m => {
+        // Strip the "openai/" prefix — that short form is what DEFAULT_MODEL expects.
+        const clean = m.replace('openai/', '');
+        return `<span class="model-chip" title="${escapeHtml(clean)}">${escapeHtml(m)}</span>`;
+      }).join('');
+      const cleanNames = models.map(m => m.replace('openai/', ''));
+      hint.textContent = `DEFAULT_MODEL fuer docker-compose.yml: ${cleanNames.join(' | ')}`;
+    }
+
function escapeHtml(str) {
return str.replace(/&/g,'&').replace(//g,'>');
}
diff --git a/diagnostic/server.js b/diagnostic/server.js
index 84239d2..bbd26a9 100644
--- a/diagnostic/server.js
+++ b/diagnostic/server.js
@@ -365,8 +365,14 @@ async function testProxy(prompt) {
}
const modelsData = await modelsRes.json();
- const modelCount = modelsData.data?.length || 0;
- log("info", "proxy", `Proxy erreichbar — ${modelCount} Model(s) verfuegbar`);
+ const models = (modelsData.data || []).map(m => m.id).filter(Boolean);
+ log("info", "proxy", `Proxy erreichbar — ${models.length} Model(s) verfuegbar`);
+
+ // Modellnamen loggen + OpenClaw-Config Hinweis
+ if (models.length > 0) {
+ log("info", "proxy", `Modelle: ${models.join(", ")}`);
+ log("info", "proxy", `Fuer docker-compose.yml (DEFAULT_MODEL): ${models.map(m => m.replace("openai/", "")).join(" | ")}`);
+ }
// Schritt 2: Chat Completion testen (kurzer Prompt)
const testPrompt = prompt || "Antworte mit genau einem Wort: Ping";
@@ -397,8 +403,9 @@ async function testProxy(prompt) {
state.proxy.status = "connected";
state.proxy.lastError = null;
+ state.proxy.models = models;
broadcastState();
- broadcast({ type: "proxy_result", ok: true, reply });
+ broadcast({ type: "proxy_result", ok: true, reply, models });
} catch (err) {
log("error", "proxy", `Fehler: ${err.message}`);