/**
 * ChatScreen - Hauptchat-Oberflaeche
 *
 * Zeigt die Konversation mit ARIA, Texteingabe, Sprach-Button,
 * Datei- und Kamera-Upload.
 */
|
|
|
|
import React, { useState, useEffect, useRef, useCallback, useMemo } from 'react';
|
|
import {
|
|
View,
|
|
Text,
|
|
TextInput,
|
|
TouchableOpacity,
|
|
FlatList,
|
|
KeyboardAvoidingView,
|
|
Platform,
|
|
StyleSheet,
|
|
Image,
|
|
ScrollView,
|
|
Modal,
|
|
ToastAndroid,
|
|
} from 'react-native';
|
|
import AsyncStorage from '@react-native-async-storage/async-storage';
|
|
import RNFS from 'react-native-fs';
|
|
import rvs, { RVSMessage, ConnectionState } from '../services/rvs';
|
|
import audioService from '../services/audio';
|
|
import wakeWordService from '../services/wakeword';
|
|
import updateService from '../services/updater';
|
|
import VoiceButton from '../components/VoiceButton';
|
|
import FileUpload, { FileData } from '../components/FileUpload';
|
|
import CameraUpload, { PhotoData } from '../components/CameraUpload';
|
|
import MessageText from '../components/MessageText';
|
|
import { RecordingResult, loadConvWindowMs, loadTtsSpeed, TTS_SPEED_DEFAULT } from '../services/audio';
|
|
import Geolocation from '@react-native-community/geolocation';
|
|
|
|
// --- Types ---

/** A file/image/audio attachment rendered inside a chat bubble. */
interface Attachment {
  type: 'image' | 'file' | 'audio';
  name: string;
  size?: number; // size in bytes, if known
  uri?: string; // local path (file://) for display
  mimeType?: string;
  serverPath?: string; // path on the server (/shared/uploads/...) for re-download
}

/** One chat bubble, either from the user or from ARIA. */
interface ChatMessage {
  id: string;
  sender: 'user' | 'aria';
  text: string;
  timestamp: number;
  attachments?: Attachment[];
  /** Bridge message id used to associate TTS audio with this message. */
  messageId?: string;
  /** Local path to the cached TTS audio file (file://...). */
  audioPath?: string;
}
|
|
|
|
// --- Konstanten ---
|
|
|
|
const CHAT_STORAGE_KEY = 'aria_chat_messages';
|
|
const MAX_STORED_MESSAGES = 500;
|
|
const MAX_MEMORY_MESSAGES = 500;
|
|
|
|
// Hilfe: Messages-Array auf Max kappen (aelteste raus) — verhindert OOM
|
|
// im Gespraechsmodus bei sehr vielen Nachrichten.
|
|
const capMessages = (msgs: ChatMessage[]): ChatMessage[] =>
|
|
msgs.length > MAX_MEMORY_MESSAGES ? msgs.slice(-MAX_MEMORY_MESSAGES) : msgs;
|
|
// Default directory for persisted chat attachments; a user-configured
// override is stored in AsyncStorage under STORAGE_PATH_KEY.
const DEFAULT_ATTACHMENT_DIR = `${RNFS.DocumentDirectoryPath}/chat_attachments`;
const STORAGE_PATH_KEY = 'aria_attachment_storage_path';
|
|
|
|
async function getAttachmentDir(): Promise<string> {
|
|
try {
|
|
const saved = await AsyncStorage.getItem(STORAGE_PATH_KEY);
|
|
return saved || DEFAULT_ATTACHMENT_DIR;
|
|
} catch { return DEFAULT_ATTACHMENT_DIR; }
|
|
}
|
|
|
|
/** Speichert Base64-Daten als Datei, gibt file:// Pfad zurueck */
|
|
async function persistAttachment(base64Data: string, msgId: string, fileName: string): Promise<string> {
|
|
const cacheDir = await getAttachmentDir();
|
|
await RNFS.mkdir(cacheDir);
|
|
// Dateiendung aus originalem Dateinamen oder Fallback
|
|
const ext = fileName.includes('.') ? fileName.split('.').pop() : 'bin';
|
|
const safeName = `${msgId}_${fileName.replace(/[^a-zA-Z0-9._-]/g, '_')}`;
|
|
const filePath = `${cacheDir}/${safeName}`;
|
|
await RNFS.writeFile(filePath, base64Data, 'base64');
|
|
return `file://${filePath}`;
|
|
}
|
|
|
|
/** Prueft ob eine lokale Datei noch existiert */
|
|
async function checkFileExists(uri: string): Promise<boolean> {
|
|
if (!uri || !uri.startsWith('file://')) return false;
|
|
return RNFS.exists(uri.replace('file://', ''));
|
|
}
|
|
|
|
// --- Component ---

const ChatScreen: React.FC = () => {
  // Conversation history (capped to MAX_MEMORY_MESSAGES).
  const [messages, setMessages] = useState<ChatMessage[]>([]);
  const [inputText, setInputText] = useState('');
  const [connectionState, setConnectionState] = useState<ConnectionState>('disconnected');
  // Visibility of the two upload pickers.
  const [showFileUpload, setShowFileUpload] = useState(false);
  const [showCameraUpload, setShowCameraUpload] = useState(false);
  const [gpsEnabled, setGpsEnabled] = useState(false);
  const [wakeWordActive, setWakeWordActive] = useState(false);
  const [fullscreenImage, setFullscreenImage] = useState<string | null>(null);
  // In-chat text search.
  const [searchQuery, setSearchQuery] = useState('');
  const [searchVisible, setSearchVisible] = useState(false);
  // Attachments queued for the next send (files and photos).
  const [pendingAttachments, setPendingAttachments] = useState<{file: any, isPhoto: boolean}[]>([]);
  // Thinking-indicator state reported by the bridge.
  const [agentActivity, setAgentActivity] = useState<{activity: string, tool: string}>({activity: 'idle', tool: ''});
  // Service status (gamebox: F5-TTS / Whisper load state) + banner visibility.
  const [serviceStatus, setServiceStatus] = useState<Record<string, {state: string, model?: string, loadSeconds?: number, error?: string}>>({});
  const [serviceBannerDismissed, setServiceBannerDismissed] = useState(false);
  // Device-local TTS config: global toggle (from Settings) + temporary mute (mouth button).
  const [ttsDeviceEnabled, setTtsDeviceEnabled] = useState(true);
  const [ttsMuted, setTtsMuted] = useState(false);
  // Device-local XTTS voice choice (takes precedence over the global default).
  const localXttsVoiceRef = useRef<string>('');
  // Device-local TTS playback speed (speed param passed to F5-TTS).
  const ttsSpeedRef = useRef<number>(TTS_SPEED_DEFAULT);
  // Mirror of the TTS settings in a ref — so the onMessage closure
  // (useEffect with [] deps) ALWAYS sees current values. Without the ref,
  // canPlay would stay at its mount-time value (mute ignored, or the
  // AsyncStorage load never observed).
  const ttsCanPlayRef = useRef<boolean>(true);

  const flatListRef = useRef<FlatList>(null);
  // Monotonic counter used to build unique message ids.
  const messageIdCounter = useRef(0);

  // Generate a unique message id (timestamp + counter).
  const nextId = (): string => {
    messageIdCounter.current += 1;
    return `msg_${Date.now()}_${messageIdCounter.current}`;
  };
|
|
|
|
// TTS-Settings beim Mount + bei Screen-Fokus neu laden (damit Settings-Toggle sofort greift)
|
|
useEffect(() => {
|
|
const loadTtsSettings = async () => {
|
|
const enabled = await AsyncStorage.getItem('aria_tts_enabled');
|
|
setTtsDeviceEnabled(enabled !== 'false'); // default true
|
|
const muted = await AsyncStorage.getItem('aria_tts_muted');
|
|
setTtsMuted(muted === 'true'); // default false
|
|
const voice = await AsyncStorage.getItem('aria_xtts_voice');
|
|
localXttsVoiceRef.current = voice || '';
|
|
ttsSpeedRef.current = await loadTtsSpeed();
|
|
};
|
|
loadTtsSettings();
|
|
// Poll alle 2s um Settings-Aenderung mitzubekommen (einfache Loesung ohne Context)
|
|
const interval = setInterval(loadTtsSettings, 2000);
|
|
return () => clearInterval(interval);
|
|
}, []);
|
|
|
|
  // Wake word: load config once + prepare Porcupine (when an access key is
  // set). Failures are deliberately ignored — wake word is optional.
  useEffect(() => {
    wakeWordService.loadFromStorage().catch(() => {});
  }, []);

  // Keep ttsCanPlayRef current — the onMessage closure below reads this ref
  // instead of ttsDeviceEnabled/ttsMuted directly (those would be stale).
  useEffect(() => {
    ttsCanPlayRef.current = ttsDeviceEnabled && !ttsMuted;
  }, [ttsDeviceEnabled, ttsMuted]);
|
|
|
|
const toggleMute = useCallback(() => {
|
|
setTtsMuted(prev => {
|
|
const next = !prev;
|
|
AsyncStorage.setItem('aria_tts_muted', String(next));
|
|
// Bei Muten sofort laufende Wiedergabe stoppen
|
|
if (next) audioService.stopPlayback();
|
|
return next;
|
|
});
|
|
}, []);
|
|
|
|
// Chat-Verlauf aus AsyncStorage laden
|
|
const isInitialLoad = useRef(true);
|
|
useEffect(() => {
|
|
const loadMessages = async () => {
|
|
try {
|
|
const stored = await AsyncStorage.getItem(CHAT_STORAGE_KEY);
|
|
console.log('[Chat] AsyncStorage geladen:', stored ? `${stored.length} Bytes` : 'leer');
|
|
if (stored) {
|
|
const parsed: ChatMessage[] = JSON.parse(stored);
|
|
if (Array.isArray(parsed) && parsed.length > 0) {
|
|
console.log('[Chat] ${parsed.length} Nachrichten geladen');
|
|
setMessages(parsed);
|
|
const maxId = parsed.reduce((max, msg) => {
|
|
const num = parseInt(msg.id.split('_').pop() || '0', 10);
|
|
return num > max ? num : max;
|
|
}, 0);
|
|
messageIdCounter.current = maxId;
|
|
}
|
|
}
|
|
} catch (err) {
|
|
console.error('[Chat] Fehler beim Laden des Verlaufs:', err);
|
|
} finally {
|
|
isInitialLoad.current = false;
|
|
}
|
|
};
|
|
loadMessages().then(async () => {
|
|
// Auto-Re-Download: fehlende Anhänge vom Server nachladen (wenn aktiviert)
|
|
const autoDownload = await AsyncStorage.getItem('aria_auto_download');
|
|
if (autoDownload === 'false') return;
|
|
setTimeout(() => {
|
|
setMessages(prev => {
|
|
const missing: {id: string, serverPath: string}[] = [];
|
|
for (const msg of prev) {
|
|
for (const att of msg.attachments || []) {
|
|
if (att.serverPath && !att.uri) {
|
|
missing.push({ id: msg.id, serverPath: att.serverPath });
|
|
}
|
|
}
|
|
}
|
|
if (missing.length > 0) {
|
|
console.log(`[Chat] ${missing.length} fehlende Anhaenge — lade nach...`);
|
|
for (const m of missing) {
|
|
rvs.send('file_request' as any, { serverPath: m.serverPath, requestId: m.id });
|
|
}
|
|
}
|
|
return prev;
|
|
});
|
|
}, 2000); // Warten bis RVS verbunden ist
|
|
});
|
|
}, []);
|
|
|
|
  // Subscribe to RVS messages. Registered once ([] deps); any state the
  // handler needs live must therefore come through refs (see ttsCanPlayRef).
  useEffect(() => {
    const unsubMessage = rvs.onMessage((message: RVSMessage) => {
      // file_saved: bridge reports the server path — store it on the matching
      // attachment (matched by name, only if not set yet) for later re-download.
      if (message.type === 'file_saved') {
        const serverPath = (message.payload.serverPath as string) || '';
        const name = (message.payload.name as string) || '';
        if (serverPath) {
          setMessages(prev => prev.map(m => ({
            ...m,
            attachments: m.attachments?.map(a =>
              a.name === name && !a.serverPath ? { ...a, serverPath } : a
            ),
          })));
        }
        return;
      }

      // file_response: re-download from the server — persist locally, then
      // attach the resulting file:// URI to every attachment with that path.
      if (message.type === 'file_response') {
        const reqId = (message.payload.requestId as string) || '';
        const b64 = (message.payload.base64 as string) || '';
        const serverPath = (message.payload.serverPath as string) || '';
        if (b64 && reqId) {
          const fileName = (message.payload.name as string) || 'download';
          persistAttachment(b64, reqId, fileName).then(filePath => {
            setMessages(prev => prev.map(m => ({
              ...m,
              attachments: m.attachments?.map(a =>
                a.serverPath === serverPath ? { ...a, uri: filePath } : a
              ),
            })));
          }).catch(() => {});
        }
        return;
      }

      if (message.type === 'chat') {
        const sender = (message.payload.sender as string) || '';

        // STT result: write the transcribed text into the voice bubble
        // (matched by the placeholder text set when recording was sent).
        if (sender === 'stt') {
          const sttText = (message.payload.text as string) || '';
          if (sttText) {
            setMessages(prev => prev.map(m =>
              m.sender === 'user' && m.text.includes('Spracheingabe wird verarbeitet')
                ? { ...m, text: `\uD83C\uDFA4 ${sttText}` }
                : m
            ));
          }
          return;
        }

        // Ignore echoes of our own app messages (they are added locally).
        if (sender === 'user') return;

        // Show diagnostic messages as user messages.
        if (sender === 'diagnostic') {
          const diagText = (message.payload.text as string) || '';
          if (diagText) {
            setMessages(prev => capMessages([...prev, {
              id: nextId(),
              sender: 'user',
              text: diagText,
              timestamp: message.timestamp,
            }]));
          }
          return;
        }

        const text = (message.payload.text as string) || '';
        const ts = message.timestamp;
        // Duplicate guard: ignore an identical ARIA text within 5 seconds.
        setMessages(prev => {
          const isDuplicate = prev.some(m =>
            m.sender === 'aria' && m.text === text && Math.abs(m.timestamp - ts) < 5000
          );
          if (isDuplicate) return prev;
          const ariaMsg: ChatMessage = {
            id: nextId(),
            sender: 'aria',
            text,
            timestamp: ts,
            attachments: message.payload.attachments as Attachment[] | undefined,
            messageId: (message.payload.messageId as string) || undefined,
          };
          return capMessages([...prev, ariaMsg]);
        });
      }

      // Play TTS audio when present — honors the device-local mute/disable.
      // IMPORTANT: read via the ref, not state — state would be stale here
      // (closure created once with [] deps).
      const canPlay = ttsCanPlayRef.current;
      if (message.type === 'audio_pcm' || (message.type === 'audio' && message.payload.base64)) {
        // NOTE(review): ttsDeviceEnabled/ttsMuted below are logging only and
        // may be stale — canPlay (from the ref) is the authoritative value.
        console.log('[Chat] audio-msg canPlay=%s (enabled=%s muted=%s)',
          canPlay, ttsDeviceEnabled, ttsMuted);
      }
      if (message.type === 'audio' && message.payload.base64) {
        const b64 = message.payload.base64 as string;
        const refId = (message.payload.messageId as string) || '';
        if (canPlay) audioService.playAudio(b64);
        // ALWAYS write the cache — the replay button must still work later
        // even when muted right now.
        if (refId) {
          audioService.cacheAudio(b64, refId).then(audioPath => {
            if (!audioPath) return;
            setMessages(prev => prev.map(m =>
              m.messageId === refId ? { ...m, audioPath } : m
            ));
          }).catch(() => {});
        }
      }

      // XTTS PCM stream: ALWAYS build the cache, play only when not muted.
      if (message.type === ('audio_pcm' as any)) {
        const p = { ...(message.payload as any), silent: !canPlay };
        const refId = (p.messageId as string) || '';
        audioService.handlePcmChunk(p).then((audioPath: any) => {
          // Only the final chunk yields the complete cached file path.
          if (p.final && audioPath && refId) {
            setMessages(prev => prev.map(m =>
              m.messageId === refId ? { ...m, audioPath } : m
            ));
          }
        }).catch(() => {});
      }

      // Thinking-indicator state from the bridge.
      if (message.type === 'agent_activity') {
        const activity = (message.payload.activity as string) || 'idle';
        const tool = (message.payload.tool as string) || '';
        setAgentActivity({ activity, tool });
      }

      // Voice config from Diagnostic — resets the local app voice to the
      // value just chosen in Diagnostic. This intentionally overrides the
      // user's in-app choice.
      if (message.type === ('config' as any)) {
        const newVoice = ((message.payload as any).xttsVoice as string) ?? '';
        localXttsVoiceRef.current = newVoice;
        AsyncStorage.setItem('aria_xtts_voice', newVoice);
      }

      // XTTS bridge reports a voice finished loading (short status toast).
      if (message.type === ('voice_ready' as any)) {
        const v = ((message.payload as any).voice as string) ?? '';
        const err = (message.payload as any).error as string | undefined;
        if (err) {
          ToastAndroid.show(`Stimme "${v}" Fehler: ${err}`, ToastAndroid.LONG);
        } else {
          ToastAndroid.show(`Stimme "${v || 'Standard'}" bereit`, ToastAndroid.SHORT);
        }
      }

      // Gamebox bridges (f5tts/whisper) report load status — shown in the
      // banner at the top of the screen.
      if (message.type === ('service_status' as any)) {
        const p = message.payload as any;
        const svc = (p?.service as string) || '';
        if (!svc) return;
        setServiceStatus(prev => ({
          ...prev,
          [svc]: {
            state: (p?.state as string) || 'unknown',
            model: p?.model as string | undefined,
            loadSeconds: p?.loadSeconds as number | undefined,
            error: p?.error as string | undefined,
          },
        }));
        // Re-enable the banner whenever a new loading phase starts.
        if (p?.state === 'loading') setServiceBannerDismissed(false);
      }
    });

    const unsubState = rvs.onStateChange((state) => {
      setConnectionState(state);
    });

    // Set the initial connection state.
    setConnectionState(rvs.getState());

    return () => {
      unsubMessage();
      unsubState();
    };
  }, []);
|
|
|
|
// Auto-Update: Bei App-Start pruefen
|
|
useEffect(() => {
|
|
const unsubUpdate = updateService.onUpdateAvailable((info) => {
|
|
updateService.promptUpdate(info);
|
|
});
|
|
// Nach 5s pruefen (RVS muss erst verbunden sein)
|
|
const timer = setTimeout(() => updateService.checkForUpdate(), 5000);
|
|
return () => { unsubUpdate(); clearTimeout(timer); };
|
|
}, []);
|
|
|
|
// Gespraechsmodus: Nach TTS-Wiedergabe automatisch Aufnahme starten
|
|
useEffect(() => {
|
|
const unsubPlayback = audioService.onPlaybackFinished(() => {
|
|
if (wakeWordService.isActive()) {
|
|
wakeWordService.resume();
|
|
}
|
|
});
|
|
return () => unsubPlayback();
|
|
}, []);
|
|
|
|
  // Wake word / conversation mode: start auto-recording on wake word, and
  // send the captured audio once silence is detected.
  // NOTE(review): deps are [wakeWordActive] only, so this closure captures
  // the getCurrentLocation from the render it was created in; a GPS toggle
  // while active may not be picked up until wakeWordActive changes — verify
  // whether that is acceptable. (getCurrentLocation is declared further down
  // in the component, so it cannot simply be added to this deps array.)
  useEffect(() => {
    const unsubWake = wakeWordService.onWakeWord(async () => {
      console.log('[Chat] Gespraechsmodus — starte Auto-Aufnahme');
      // Conversation window: the user has X seconds to start talking,
      // otherwise the conversation ends.
      const windowMs = await loadConvWindowMs();
      const started = await audioService.startRecording(true, windowMs);
      if (!started) {
        // Microphone unavailable — re-arm for the next attempt.
        wakeWordService.resume();
      }
    });

    // Auto-stop callback: silence detected → send the recording and re-arm
    // the wake word.
    const unsubSilence = audioService.onSilenceDetected(async () => {
      const result = await audioService.stopRecording();
      if (result && result.durationMs > 500) {
        // The user spoke within the window → send as a voice message.
        const location = await getCurrentLocation();
        const userMsg: ChatMessage = {
          id: nextId(),
          sender: 'user',
          text: '🎙 Spracheingabe wird verarbeitet...',
          timestamp: Date.now(),
          attachments: [{ type: 'audio', name: 'Sprachaufnahme' }],
        };
        setMessages(prev => capMessages([...prev, userMsg]));
        rvs.send('audio', {
          base64: result.base64,
          durationMs: result.durationMs,
          mimeType: result.mimeType,
          voice: localXttsVoiceRef.current,
          speed: ttsSpeedRef.current,
          ...(location && { location }),
        });
        // resume() is triggered by onPlaybackFinished after ARIA's reply.
      } else {
        // No speech within the window → end the conversation (the "ear"
        // turns off, or stays armed when a wake word is available).
        wakeWordService.endConversation();
        // Keep the UI state in sync.
        if (!wakeWordService.isActive()) setWakeWordActive(false);
      }
    });

    return () => {
      unsubWake();
      unsubSilence();
    };
  }, [wakeWordActive]);
|
|
|
|
// Wake Word Toggle Handler
|
|
const toggleWakeWord = useCallback(async () => {
|
|
if (wakeWordActive) {
|
|
// Vor Porcupine-Stop: eventuelle laufende Aufnahme abbrechen. Sonst
|
|
// bleibt audioService.recordingState=='recording' haengen und der
|
|
// normale Aufnahme-Button wirkt nicht mehr (startRecording lehnt
|
|
// ab weil "Aufnahme laeuft bereits").
|
|
try { await audioService.stopRecording(); } catch {}
|
|
await wakeWordService.stop();
|
|
setWakeWordActive(false);
|
|
} else {
|
|
const started = await wakeWordService.start();
|
|
setWakeWordActive(started);
|
|
}
|
|
}, [wakeWordActive]);
|
|
|
|
  // Persist the chat history to AsyncStorage (debounced, only after the
  // initial load finished — otherwise we could clobber the stored history).
  const saveTimer = useRef<ReturnType<typeof setTimeout> | null>(null);
  useEffect(() => {
    if (messages.length === 0 || isInitialLoad.current) return;
    // Debounce: wait 1s so persistAttachment can finish writing files first.
    if (saveTimer.current) clearTimeout(saveTimer.current);
    saveTimer.current = setTimeout(() => {
      const toStore = messages.slice(-MAX_STORED_MESSAGES).map(msg => ({
        ...msg,
        attachments: msg.attachments?.map(att => ({
          ...att,
          // Only keep file:// URIs; drop data: URIs (too large for AsyncStorage).
          uri: att.uri?.startsWith('file://') ? att.uri : undefined,
        })),
      }));
      const json = JSON.stringify(toStore);
      // Safety check: do not store payloads >4MB (AsyncStorage limit).
      if (json.length > 4 * 1024 * 1024) {
        console.warn('[Chat] Speicher zu gross, kuerze auf 100 Nachrichten');
        const shortened = JSON.stringify(toStore.slice(-100));
        AsyncStorage.setItem(CHAT_STORAGE_KEY, shortened).catch(() => {});
      } else {
        AsyncStorage.setItem(CHAT_STORAGE_KEY, json).catch(err =>
          console.error('[Chat] Speichern fehlgeschlagen:', err),
        );
      }
    }, 1000);
    return () => { if (saveTimer.current) clearTimeout(saveTimer.current); };
  }, [messages]);
|
|
|
|
  // Inverted FlatList: newest messages at the bottom, no manual scrolling
  // needed. Memoized so the reversed copy is only rebuilt when messages change.
  const invertedMessages = useMemo(() => [...messages].reverse(), [messages]);
|
|
|
|
// GPS-Position holen (optional)
|
|
const getCurrentLocation = useCallback((): Promise<{ lat: number; lon: number } | null> => {
|
|
if (!gpsEnabled) return Promise.resolve(null);
|
|
|
|
return new Promise((resolve) => {
|
|
Geolocation.getCurrentPosition(
|
|
(position) => {
|
|
resolve({
|
|
lat: position.coords.latitude,
|
|
lon: position.coords.longitude,
|
|
});
|
|
},
|
|
(_error) => {
|
|
resolve(null);
|
|
},
|
|
{ enableHighAccuracy: false, timeout: 5000 },
|
|
);
|
|
});
|
|
}, [gpsEnabled]);
|
|
|
|
// --- Nachricht senden ---
|
|
|
|
const sendTextMessage = useCallback(async () => {
|
|
const text = inputText.trim();
|
|
|
|
// Wenn pending Anhaenge vorhanden → Anhaenge + Text zusammen senden
|
|
if (pendingAttachments.length > 0) {
|
|
sendPendingAttachments(text);
|
|
return;
|
|
}
|
|
|
|
if (!text) return;
|
|
|
|
setInputText('');
|
|
|
|
const location = await getCurrentLocation();
|
|
|
|
const userMsg: ChatMessage = {
|
|
id: nextId(),
|
|
sender: 'user',
|
|
text,
|
|
timestamp: Date.now(),
|
|
};
|
|
setMessages(prev => capMessages([...prev, userMsg]));
|
|
|
|
// An RVS senden — mit geraetelokaler Voice (Bridge nutzt sie fuer die Antwort)
|
|
rvs.send('chat', {
|
|
text,
|
|
voice: localXttsVoiceRef.current,
|
|
speed: ttsSpeedRef.current,
|
|
...(location && { location }),
|
|
});
|
|
}, [inputText, getCurrentLocation, pendingAttachments, sendPendingAttachments]);
|
|
|
|
// Anfrage abbrechen — sofort lokalen Indicator weg, Bridge triggert doctor --fix
|
|
const cancelRequest = useCallback(() => {
|
|
setAgentActivity({ activity: 'idle', tool: '' });
|
|
rvs.send('cancel_request' as any, {});
|
|
}, []);
|
|
|
|
// Sprachaufnahme abgeschlossen
|
|
const handleVoiceRecording = useCallback(async (result: RecordingResult) => {
|
|
const location = await getCurrentLocation();
|
|
|
|
const userMsg: ChatMessage = {
|
|
id: nextId(),
|
|
sender: 'user',
|
|
text: '🎙 Spracheingabe wird verarbeitet...',
|
|
timestamp: Date.now(),
|
|
};
|
|
setMessages(prev => capMessages([...prev, userMsg]));
|
|
|
|
rvs.send('audio', {
|
|
base64: result.base64,
|
|
durationMs: result.durationMs,
|
|
mimeType: result.mimeType,
|
|
...(location && { location }),
|
|
});
|
|
}, [getCurrentLocation]);
|
|
|
|
// Datei auswaehlen → zur Pending-Liste hinzufuegen
|
|
const handleFileSelected = useCallback(async (file: FileData) => {
|
|
setShowFileUpload(false);
|
|
setPendingAttachments(prev => [...prev, { file, isPhoto: false }]);
|
|
}, []);
|
|
|
|
// Foto auswaehlen → zur Pending-Liste hinzufuegen
|
|
const handlePhotoSelected = useCallback(async (photo: PhotoData) => {
|
|
setShowCameraUpload(false);
|
|
setPendingAttachments(prev => [...prev, { file: photo, isPhoto: true }]);
|
|
}, []);
|
|
|
|
  // Send all pending attachments plus the accompanying text: one local chat
  // bubble carrying every attachment, one 'file' RVS message per file, and an
  // optional separate 'chat' message for the text.
  const sendPendingAttachments = useCallback(async (messageText: string) => {
    if (pendingAttachments.length === 0) return;
    const location = await getCurrentLocation();
    const msgId = nextId();

    // Collect all attachments for the single chat message.
    const attachments: Attachment[] = [];
    for (const { file, isPhoto } of pendingAttachments) {
      const isImage = isPhoto || (file.type && file.type.startsWith('image/'));
      // Photos carry the name in fileName, picked files in name.
      const name = isPhoto ? file.fileName : file.name;
      const base64 = file.base64 || '';
      const mimeType = file.type || '';
      // Images render inline from a data: URI until the persisted file://
      // copy replaces it below.
      const imageUri = isImage && base64 ? `data:${mimeType};base64,${base64}` : file.uri;

      attachments.push({
        type: isImage ? 'image' : 'file',
        name,
        size: file.size,
        uri: imageUri,
        mimeType,
      });
    }

    // One chat message carrying all attachments.
    const userMsg: ChatMessage = {
      id: msgId,
      sender: 'user',
      text: messageText || `${pendingAttachments.length} Anhang/Anhaenge`,
      timestamp: Date.now(),
      attachments,
    };
    setMessages(prev => capMessages([...prev, userMsg]));

    // Send every file via RVS + persist it on disk.
    for (const { file, isPhoto } of pendingAttachments) {
      const name = isPhoto ? file.fileName : file.name;
      const base64 = file.base64 || '';
      const mimeType = file.type || '';

      // Persist to disk; once written, swap the data: URI for the file:// path.
      if (base64) {
        persistAttachment(base64, msgId + '_' + name, name).then(filePath => {
          setMessages(prev => prev.map(m =>
            m.id === msgId ? { ...m, attachments: m.attachments?.map(a =>
              a.name === name && !a.uri?.startsWith('file://') ? { ...a, uri: filePath } : a
            )} : m
          ));
        }).catch(() => {});
      }

      // Send via RVS.
      rvs.send('file', {
        name,
        type: mimeType,
        size: file.size,
        base64,
        ...(isPhoto && file.width && { width: file.width, height: file.height }),
        ...(location && { location }),
      });
    }

    // Text goes as a separate message (so ARIA knows what to do with the files).
    if (messageText) {
      rvs.send('chat', {
        text: messageText,
        voice: localXttsVoiceRef.current,
        speed: ttsSpeedRef.current,
        ...(location && { location }),
      });
    }

    setPendingAttachments([]);
    setInputText('');
  }, [pendingAttachments, getCurrentLocation]);
|
|
|
|
  // --- Rendering ---

  // Renders one chat bubble: attachment previews (image / file row), the
  // message text, a replay button for ARIA messages, and a timestamp.
  const renderMessage = ({ item }: { item: ChatMessage }) => {
    const isUser = item.sender === 'user';
    const time = new Date(item.timestamp).toLocaleTimeString('de-DE', {
      hour: '2-digit',
      minute: '2-digit',
    });

    return (
      <View style={[styles.messageBubble, isUser ? styles.userBubble : styles.ariaBubble]}>
        {/* Attachment previews */}
        {item.attachments?.map((att, idx) => (
          <View key={idx}>
            {att.type === 'image' && att.uri ? (
              <TouchableOpacity onPress={() => setFullscreenImage(att.uri || null)} activeOpacity={0.8}>
                <Image
                  source={{ uri: att.uri }}
                  style={styles.attachmentImage}
                  resizeMode="cover"
                  onError={() => {
                    // Image failed to load (e.g. file deleted) → clear the
                    // URI so the "tap to load" fallback below is rendered.
                    setMessages(prev => prev.map(m =>
                      m.id === item.id ? { ...m, attachments: m.attachments?.map((a, i) =>
                        i === idx ? { ...a, uri: undefined } : a
                      )} : m
                    ));
                  }}
                />
              </TouchableOpacity>
            ) : att.type === 'image' && !att.uri ? (
              <TouchableOpacity
                style={styles.attachmentFile}
                onPress={() => {
                  // Re-request the image from the server when possible.
                  if (att.serverPath) {
                    rvs.send('file_request' as any, { serverPath: att.serverPath, requestId: item.id });
                  }
                }}
              >
                <Text style={styles.attachmentFileIcon}>{'\uD83D\uDDBC\uFE0F'}</Text>
                <Text style={styles.attachmentFileName} numberOfLines={1}>{att.name}</Text>
                <Text style={styles.attachmentFileSize}>
                  {att.serverPath ? '(tippen zum Laden)' : '(nicht verfuegbar)'}
                </Text>
              </TouchableOpacity>
            ) : (
              <View style={styles.attachmentFile}>
                {/* Icon by MIME type: PDF, Word, spreadsheet, generic file */}
                <Text style={styles.attachmentFileIcon}>
                  {att.mimeType?.includes('pdf') ? '\uD83D\uDCC4' :
                   att.mimeType?.includes('word') || att.mimeType?.includes('document') ? '\uD83D\uDCC3' :
                   att.mimeType?.includes('sheet') || att.mimeType?.includes('excel') ? '\uD83D\uDCC8' :
                   '\uD83D\uDCC1'}
                </Text>
                <Text style={styles.attachmentFileName} numberOfLines={1}>{att.name}</Text>
                {att.size ? <Text style={styles.attachmentFileSize}>{Math.round(att.size / 1024)}KB</Text> : null}
                {!att.uri && att.serverPath && (
                  <TouchableOpacity onPress={() => rvs.send('file_request' as any, { serverPath: att.serverPath, requestId: item.id })}>
                    <Text style={[styles.attachmentFileSize, {color: '#0096FF'}]}>(laden)</Text>
                  </TouchableOpacity>
                )}
                {!att.uri && !att.serverPath && <Text style={styles.attachmentFileSize}>(nicht verfuegbar)</Text>}
              </View>
            )}
          </View>
        ))}
        {/* Text (hidden when it is only "Anhang empfangen" and an image is shown) */}
        {!(item.text === 'Anhang empfangen' && item.attachments?.some(a => a.type === 'image' && a.uri)) && (
          <MessageText
            text={item.text}
            style={[styles.messageText, isUser ? styles.userText : styles.ariaText]}
          />
        )}
        {/* Play button for ARIA messages — prefers cached audio, otherwise asks the bridge for TTS with the current engine */}
        {!isUser && item.text.length > 0 && (
          <TouchableOpacity
            style={styles.playButton}
            onPress={() => {
              if (item.audioPath) {
                audioService.playFromPath(item.audioPath);
              } else {
                // Pass messageId so the bridge links the generated audio back
                // to this message (the next replay then comes from the cache).
                rvs.send('tts_request' as any, {
                  text: item.text,
                  voice: localXttsVoiceRef.current,
                  speed: ttsSpeedRef.current,
                  messageId: item.messageId || '',
                });
              }
            }}
          >
            <Text style={styles.playButtonText}>{'\uD83D\uDD0A'}</Text>
          </TouchableOpacity>
        )}
        <Text style={styles.timestamp}>{time}</Text>
      </View>
    );
  };
|
|
|
|
  // Status-dot color: green = connected, yellow = connecting, red = disconnected.
  const connectionDotColor =
    connectionState === 'connected' ? '#34C759' :
    connectionState === 'connecting' ? '#FFD60A' : '#FF3B30';
|
|
|
|
return (
|
|
<KeyboardAvoidingView
|
|
style={styles.container}
|
|
behavior={Platform.OS === 'ios' ? 'padding' : undefined}
|
|
keyboardVerticalOffset={90}
|
|
>
|
|
{/* Verbindungsstatus-Leiste */}
|
|
<View style={styles.statusBar}>
|
|
<View style={[styles.statusDot, { backgroundColor: connectionDotColor }]} />
|
|
<Text style={styles.statusText}>
|
|
{connectionState === 'connected' ? 'Verbunden' :
|
|
connectionState === 'connecting' ? 'Verbinde...' : 'Getrennt'}
|
|
</Text>
|
|
<TouchableOpacity onPress={() => setSearchVisible(!searchVisible)} style={{marginLeft: 'auto', paddingHorizontal: 8}}>
|
|
<Text style={{fontSize: 16}}>{'\uD83D\uDD0D'}</Text>
|
|
</TouchableOpacity>
|
|
</View>
|
|
|
|
{/* Service-Status Banner (Gamebox: F5-TTS / Whisper Lade-Status) */}
|
|
{(() => {
|
|
const entries = Object.entries(serviceStatus);
|
|
if (entries.length === 0 || serviceBannerDismissed) return null;
|
|
const anyLoading = entries.some(([, v]) => v.state === 'loading');
|
|
const anyError = entries.some(([, v]) => v.state === 'error');
|
|
const allReady = !anyLoading && !anyError && entries.every(([, v]) => v.state === 'ready');
|
|
const bg = anyError ? '#3A1F1F' : anyLoading ? '#3A331F' : '#1F3A2A';
|
|
const border = anyError ? '#FF3B30' : anyLoading ? '#FFD60A' : '#34C759';
|
|
const labels: Record<string, string> = { f5tts: 'F5-TTS', whisper: 'Whisper STT' };
|
|
return (
|
|
<TouchableOpacity
|
|
activeOpacity={allReady ? 0.6 : 1.0}
|
|
onPress={() => { if (allReady) setServiceBannerDismissed(true); }}
|
|
style={[styles.serviceBanner, { backgroundColor: bg, borderColor: border }]}
|
|
>
|
|
{entries.map(([svc, info]) => {
|
|
let icon = '\u23F3', text = '';
|
|
if (info.state === 'loading') {
|
|
text = `${labels[svc] || svc}: laedt${info.model ? ' ' + info.model : ''}...`;
|
|
} else if (info.state === 'ready') {
|
|
icon = '\u2705';
|
|
const sec = info.loadSeconds ? ` (${info.loadSeconds.toFixed(1)}s)` : '';
|
|
text = `${labels[svc] || svc}: bereit${info.model ? ' ' + info.model : ''}${sec}`;
|
|
} else if (info.state === 'error') {
|
|
icon = '\u274C';
|
|
text = `${labels[svc] || svc}: Fehler ${info.error || ''}`;
|
|
} else {
|
|
text = `${labels[svc] || svc}: ${info.state}`;
|
|
}
|
|
return (
|
|
<Text key={svc} style={styles.serviceBannerLine}>
|
|
{icon} {text}
|
|
</Text>
|
|
);
|
|
})}
|
|
<Text style={styles.serviceBannerHint}>
|
|
{allReady ? 'Tippen zum Schliessen' : 'Bitte warten...'}
|
|
</Text>
|
|
</TouchableOpacity>
|
|
);
|
|
})()}
|
|
|
|
{/* Suchleiste */}
|
|
{searchVisible && (
|
|
<View style={styles.searchBar}>
|
|
<TextInput
|
|
style={styles.searchInput}
|
|
value={searchQuery}
|
|
onChangeText={setSearchQuery}
|
|
placeholder="Chat durchsuchen..."
|
|
placeholderTextColor="#555570"
|
|
autoFocus
|
|
/>
|
|
<TouchableOpacity onPress={() => { setSearchVisible(false); setSearchQuery(''); }}>
|
|
<Text style={{color: '#FF3B30', fontSize: 14, paddingHorizontal: 8}}>X</Text>
|
|
</TouchableOpacity>
|
|
</View>
|
|
)}
|
|
|
|
{/* Nachrichtenliste */}
|
|
<FlatList
|
|
ref={flatListRef}
|
|
inverted
|
|
data={searchQuery ? messages.filter(m => m.text.toLowerCase().includes(searchQuery.toLowerCase())).reverse() : invertedMessages}
|
|
keyExtractor={item => item.id}
|
|
renderItem={renderMessage}
|
|
contentContainerStyle={styles.messageList}
|
|
showsVerticalScrollIndicator={false}
|
|
ListEmptyComponent={
|
|
<View style={styles.emptyContainer}>
|
|
<Text style={styles.emptyIcon}>{'\uD83E\uDD16'}</Text>
|
|
<Text style={styles.emptyText}>ARIA Cockpit</Text>
|
|
<Text style={styles.emptyHint}>Starte eine Konversation mit ARIA</Text>
|
|
</View>
|
|
}
|
|
/>
|
|
|
|
{/* Thinking-Indicator */}
|
|
{agentActivity.activity !== 'idle' && (
|
|
<View style={styles.thinkingBar}>
|
|
<Text style={styles.thinkingText}>
|
|
{agentActivity.activity === 'tool' && agentActivity.tool
|
|
? `\uD83D\uDD27 ${agentActivity.tool}`
|
|
: agentActivity.activity === 'assistant'
|
|
? '\u270D\uFE0F ARIA schreibt...'
|
|
: '\uD83D\uDCAD ARIA denkt...'}
|
|
</Text>
|
|
<TouchableOpacity style={styles.thinkingCancel} onPress={cancelRequest}>
|
|
<Text style={styles.thinkingCancelText}>Abbrechen</Text>
|
|
</TouchableOpacity>
|
|
</View>
|
|
)}
|
|
|
|
{/* Pending Anhaenge Vorschau */}
|
|
{pendingAttachments.length > 0 && (
|
|
<View style={styles.pendingBar}>
|
|
<ScrollView horizontal showsHorizontalScrollIndicator={false} style={{flex: 1}}>
|
|
{pendingAttachments.map((att, idx) => (
|
|
<View key={idx} style={styles.pendingItem}>
|
|
{att.file.type?.startsWith('image/') || att.isPhoto ? (
|
|
<Image
|
|
source={{ uri: att.file.base64
|
|
? `data:${att.file.type};base64,${att.file.base64}`
|
|
: att.file.uri }}
|
|
style={styles.pendingThumb}
|
|
/>
|
|
) : (
|
|
<View style={[styles.pendingThumb, {justifyContent: 'center', alignItems: 'center'}]}>
|
|
<Text style={{fontSize: 20}}>{'\uD83D\uDCC4'}</Text>
|
|
</View>
|
|
)}
|
|
<TouchableOpacity
|
|
style={styles.pendingRemove}
|
|
onPress={() => setPendingAttachments(prev => prev.filter((_, i) => i !== idx))}
|
|
>
|
|
<Text style={{color: '#fff', fontSize: 10, fontWeight: 'bold'}}>X</Text>
|
|
</TouchableOpacity>
|
|
</View>
|
|
))}
|
|
</ScrollView>
|
|
<Text style={{color: '#8888AA', fontSize: 11, marginLeft: 8}}>{pendingAttachments.length}</Text>
|
|
<TouchableOpacity onPress={() => setPendingAttachments([])}>
|
|
<Text style={{color: '#FF3B30', fontSize: 14, paddingHorizontal: 8}}>Alle X</Text>
|
|
</TouchableOpacity>
|
|
</View>
|
|
)}
|
|
|
|
{/* Eingabebereich */}
|
|
<View style={styles.inputContainer}>
|
|
{/* Datei-Buttons */}
|
|
<TouchableOpacity
|
|
style={styles.actionButton}
|
|
onPress={() => setShowFileUpload(true)}
|
|
>
|
|
<Text style={styles.actionIcon}>{'\uD83D\uDCCE'}</Text>
|
|
</TouchableOpacity>
|
|
|
|
<TouchableOpacity
|
|
style={styles.actionButton}
|
|
onPress={() => setShowCameraUpload(true)}
|
|
>
|
|
<Text style={styles.actionIcon}>{'\uD83D\uDCF7'}</Text>
|
|
</TouchableOpacity>
|
|
|
|
{/* Texteingabe */}
|
|
<TextInput
|
|
style={styles.textInput}
|
|
value={inputText}
|
|
onChangeText={setInputText}
|
|
placeholder={pendingAttachments.length > 0 ? "Text zu den Anhaengen (optional)..." : "Nachricht an ARIA..."}
|
|
placeholderTextColor="#555570"
|
|
multiline
|
|
maxLength={4000}
|
|
onSubmitEditing={sendTextMessage}
|
|
returnKeyType="send"
|
|
/>
|
|
|
|
{/* Senden oder Sprache */}
|
|
{inputText.trim() || pendingAttachments.length > 0 ? (
|
|
<TouchableOpacity style={styles.sendButton} onPress={sendTextMessage}>
|
|
<Text style={styles.sendIcon}>{'\u2B06\uFE0F'}</Text>
|
|
</TouchableOpacity>
|
|
) : (
|
|
<>
|
|
<VoiceButton
|
|
onRecordingComplete={handleVoiceRecording}
|
|
disabled={connectionState !== 'connected'}
|
|
wakeWordActive={wakeWordActive}
|
|
/>
|
|
{/* Mund-Button: TTS auf diesem Geraet muten/aufheben.
|
|
Nur sichtbar wenn TTS in den Settings grundsaetzlich aktiv ist. */}
|
|
{ttsDeviceEnabled && (
|
|
<TouchableOpacity
|
|
style={[styles.wakeWordBtn, ttsMuted && styles.mouthBtnMuted]}
|
|
onPress={toggleMute}
|
|
accessibilityLabel={ttsMuted ? 'Sprachausgabe einschalten' : 'Sprachausgabe stumm schalten'}
|
|
>
|
|
<Text style={styles.wakeWordIcon}>{ttsMuted ? '🤐' : '👄'}</Text>
|
|
</TouchableOpacity>
|
|
)}
|
|
<TouchableOpacity
|
|
style={[styles.wakeWordBtn, wakeWordActive && styles.wakeWordBtnActive]}
|
|
onPress={toggleWakeWord}
|
|
>
|
|
<Text style={styles.wakeWordIcon}>{wakeWordActive ? '👂' : '🔇'}</Text>
|
|
</TouchableOpacity>
|
|
</>
|
|
)}
|
|
</View>
|
|
|
|
{/* Bild-Vollbild Modal */}
|
|
<Modal visible={!!fullscreenImage} transparent animationType="fade" onRequestClose={() => setFullscreenImage(null)}>
|
|
<TouchableOpacity
|
|
style={styles.fullscreenOverlay}
|
|
activeOpacity={1}
|
|
onPress={() => setFullscreenImage(null)}
|
|
>
|
|
{fullscreenImage && (
|
|
<Image
|
|
source={{ uri: fullscreenImage }}
|
|
style={styles.fullscreenImage}
|
|
resizeMode="contain"
|
|
/>
|
|
)}
|
|
</TouchableOpacity>
|
|
</Modal>
|
|
|
|
{/* Datei-Upload Modal */}
|
|
<Modal visible={showFileUpload} transparent animationType="slide">
|
|
<View style={styles.modalOverlay}>
|
|
<FileUpload
|
|
onFileSelected={handleFileSelected}
|
|
onCancel={() => setShowFileUpload(false)}
|
|
/>
|
|
</View>
|
|
</Modal>
|
|
|
|
{/* Kamera-Upload Modal */}
|
|
<Modal visible={showCameraUpload} transparent animationType="slide">
|
|
<View style={styles.modalOverlay}>
|
|
<CameraUpload
|
|
onPhotoSelected={handlePhotoSelected}
|
|
onCancel={() => setShowCameraUpload(false)}
|
|
/>
|
|
</View>
|
|
</Modal>
|
|
</KeyboardAvoidingView>
|
|
);
|
|
};
|
|
|
|
// --- Styles ---
|
|
|
|
const styles = StyleSheet.create({
  // ---------------------------------------------------------------
  // Screen scaffold
  // ---------------------------------------------------------------

  // Root of the KeyboardAvoidingView; dark app background.
  container: {
    flex: 1,
    backgroundColor: '#0D0D1A',
  },

  // ---------------------------------------------------------------
  // Connection status bar (top of screen: dot + label + search toggle)
  // ---------------------------------------------------------------
  statusBar: {
    flexDirection: 'row',
    alignItems: 'center',
    paddingHorizontal: 16,
    paddingVertical: 8,
    backgroundColor: '#12122A',
    borderBottomWidth: 1,
    borderBottomColor: '#1E1E2E',
  },
  // Small circle; its backgroundColor is overridden inline with
  // green/yellow/red depending on the connection state.
  statusDot: {
    width: 8,
    height: 8,
    borderRadius: 4,
    marginRight: 8,
  },
  statusText: {
    color: '#8888AA',
    fontSize: 12,
  },

  // ---------------------------------------------------------------
  // Service status banner (F5-TTS / Whisper load state).
  // backgroundColor and borderColor are set inline per state.
  // ---------------------------------------------------------------
  serviceBanner: {
    paddingVertical: 8,
    paddingHorizontal: 12,
    // Only the bottom edge is drawn; the inline borderColor applies to it.
    borderTopWidth: 0,
    borderBottomWidth: 1,
    borderLeftWidth: 0,
    borderRightWidth: 0,
  },
  // One line per service (icon + status text).
  serviceBannerLine: {
    color: '#FFFFFF',
    fontSize: 12,
    lineHeight: 18,
  },
  // "Tippen zum Schliessen" / "Bitte warten..." hint below the lines.
  serviceBannerHint: {
    color: '#AAAACC',
    fontSize: 10,
    marginTop: 2,
    fontStyle: 'italic',
  },

  // ---------------------------------------------------------------
  // Message list & chat bubbles
  // ---------------------------------------------------------------
  // contentContainerStyle of the inverted FlatList; flexGrow keeps the
  // empty-state component able to fill the viewport.
  messageList: {
    padding: 12,
    paddingBottom: 8,
    flexGrow: 1,
  },
  // Base bubble; combined with userBubble or ariaBubble per sender.
  messageBubble: {
    maxWidth: '80%',
    padding: 12,
    borderRadius: 16,
    marginBottom: 8,
  },
  // Outgoing (user) bubble: right-aligned, blue, "tail" corner flattened.
  userBubble: {
    alignSelf: 'flex-end',
    backgroundColor: '#0096FF',
    borderBottomRightRadius: 4,
  },
  // Incoming (ARIA) bubble: left-aligned, dark, "tail" corner flattened.
  ariaBubble: {
    alignSelf: 'flex-start',
    backgroundColor: '#1E1E2E',
    borderBottomLeftRadius: 4,
  },
  // Base text inside a bubble; combined with userText/ariaText for color.
  messageText: {
    fontSize: 15,
    lineHeight: 21,
  },
  userText: {
    color: '#FFFFFF',
  },
  ariaText: {
    color: '#E0E0F0',
  },

  // ---------------------------------------------------------------
  // Attachments rendered inside a message bubble
  // ---------------------------------------------------------------
  // Inline image preview; dark placeholder background while loading.
  attachmentImage: {
    width: '100%',
    minHeight: 200,
    maxHeight: 400,
    borderRadius: 8,
    marginBottom: 6,
    backgroundColor: '#0D0D1A',
  },
  // Non-image attachment row: icon + name + size.
  attachmentFile: {
    flexDirection: 'row',
    alignItems: 'center',
    backgroundColor: 'rgba(255,255,255,0.1)',
    borderRadius: 8,
    padding: 10,
    marginBottom: 6,
  },
  attachmentFileIcon: {
    fontSize: 24,
    marginRight: 8,
  },
  attachmentFileName: {
    flex: 1,
    color: '#E0E0F0',
    fontSize: 13,
  },
  attachmentFileSize: {
    color: '#8888AA',
    fontSize: 11,
    marginLeft: 8,
  },
  // Small time label at the bottom-right of a bubble.
  timestamp: {
    color: 'rgba(255,255,255,0.4)',
    fontSize: 10,
    marginTop: 4,
    alignSelf: 'flex-end',
  },

  // ---------------------------------------------------------------
  // Empty-chat placeholder (robot icon + title + hint)
  // ---------------------------------------------------------------
  emptyContainer: {
    flex: 1,
    alignItems: 'center',
    justifyContent: 'center',
    // NOTE(review): with the inverted FlatList this padding ends up on the
    // visually lower part of the screen — presumably intentional; confirm.
    paddingTop: 120,
  },
  emptyIcon: {
    fontSize: 48,
    marginBottom: 12,
  },
  emptyText: {
    color: '#FFFFFF',
    fontSize: 22,
    fontWeight: '700',
  },
  emptyHint: {
    color: '#555570',
    fontSize: 14,
    marginTop: 4,
  },

  // ---------------------------------------------------------------
  // Input row (attach/camera buttons, text field, send/voice controls)
  // ---------------------------------------------------------------
  inputContainer: {
    flexDirection: 'row',
    // flex-end keeps buttons pinned to the bottom while the multiline
    // TextInput grows upward (up to its maxHeight).
    alignItems: 'flex-end',
    paddingHorizontal: 10,
    paddingVertical: 8,
    backgroundColor: '#12122A',
    borderTopWidth: 1,
    borderTopColor: '#1E1E2E',
  },
  // Round icon button for file/camera pickers.
  actionButton: {
    width: 38,
    height: 38,
    borderRadius: 19,
    alignItems: 'center',
    justifyContent: 'center',
    marginRight: 4,
  },
  actionIcon: {
    fontSize: 20,
  },
  // Multiline message input; maxHeight caps growth before it scrolls.
  textInput: {
    flex: 1,
    backgroundColor: '#1E1E2E',
    borderRadius: 20,
    paddingHorizontal: 16,
    paddingVertical: 10,
    color: '#FFFFFF',
    fontSize: 15,
    maxHeight: 100,
    marginHorizontal: 6,
  },
  // Round blue send button, shown when there is text or pending attachments.
  sendButton: {
    width: 40,
    height: 40,
    borderRadius: 20,
    backgroundColor: '#0096FF',
    alignItems: 'center',
    justifyContent: 'center',
  },
  sendIcon: {
    fontSize: 18,
  },
  // Small round toggle; shared by the wake-word and TTS-mute buttons.
  wakeWordBtn: {
    width: 32,
    height: 32,
    borderRadius: 16,
    backgroundColor: 'rgba(255,255,255,0.1)',
    alignItems: 'center',
    justifyContent: 'center',
    marginLeft: 4,
  },
  // Green tint when wake-word listening is active.
  wakeWordBtnActive: {
    backgroundColor: 'rgba(52, 199, 89, 0.3)',
  },
  // Red tint when TTS is muted on this device.
  mouthBtnMuted: {
    backgroundColor: 'rgba(255, 59, 48, 0.25)',
  },
  wakeWordIcon: {
    fontSize: 16,
  },

  // ---------------------------------------------------------------
  // "ARIA is thinking" activity bar with cancel button
  // ---------------------------------------------------------------
  thinkingBar: {
    flexDirection: 'row',
    alignItems: 'center',
    justifyContent: 'space-between',
    backgroundColor: '#1E1E2E',
    paddingHorizontal: 12,
    paddingVertical: 6,
    borderTopWidth: 1,
    borderTopColor: '#2A2A3E',
  },
  thinkingText: {
    color: '#FFD60A',
    fontSize: 12,
    flex: 1,
  },
  // Outlined red "Abbrechen" button.
  thinkingCancel: {
    paddingHorizontal: 10,
    paddingVertical: 4,
    borderWidth: 1,
    borderColor: '#FF3B30',
    borderRadius: 4,
  },
  thinkingCancelText: {
    color: '#FF3B30',
    fontSize: 11,
    fontWeight: 'bold',
  },

  // ---------------------------------------------------------------
  // Pending-attachment preview strip above the input row
  // ---------------------------------------------------------------
  pendingBar: {
    flexDirection: 'row',
    alignItems: 'center',
    backgroundColor: '#1E1E2E',
    paddingHorizontal: 12,
    paddingVertical: 8,
    borderTopWidth: 1,
    borderTopColor: '#2A2A3E',
  },
  // Wrapper per attachment; relative so the remove badge can be absolute.
  pendingItem: {
    position: 'relative',
    marginRight: 8,
  },
  // Square thumbnail (image preview or file-icon placeholder).
  pendingThumb: {
    width: 50,
    height: 50,
    borderRadius: 6,
    backgroundColor: '#0D0D1A',
  },
  // Small red "X" badge overlapping the thumbnail's top-right corner.
  pendingRemove: {
    position: 'absolute',
    top: -4,
    right: -4,
    width: 18,
    height: 18,
    borderRadius: 9,
    backgroundColor: '#FF3B30',
    justifyContent: 'center',
    alignItems: 'center',
  },

  // ---------------------------------------------------------------
  // Chat search bar (toggled from the status bar)
  // ---------------------------------------------------------------
  searchBar: {
    flexDirection: 'row',
    alignItems: 'center',
    backgroundColor: '#12122A',
    paddingHorizontal: 12,
    paddingVertical: 6,
    borderBottomWidth: 1,
    borderBottomColor: '#1E1E2E',
  },
  searchInput: {
    flex: 1,
    color: '#FFFFFF',
    fontSize: 14,
    paddingVertical: 4,
  },

  // ---------------------------------------------------------------
  // TTS playback button (per-message audio replay)
  // NOTE(review): not referenced in the visible JSX; presumably used by
  // renderMessage defined earlier in the file.
  // ---------------------------------------------------------------
  playButton: {
    alignSelf: 'flex-end',
    paddingHorizontal: 8,
    paddingVertical: 2,
    marginTop: 4,
  },
  playButtonText: {
    fontSize: 16,
  },

  // ---------------------------------------------------------------
  // Modals
  // ---------------------------------------------------------------
  // Near-opaque backdrop for the fullscreen image viewer.
  fullscreenOverlay: {
    flex: 1,
    backgroundColor: 'rgba(0,0,0,0.95)',
    justifyContent: 'center',
    alignItems: 'center',
  },
  // Image fills the overlay; resizeMode="contain" is set on the component.
  fullscreenImage: {
    width: '100%',
    height: '100%',
  },
  // Dimmed backdrop for the file/camera upload modals.
  modalOverlay: {
    flex: 1,
    backgroundColor: 'rgba(0,0,0,0.6)',
    justifyContent: 'center',
  },
});
|
|
|
|
export default ChatScreen;
|