Compare commits
9 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 83c99a5e65 | |
| | e438bb11ff | |
| | 8b4f75bf91 | |
| | d7e7386954 | |
| | 2100c64b91 | |
| | 74ebf59c6f | |
| | 53b49eacad | |
| | 0f11d23c75 | |
| | 311030bdaa | |
@@ -79,8 +79,8 @@ android {
|
||||
applicationId "com.ariacockpit"
|
||||
minSdkVersion rootProject.ext.minSdkVersion
|
||||
targetSdkVersion rootProject.ext.targetSdkVersion
|
||||
versionCode 10106
|
||||
versionName "0.1.1.6"
|
||||
versionCode 10109
|
||||
versionName "0.1.1.9"
|
||||
// Fallback fuer Libraries mit Product Flavors
|
||||
missingDimensionStrategy 'react-native-camera', 'general'
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "aria-cockpit",
|
||||
"version": "0.1.1.6",
|
||||
"version": "0.1.1.9",
|
||||
"private": true,
|
||||
"scripts": {
|
||||
"android": "react-native run-android",
|
||||
|
||||
@@ -0,0 +1,142 @@
|
||||
/**
|
||||
* ZoomableImage — Pinch-to-Zoom + Pan fuer das Vollbild-Modal.
|
||||
*
|
||||
* Reine React-Native-Implementation ohne externe Lib:
|
||||
* - 1 Finger: Pan wenn schon gezoomt
|
||||
* - 2 Finger: Pinch fuer Zoom + Pan
|
||||
* - Doppel-Tap: Toggle 1x ↔ 2.5x Zoom
|
||||
*
|
||||
* Scale wird auf [1, 5] gecapped, Translation auf das verfuegbare
|
||||
* Image-Volumen (kein Out-of-bounds-Pan).
|
||||
*/
|
||||
|
||||
import React, { useRef } from 'react';
|
||||
import { Animated, PanResponder, View, StyleSheet, ImageStyle, StyleProp } from 'react-native';
|
||||
|
||||
interface Props {
|
||||
uri: string;
|
||||
containerWidth: number;
|
||||
containerHeight: number;
|
||||
style?: StyleProp<ImageStyle>;
|
||||
}
|
||||
|
||||
const ZoomableImage: React.FC<Props> = ({ uri, containerWidth, containerHeight, style }) => {
|
||||
const scale = useRef(new Animated.Value(1)).current;
|
||||
const translateX = useRef(new Animated.Value(0)).current;
|
||||
const translateY = useRef(new Animated.Value(0)).current;
|
||||
|
||||
// Aktuelle Werte (Animated.Value lesen ist async, wir tracken parallel)
|
||||
const current = useRef({ scale: 1, x: 0, y: 0 }).current;
|
||||
// State beim Geste-Start (touchStart-Snapshot)
|
||||
const start = useRef({ scale: 1, x: 0, y: 0, distance: 0, focalX: 0, focalY: 0 }).current;
|
||||
// Doppel-Tap-Erkennung
|
||||
const lastTapAt = useRef(0);
|
||||
|
||||
const distance = (touches: any[]) => {
|
||||
const [a, b] = touches;
|
||||
return Math.hypot(a.pageX - b.pageX, a.pageY - b.pageY);
|
||||
};
|
||||
|
||||
const focal = (touches: any[]) => {
|
||||
const [a, b] = touches;
|
||||
return { x: (a.pageX + b.pageX) / 2, y: (a.pageY + b.pageY) / 2 };
|
||||
};
|
||||
|
||||
const clamp = (v: number, min: number, max: number) => Math.max(min, Math.min(max, v));
|
||||
|
||||
const applyAndClamp = (newScale: number, newX: number, newY: number) => {
|
||||
const s = clamp(newScale, 1, 5);
|
||||
// Maximal-Translation: (imgSize * scale - imgSize) / 2
|
||||
const maxX = Math.max(0, (containerWidth * s - containerWidth) / 2);
|
||||
const maxY = Math.max(0, (containerHeight * s - containerHeight) / 2);
|
||||
const x = clamp(newX, -maxX, maxX);
|
||||
const y = clamp(newY, -maxY, maxY);
|
||||
current.scale = s;
|
||||
current.x = x;
|
||||
current.y = y;
|
||||
scale.setValue(s);
|
||||
translateX.setValue(x);
|
||||
translateY.setValue(y);
|
||||
};
|
||||
|
||||
const responder = useRef(
|
||||
PanResponder.create({
|
||||
onStartShouldSetPanResponder: () => true,
|
||||
onMoveShouldSetPanResponder: () => true,
|
||||
onPanResponderGrant: (_e, gestureState) => {
|
||||
const touches = gestureState as any;
|
||||
const t = touches.numberActiveTouches || 1;
|
||||
// Doppel-Tap-Erkennung (nur bei 1 Finger)
|
||||
if (t === 1) {
|
||||
const now = Date.now();
|
||||
if (now - lastTapAt.current < 280) {
|
||||
// Doppel-Tap → Zoom-Toggle
|
||||
if (current.scale > 1.1) {
|
||||
applyAndClamp(1, 0, 0);
|
||||
} else {
|
||||
applyAndClamp(2.5, 0, 0);
|
||||
}
|
||||
lastTapAt.current = 0;
|
||||
return;
|
||||
}
|
||||
lastTapAt.current = now;
|
||||
}
|
||||
start.scale = current.scale;
|
||||
start.x = current.x;
|
||||
start.y = current.y;
|
||||
},
|
||||
onPanResponderMove: (e, gestureState) => {
|
||||
const touches = e.nativeEvent.touches;
|
||||
if (touches.length >= 2) {
|
||||
// Pinch + Pan
|
||||
if (start.distance === 0) {
|
||||
// Initialisiere die Pinch-Referenz beim Uebergang 1→2 Finger
|
||||
start.distance = distance(touches);
|
||||
const f = focal(touches);
|
||||
start.focalX = f.x;
|
||||
start.focalY = f.y;
|
||||
start.scale = current.scale;
|
||||
start.x = current.x;
|
||||
start.y = current.y;
|
||||
return;
|
||||
}
|
||||
const newDistance = distance(touches);
|
||||
const newFocal = focal(touches);
|
||||
const scaleFactor = newDistance / start.distance;
|
||||
const newScale = clamp(start.scale * scaleFactor, 1, 5);
|
||||
// Pan-Anteil aus Focal-Bewegung
|
||||
const newX = start.x + (newFocal.x - start.focalX);
|
||||
const newY = start.y + (newFocal.y - start.focalY);
|
||||
applyAndClamp(newScale, newX, newY);
|
||||
} else if (touches.length === 1 && current.scale > 1.05) {
|
||||
// Single-Finger-Pan nur wenn gezoomt
|
||||
start.distance = 0; // Reset Pinch-Tracking
|
||||
applyAndClamp(current.scale, start.x + gestureState.dx, start.y + gestureState.dy);
|
||||
}
|
||||
},
|
||||
onPanResponderRelease: () => { start.distance = 0; },
|
||||
onPanResponderTerminate: () => { start.distance = 0; },
|
||||
}),
|
||||
).current;
|
||||
|
||||
return (
|
||||
<View style={StyleSheet.absoluteFill} {...responder.panHandlers}>
|
||||
<Animated.Image
|
||||
source={{ uri }}
|
||||
style={[
|
||||
style,
|
||||
{
|
||||
transform: [
|
||||
{ translateX },
|
||||
{ translateY },
|
||||
{ scale },
|
||||
],
|
||||
},
|
||||
]}
|
||||
resizeMode="contain"
|
||||
/>
|
||||
</View>
|
||||
);
|
||||
};
|
||||
|
||||
export default ZoomableImage;
|
||||
@@ -26,6 +26,8 @@ import {
|
||||
import AsyncStorage from '@react-native-async-storage/async-storage';
|
||||
import RNFS from 'react-native-fs';
|
||||
import { SvgUri } from 'react-native-svg';
|
||||
import { Dimensions } from 'react-native';
|
||||
import ZoomableImage from '../components/ZoomableImage';
|
||||
import rvs, { RVSMessage, ConnectionState } from '../services/rvs';
|
||||
import audioService from '../services/audio';
|
||||
import wakeWordService from '../services/wakeword';
|
||||
@@ -1378,25 +1380,32 @@ const ChatScreen: React.FC = () => {
|
||||
|
||||
{/* Bild-Vollbild Modal */}
|
||||
<Modal visible={!!fullscreenImage} transparent animationType="fade" onRequestClose={() => setFullscreenImage(null)}>
|
||||
<TouchableOpacity
|
||||
style={styles.fullscreenOverlay}
|
||||
activeOpacity={1}
|
||||
onPress={() => setFullscreenImage(null)}
|
||||
>
|
||||
<View style={styles.fullscreenOverlay}>
|
||||
{fullscreenImage && (
|
||||
/\.svg(?:\?|$)/i.test(fullscreenImage) ? (
|
||||
<View style={styles.fullscreenImage}>
|
||||
// SVG: bisher keine Pinch-Zoom — Tap zum Schliessen
|
||||
<TouchableOpacity style={styles.fullscreenImage} activeOpacity={1} onPress={() => setFullscreenImage(null)}>
|
||||
<SvgUri uri={fullscreenImage} width="100%" height="100%" preserveAspectRatio="xMidYMid meet" />
|
||||
</View>
|
||||
</TouchableOpacity>
|
||||
) : (
|
||||
<Image
|
||||
source={{ uri: fullscreenImage }}
|
||||
// Pixel-Bild: Pinch-Zoom + Pan ueber ZoomableImage
|
||||
<ZoomableImage
|
||||
uri={fullscreenImage}
|
||||
containerWidth={Dimensions.get('window').width}
|
||||
containerHeight={Dimensions.get('window').height}
|
||||
style={styles.fullscreenImage}
|
||||
resizeMode="contain"
|
||||
/>
|
||||
)
|
||||
)}
|
||||
{/* Close-Button oben rechts — die TouchableOpacity-uebergreifend funktioniert
|
||||
wegen ZoomableImage-PanResponder nicht zuverlaessig fuer Tap-to-Close */}
|
||||
<TouchableOpacity
|
||||
style={{ position: 'absolute', top: 32, right: 16, padding: 12, backgroundColor: 'rgba(0,0,0,0.5)', borderRadius: 24 }}
|
||||
onPress={() => setFullscreenImage(null)}
|
||||
>
|
||||
<Text style={{ color: '#FFF', fontSize: 22 }}>{'✕'}</Text>
|
||||
</TouchableOpacity>
|
||||
</View>
|
||||
</Modal>
|
||||
|
||||
{/* Datei-Upload Modal */}
|
||||
|
||||
@@ -1469,7 +1469,8 @@ const SettingsScreen: React.FC = () => {
|
||||
<Text style={styles.aboutTitle}>ARIA Cockpit</Text>
|
||||
<Text style={styles.aboutVersion}>Version {require('../../package.json').version}</Text>
|
||||
<Text style={styles.aboutInfo}>
|
||||
Stefans Kommandozentrale f{'\u00FC'}r ARIA.{'\n'}
|
||||
ARIA \u2014 Autonomous Reasoning & Intelligence Assistant.{'\n'}
|
||||
Stefans Kommandozentrale.{'\n'}
|
||||
Gebaut mit React Native + TypeScript.
|
||||
</Text>
|
||||
<TouchableOpacity
|
||||
|
||||
@@ -114,6 +114,14 @@ OHNE diesen Marker erscheint die Datei NICHT in der App / Diagnostic.
|
||||
Mehrere Dateien: mehrere `[FILE: ...]`-Marker am Ende, jeder in
|
||||
eigener Zeile.
|
||||
|
||||
**WICHTIG — Datei MUSS existieren bevor du den Marker setzt.**
|
||||
Marker fuer nicht-existente Pfade werden silent gefiltert + Stefan
|
||||
bekommt einen Hinweis dass du eine Datei versprochen aber nicht
|
||||
erstellt hast. Wenn du z.B. eine MIDI-Datei nicht generieren kannst,
|
||||
sag das offen statt nur den Marker zu setzen. Verifiziere zur Not
|
||||
mit `Bash` + `ls -la /shared/uploads/aria_<name>.<ext>` dass die
|
||||
Datei wirklich da ist.
|
||||
|
||||
### Beispiel — kompletter Workflow
|
||||
|
||||
User: "Schreib mir ein Lasagne-Rezept als md-Datei"
|
||||
|
||||
+15
-4
@@ -894,9 +894,11 @@ class ARIABridge:
|
||||
# enthalten, Endung beliebig). Mehrfach im Text moeglich.
|
||||
_FILE_MARKER_RE = re.compile(r"\[FILE:\s*(/shared/uploads/[^\]]+?)\s*\]", re.IGNORECASE)
|
||||
|
||||
def _extract_file_markers(self, text: str) -> tuple[str, list[dict]]:
|
||||
"""Sucht [FILE: /shared/uploads/...]-Marker, gibt (cleaned_text, file_list) zurueck."""
|
||||
def _extract_file_markers(self, text: str) -> tuple[str, list[dict], list[str]]:
|
||||
"""Sucht [FILE: /shared/uploads/...]-Marker.
|
||||
Returns (cleaned_text, valid_files, missing_paths)."""
|
||||
files: list[dict] = []
|
||||
missing: list[str] = []
|
||||
for m in self._FILE_MARKER_RE.finditer(text):
|
||||
path = m.group(1).strip()
|
||||
if not path.startswith("/shared/uploads/"):
|
||||
@@ -904,6 +906,7 @@ class ARIABridge:
|
||||
continue
|
||||
if not os.path.isfile(path):
|
||||
logger.warning("[core] FILE-Marker zeigt auf nicht existente Datei: %s", path)
|
||||
missing.append(path)
|
||||
continue
|
||||
name = os.path.basename(path)
|
||||
mime, _ = mimetypes.guess_type(path)
|
||||
@@ -917,7 +920,7 @@ class ARIABridge:
|
||||
cleaned = self._FILE_MARKER_RE.sub("", text).strip()
|
||||
# Zwei aufeinanderfolgende Leerzeilen → eine
|
||||
cleaned = re.sub(r"\n{3,}", "\n\n", cleaned)
|
||||
return cleaned, files
|
||||
return cleaned, files, missing
|
||||
|
||||
async def _broadcast_aria_file(self, file_info: dict) -> None:
|
||||
"""ARIA hat eine Datei fuer den User erstellt — App+Diagnostic informieren."""
|
||||
@@ -950,9 +953,17 @@ class ARIABridge:
|
||||
# ARIA legt damit Dateien fuer den User bereit (Bilder, PDFs, etc.).
|
||||
# Der Marker wird aus dem Antworttext entfernt (TTS soll ihn nicht
|
||||
# vorlesen) und parallel als file_from_aria-Event geschickt.
|
||||
text, aria_files = self._extract_file_markers(text)
|
||||
text, aria_files, missing_files = self._extract_file_markers(text)
|
||||
for f in aria_files:
|
||||
await self._broadcast_aria_file(f)
|
||||
# Bei fehlenden Files: User informieren (sonst sieht er nur stille
|
||||
# Verluste — ARIA hat den Marker hingeschrieben aber das File nicht
|
||||
# tatsaechlich angelegt).
|
||||
if missing_files:
|
||||
missing_list = "\n".join(f" • {os.path.basename(p)}" for p in missing_files)
|
||||
text = (text + "\n\n[Hinweis] Folgende Dateien hat ARIA zwar erwaehnt "
|
||||
f"aber nicht erstellt:\n{missing_list}\n"
|
||||
"Bitte ARIA bitten, sie wirklich zu schreiben.").strip()
|
||||
|
||||
metadata = payload.get("metadata", {})
|
||||
is_critical = metadata.get("critical", False)
|
||||
|
||||
+28
-8
@@ -459,6 +459,14 @@
|
||||
<!-- ══════ TAB: Einstellungen ══════ -->
|
||||
<div id="tab-settings" class="main-tab">
|
||||
|
||||
<!-- Was ist ARIA? -->
|
||||
<div class="settings-section">
|
||||
<div class="card" style="max-width:700px;font-size:13px;color:#AAA;border-left:3px solid #0096FF;">
|
||||
<strong style="color:#0096FF;">ARIA</strong> — Autonomous Reasoning & Intelligence Assistant.
|
||||
Selbst gehosteter JARVIS-artiger KI-Assistent, gebaut von Stefan / HackerSoft Oldenburg.
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Betriebsmodus -->
|
||||
<div class="settings-section">
|
||||
<h2>Betriebsmodus</h2>
|
||||
@@ -998,12 +1006,7 @@
|
||||
}
|
||||
|
||||
if (msg.type === 'chat_final') {
|
||||
// [FILE: /shared/uploads/aria_xxx.ext]-Marker aus dem Antworttext
|
||||
// entfernen — die Datei kommt separat via file_from_aria.
|
||||
// (Diagnostic empfaengt chat_final direkt vom Gateway, Bridge
|
||||
// hat darum nicht filtern koennen.)
|
||||
const cleaned = (msg.text || '').replace(/\[FILE:\s*\/shared\/uploads\/[^\]]+\]/gi, '').replace(/\n{3,}/g, '\n\n').trim();
|
||||
addChat('received', cleaned, 'chat:final');
|
||||
addChat('received', msg.text || '', 'chat:final');
|
||||
return;
|
||||
}
|
||||
if (msg.type === 'file_from_aria') {
|
||||
@@ -1095,10 +1098,23 @@
|
||||
chatBox.innerHTML = '';
|
||||
if (msg.messages && msg.messages.length > 0) {
|
||||
for (const m of msg.messages) {
|
||||
if (m.type === 'aria_file') {
|
||||
// ARIA-Datei-Bubble rekonstruieren (statt addAriaFile damit
|
||||
// kein Auto-Scroll-Race waehrend des Bulk-Loads)
|
||||
addAriaFile({ serverPath: m.serverPath, name: m.name, mimeType: m.mimeType, size: m.size });
|
||||
continue;
|
||||
}
|
||||
const el = document.createElement('div');
|
||||
el.className = `chat-msg ${m.type}`;
|
||||
const escaped = escapeHtml(m.text);
|
||||
const linked = linkifyText(escaped);
|
||||
// [FILE: ...]-Marker rausfiltern (gleicher Filter wie addChat)
|
||||
const cleaned = (m.text || '').replace(/\[FILE:\s*\/shared\/uploads\/[^\]]+\]/gi, '').replace(/\n{3,}/g, '\n\n').trim();
|
||||
const escaped = escapeHtml(cleaned);
|
||||
let linked = linkifyText(escaped);
|
||||
// /shared/uploads/-Bildpfade auch im History inline rendern
|
||||
// (gleicher Replace wie in addChat — sonst sieht man nach F5 nur Text-Pfade)
|
||||
linked = linked.replace(/\/shared\/uploads\/[^\s<"]+\.(jpg|jpeg|png|gif|webp|svg|bmp)/gi, (match) => {
|
||||
return `<a href="${match}" target="_blank">${match}</a><img src="${match}" class="chat-media" onclick="openLightbox('image','${match}')" onerror="this.style.display='none'">`;
|
||||
});
|
||||
const time = m.ts ? new Date(m.ts).toLocaleTimeString('de-DE') : '?';
|
||||
el.innerHTML = `${linked}<div class="meta">${escapeHtml(m.meta)} — ${time}</div>`;
|
||||
chatBox.appendChild(el);
|
||||
@@ -1449,6 +1465,10 @@
|
||||
}
|
||||
|
||||
function addChat(type, text, meta, options) {
|
||||
// [FILE: /shared/uploads/aria_xxx.ext]-Marker aus dem Antworttext entfernen —
|
||||
// die Datei kommt separat via file_from_aria-Event als eigene Bubble.
|
||||
// /gi entfernt mehrere Marker, falls ARIA mehrere Dateien in einer Antwort liefert.
|
||||
if (text) text = text.replace(/\[FILE:\s*\/shared\/uploads\/[^\]]+\]/gi, '').replace(/\n{3,}/g, '\n\n').trim();
|
||||
const escaped = escapeHtml(text);
|
||||
let linked = linkifyText(escaped);
|
||||
// /shared/uploads/ Pfade als Inline-Bilder anzeigen
|
||||
|
||||
+29
-1
@@ -2231,7 +2231,35 @@ async function handleLoadChatHistory(clientWs) {
|
||||
} else if (role === "assistant") {
|
||||
// Reply-Prefix entfernen: "[[reply_to_current]] "
|
||||
text = text.replace(/^\[\[reply_to_\w+\]\]\s*/g, "").trim();
|
||||
if (text) chatMessages.push({ type: "received", text, meta: "chat:final", ts: msg.timestamp || obj.timestamp || 0 });
|
||||
const ts = msg.timestamp || obj.timestamp || 0;
|
||||
// ARIA-File-Marker aus dem Text parsen — pro existierender Datei
|
||||
// eine separate file_from_aria-aehnliche Message einfuegen damit die
|
||||
// Anhang-Bubble nach Browser-Refresh wieder erscheint.
|
||||
const fileRe = /\[FILE:\s*(\/shared\/uploads\/[^\]]+?)\s*\]/gi;
|
||||
let m;
|
||||
while ((m = fileRe.exec(text)) !== null) {
|
||||
const p = m[1].trim();
|
||||
try {
|
||||
if (fs.existsSync(p)) {
|
||||
const st = fs.statSync(p);
|
||||
const ext = path.extname(p).toLowerCase();
|
||||
const mimeMap = { ".jpg": "image/jpeg", ".jpeg": "image/jpeg", ".png": "image/png", ".gif": "image/gif",
|
||||
".webp": "image/webp", ".svg": "image/svg+xml", ".pdf": "application/pdf",
|
||||
".mp3": "audio/mpeg", ".mid": "audio/midi", ".midi": "audio/midi",
|
||||
".wav": "audio/wav", ".txt": "text/plain", ".md": "text/markdown",
|
||||
".json": "application/json", ".zip": "application/zip" };
|
||||
chatMessages.push({
|
||||
type: "aria_file",
|
||||
serverPath: p,
|
||||
name: path.basename(p),
|
||||
mimeType: mimeMap[ext] || "application/octet-stream",
|
||||
size: st.size,
|
||||
ts,
|
||||
});
|
||||
}
|
||||
} catch {}
|
||||
}
|
||||
if (text) chatMessages.push({ type: "received", text, meta: "chat:final", ts });
|
||||
}
|
||||
} catch {}
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user