feat(brain): Memory-Anhaenge — multipart/form-data Endpoint daneben Base64
Stefans Test scheiterte: ein normales Handy-Foto als Base64 in der
curl-d-Argumentliste sprengt Bashs ARG_MAX (typisch 128KB-2MB). Plus:
Browser-FormData und curl -F sind eh der Standard fuer File-Uploads.
Fix: zusaetzlicher Endpoint
POST /memory/{id}/attachments/upload (multipart/form-data, field: file)
Beispiel auf der VM:
curl -F file=@/pfad/zu/foto.jpg \
"$ARIA_BRAIN_URL/memory/<id>/attachments/upload" | jq
Base64-Endpoint (/memory/{id}/attachments) bleibt fuer kleine
Uploads + interne JSON-Tools. Beide rufen am Ende den gleichen
_commit_attachment_meta-Helper, der das Memory-Payload um den
neuen Anhang updated.
Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
+36
-14
@@ -23,7 +23,7 @@ from typing import List, Optional
|
|||||||
import asyncio
|
import asyncio
|
||||||
from contextlib import asynccontextmanager
|
from contextlib import asynccontextmanager
|
||||||
|
|
||||||
from fastapi import FastAPI, HTTPException, BackgroundTasks, Request
|
from fastapi import FastAPI, HTTPException, BackgroundTasks, Request, UploadFile, File
|
||||||
from fastapi.responses import Response
|
from fastapi.responses import Response
|
||||||
from pydantic import BaseModel, Field
|
from pydantic import BaseModel, Field
|
||||||
|
|
||||||
@@ -317,26 +317,16 @@ def memory_attachments_list(point_id: str):
|
|||||||
return {"memory_id": point_id, "attachments": mem_att.list_attachments(point_id)}
|
return {"memory_id": point_id, "attachments": mem_att.list_attachments(point_id)}
|
||||||
|
|
||||||
|
|
||||||
@app.post("/memory/{point_id}/attachments", response_model=MemoryOut)
|
def _commit_attachment_meta(point_id: str, meta: dict) -> MemoryOut:
|
||||||
def memory_attachments_add(point_id: str, body: AttachmentUploadBody):
|
"""Shared-Helper: nach FS-Write das Payload um den neuen Anhang updaten.
|
||||||
"""Anhang als Base64 hochladen + im Memory-Payload eintragen."""
|
Duplikat-Name wird ersetzt, sonst hinten dran."""
|
||||||
import memory_attachments as mem_att
|
|
||||||
s = store()
|
s = store()
|
||||||
m = s.get(point_id)
|
m = s.get(point_id)
|
||||||
if not m:
|
if not m:
|
||||||
raise HTTPException(404, f"Memory {point_id} nicht gefunden")
|
raise HTTPException(404, f"Memory {point_id} nicht gefunden")
|
||||||
try:
|
|
||||||
meta = mem_att.save_from_base64(point_id, body.name, body.data_base64)
|
|
||||||
except ValueError as exc:
|
|
||||||
raise HTTPException(400, str(exc))
|
|
||||||
|
|
||||||
# Payload aktualisieren — neuer Anhang ans Ende, Duplikate (gleicher
|
|
||||||
# Filename) werden ersetzt damit die Liste nicht zweimal denselben
|
|
||||||
# Eintrag hat
|
|
||||||
atts = [a for a in (m.attachments or []) if a.get("name") != meta["name"]]
|
atts = [a for a in (m.attachments or []) if a.get("name") != meta["name"]]
|
||||||
atts.append(meta)
|
atts.append(meta)
|
||||||
m.attachments = atts
|
m.attachments = atts
|
||||||
from qdrant_client.http import models as qm
|
|
||||||
from memory.vector_store import COLLECTION
|
from memory.vector_store import COLLECTION
|
||||||
import datetime as _dt
|
import datetime as _dt
|
||||||
m.updated_at = _dt.datetime.now(_dt.timezone.utc).isoformat()
|
m.updated_at = _dt.datetime.now(_dt.timezone.utc).isoformat()
|
||||||
@@ -348,6 +338,38 @@ def memory_attachments_add(point_id: str, body: AttachmentUploadBody):
|
|||||||
return MemoryOut.from_point(s.get(point_id))
|
return MemoryOut.from_point(s.get(point_id))
|
||||||
|
|
||||||
|
|
||||||
|
@app.post("/memory/{point_id}/attachments", response_model=MemoryOut)
def memory_attachments_add(point_id: str, body: AttachmentUploadBody):
    """Upload an attachment as Base64 — for diagnostics + internal JSON tools.

    For large files prefer the multipart variant (/upload): Base64 inside a
    curl -d argument list quickly exceeds the shell's ARG_MAX limit.

    Raises:
        HTTPException: 404 if the memory point does not exist,
            400 if the store rejects name/Base64 data (ValueError).
    """
    import memory_attachments as mem_att

    if not store().get(point_id):
        raise HTTPException(404, f"Memory {point_id} nicht gefunden")
    try:
        meta = mem_att.save_from_base64(point_id, body.name, body.data_base64)
    except ValueError as exc:
        # Chain the cause so the original validation error stays visible in logs.
        raise HTTPException(400, str(exc)) from exc
    # Shared helper persists the new attachment entry in the memory payload.
    return _commit_attachment_meta(point_id, meta)
|
||||||
|
|
||||||
|
|
||||||
|
@app.post("/memory/{point_id}/attachments/upload", response_model=MemoryOut)
async def memory_attachments_upload(point_id: str, file: UploadFile = File(...)):
    """Multipart upload — the standard path for browser FormData and ``curl -F``.

    Usage:
        curl -F file=@foto.jpg "$ARIA_BRAIN_URL/memory/<id>/attachments/upload"

    Raises:
        HTTPException: 404 if the memory point does not exist,
            400 if the store rejects the attachment (ValueError).
    """
    import memory_attachments as mem_att

    if not store().get(point_id):
        raise HTTPException(404, f"Memory {point_id} nicht gefunden")
    # NOTE(review): reads the whole file into memory — assumed acceptable for
    # phone-photo-sized uploads; confirm there is no multi-GB use case.
    data = await file.read()
    try:
        # Fall back to a generic name when the client sends no filename.
        meta = mem_att.save_attachment(point_id, file.filename or "datei", data)
    except ValueError as exc:
        # Chain the cause so the original validation error stays visible in logs.
        raise HTTPException(400, str(exc)) from exc
    # Shared helper persists the new attachment entry in the memory payload.
    return _commit_attachment_meta(point_id, meta)
|
||||||
|
|
||||||
|
|
||||||
@app.delete("/memory/{point_id}/attachments/{filename}", response_model=MemoryOut)
|
@app.delete("/memory/{point_id}/attachments/{filename}", response_model=MemoryOut)
|
||||||
def memory_attachments_delete(point_id: str, filename: str):
|
def memory_attachments_delete(point_id: str, filename: str):
|
||||||
"""Einzelnen Anhang loeschen (FS + Payload-Eintrag)."""
|
"""Einzelnen Anhang loeschen (FS + Payload-Eintrag)."""
|
||||||
|
|||||||
Reference in New Issue
Block a user