feat: Backup & Restore mit Chunked Upload fuer grosse Dateien
Backup:
- Erstellt streaming ZIP mit SQLite-DB (via sqlite3.backup API) +
allen hochgeladenen Dateien + metadata.json
- Download als ZIP direkt aus dem Admin-Panel
Restore:
- Kleine Backups (<100MB): Direkter Upload
- Grosse Backups (>100MB bis TB+): Chunked Upload in 10MB-Stuecken
mit Fortschrittsanzeige
- DB-Merge: INSERT OR REPLACE auf gemeinsame Spalten, so dass neue
Schema-Aenderungen erhalten bleiben und Backup-Daten eingefuegt werden
- Dateien werden in data/files/ wiederhergestellt
- Restore-Anleitung direkt in der UI mit Hinweis auf SECRET_KEY/JWT_SECRET_KEY
Backend: /admin/backup, /admin/restore/init, /admin/restore/chunk,
/admin/restore/finalize, /admin/restore/direct
Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
parent
0150bf4b2f
commit
c6fe2c590f
|
|
@ -2,4 +2,4 @@ from flask import Blueprint
|
||||||
|
|
||||||
api_bp = Blueprint('api', __name__, url_prefix='/api')
|
api_bp = Blueprint('api', __name__, url_prefix='/api')
|
||||||
|
|
||||||
from app.api import auth, users, files, calendar, contacts, email, office, passwords # noqa: E402, F401
|
from app.api import auth, users, files, calendar, contacts, email, office, passwords, backup # noqa: E402, F401
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1,355 @@
|
||||||
|
import io
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import shutil
|
||||||
|
import sqlite3
|
||||||
|
import tempfile
|
||||||
|
import uuid
|
||||||
|
import zipfile
|
||||||
|
from datetime import datetime, timezone
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from flask import request, jsonify, current_app, Response
|
||||||
|
|
||||||
|
from app.api import api_bp
|
||||||
|
from app.api.auth import admin_required
|
||||||
|
from app.extensions import db
|
||||||
|
|
||||||
|
|
||||||
|
# Active chunked restore uploads, keyed by upload_id.
# Each entry (created by restore_init) holds: the staging directory path,
# expected total size / chunk count, the set of received chunk numbers,
# the original filename and a creation timestamp.
# NOTE(review): module-level in-memory state — this only works when the app
# runs in a single worker process; with multiple workers, chunks of one
# upload could land in different processes. Confirm deployment model.
_active_uploads = {}
|
||||||
|
|
||||||
|
|
||||||
|
# --- Backup ---
|
||||||
|
|
||||||
|
@api_bp.route('/admin/backup', methods=['POST'])
@admin_required
def create_backup():
    """Create a full backup as a streaming ZIP download.

    ZIP contents:
    - metadata.json: format version, timestamp and simple stats
    - database.sqlite3: consistent copy of the SQLite DB (sqlite3 backup API)
    - files/: all uploaded user files

    Returns a streaming ``application/zip`` response. The ZIP is staged in a
    temporary file first, so arbitrarily large backups never need to fit in
    memory; the temp file is always removed when streaming ends.
    """
    db_uri = current_app.config['SQLALCHEMY_DATABASE_URI']
    # NOTE(review): assumes a sqlite:/// URI; other DB backends would need a
    # different dump mechanism.
    db_path = db_uri.replace('sqlite:///', '')
    upload_path = Path(current_app.config['UPLOAD_PATH'])

    # Imported here (not at module level) to avoid circular imports.
    from app.models.user import User
    from app.models.file import File
    user_count = User.query.count()
    file_count = File.query.filter_by(is_folder=False).count()

    metadata = {
        'version': '1.0',
        'created_at': datetime.now(timezone.utc).isoformat(),
        'user_count': user_count,
        'file_count': file_count,
        'description': 'Mini-Cloud Full Backup',
    }

    def generate_zip():
        """Build the ZIP in a temp file, then stream it in 1 MB chunks."""
        # Create (and immediately close) a named temp file so zipfile can
        # reopen it by path; delete=False because we unlink it ourselves.
        with tempfile.NamedTemporaryFile(delete=False, suffix='.zip') as tmp:
            tmp_path = tmp.name

        try:
            with zipfile.ZipFile(tmp_path, 'w', zipfile.ZIP_DEFLATED,
                                 allowZip64=True) as zf:
                # 1. Metadata
                zf.writestr('metadata.json', json.dumps(metadata, indent=2))

                # 2. SQLite database — the sqlite3 backup API yields a
                # consistent snapshot even while the live DB is in use.
                db_backup_path = tmp_path + '.db'
                try:
                    source = sqlite3.connect(db_path)
                    dest = sqlite3.connect(db_backup_path)
                    source.backup(dest)
                    source.close()
                    dest.close()
                    zf.write(db_backup_path, 'database.sqlite3')
                finally:
                    if os.path.exists(db_backup_path):
                        os.unlink(db_backup_path)

                # 3. User files, stored under files/ with their relative paths.
                if upload_path.exists():
                    for file_path in upload_path.rglob('*'):
                        if file_path.is_file():
                            arcname = 'files/' + str(file_path.relative_to(upload_path))
                            zf.write(str(file_path), arcname)

            # Stream the finished ZIP in 1 MB chunks.
            with open(tmp_path, 'rb') as f:
                while True:
                    chunk = f.read(1024 * 1024)
                    if not chunk:
                        break
                    yield chunk
        finally:
            if os.path.exists(tmp_path):
                os.unlink(tmp_path)

    timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
    filename = f'minicloud_backup_{timestamp}.zip'

    return Response(
        generate_zip(),
        mimetype='application/zip',
        headers={
            # BUG FIX: the computed filename was previously not interpolated
            # into the header, so every download got a wrong literal name.
            'Content-Disposition': f'attachment; filename="{filename}"',
            # Disable nginx proxy buffering so the stream reaches the client.
            'X-Accel-Buffering': 'no',
        },
    )
|
||||||
|
|
||||||
|
|
||||||
|
# --- Chunked Restore Upload ---
|
||||||
|
|
||||||
|
@api_bp.route('/admin/restore/init', methods=['POST'])
@admin_required
def restore_init():
    """Initialize a chunked restore upload.

    Registers a temp staging directory for the upload and returns an
    ``upload_id`` the client must send with every chunk, plus the
    recommended chunk size.
    """
    payload = request.get_json() or {}

    upload_id = str(uuid.uuid4())
    staging_dir = Path(tempfile.gettempdir()) / f'minicloud_restore_{upload_id}'
    staging_dir.mkdir(parents=True, exist_ok=True)

    _active_uploads[upload_id] = {
        'dir': str(staging_dir),
        'total_size': payload.get('total_size', 0),
        'total_chunks': payload.get('total_chunks', 0),
        'received_chunks': set(),
        'filename': payload.get('filename', 'backup.zip'),
        'created_at': datetime.now(timezone.utc).isoformat(),
    }

    return jsonify({
        'upload_id': upload_id,
        'chunk_size': 10 * 1024 * 1024,  # 10 MB recommended chunk size
    }), 200
|
||||||
|
|
||||||
|
|
||||||
|
@api_bp.route('/admin/restore/chunk', methods=['POST'])
@admin_required
def restore_chunk():
    """Upload a single chunk of the backup file.

    Form fields: ``upload_id`` (from /admin/restore/init), ``chunk_number``
    (0-based int) and ``chunk`` (the binary file part).
    Returns received/total progress counters.
    """
    upload_id = request.form.get('upload_id', '')

    # BUG FIX: a malformed chunk_number previously raised an unhandled
    # ValueError (HTTP 500); reject it cleanly with a 400 instead.
    try:
        chunk_number = int(request.form.get('chunk_number', 0))
    except (TypeError, ValueError):
        return jsonify({'error': 'Ungueltige Chunk-Nummer'}), 400
    if chunk_number < 0:
        return jsonify({'error': 'Ungueltige Chunk-Nummer'}), 400

    if upload_id not in _active_uploads:
        return jsonify({'error': 'Upload-ID unbekannt. Bitte neu starten.'}), 404

    if 'chunk' not in request.files:
        return jsonify({'error': 'Kein Chunk gesendet'}), 400

    upload_info = _active_uploads[upload_id]
    upload_dir = Path(upload_info['dir'])

    # Zero-padded name so finalize can reassemble chunks in order.
    chunk_file = request.files['chunk']
    chunk_path = upload_dir / f'chunk_{chunk_number:06d}'
    chunk_file.save(str(chunk_path))

    upload_info['received_chunks'].add(chunk_number)

    return jsonify({
        'chunk_number': chunk_number,
        'received': len(upload_info['received_chunks']),
        'total': upload_info['total_chunks'],
    }), 200
|
||||||
|
|
||||||
|
|
||||||
|
@api_bp.route('/admin/restore/finalize', methods=['POST'])
@admin_required
def restore_finalize():
    """Assemble the uploaded chunks into a ZIP and run the restore.

    The staging directory and the in-memory upload entry are removed in
    every code path — success, validation failure or exception.
    """
    payload = request.get_json() or {}
    upload_id = payload.get('upload_id', '')

    if upload_id not in _active_uploads:
        return jsonify({'error': 'Upload-ID unbekannt'}), 404

    upload_info = _active_uploads[upload_id]
    staging_dir = Path(upload_info['dir'])

    try:
        # Concatenate chunk_000000, chunk_000001, ... into a single ZIP.
        zip_path = staging_dir / 'backup.zip'
        assembled = 0
        with open(str(zip_path), 'wb') as target:
            while True:
                part = staging_dir / f'chunk_{assembled:06d}'
                if not part.exists():
                    break
                with open(str(part), 'rb') as src:
                    shutil.copyfileobj(src, target)
                assembled += 1

        if assembled == 0:
            return jsonify({'error': 'Keine Chunks gefunden'}), 400

        # Reject anything that is not a real ZIP before touching the DB.
        if not zipfile.is_zipfile(str(zip_path)):
            return jsonify({'error': 'Ungueltige ZIP-Datei'}), 400

        result = _perform_restore(str(zip_path))
        return jsonify(result), 200

    except Exception as e:
        return jsonify({'error': f'Restore fehlgeschlagen: {str(e)}'}), 500

    finally:
        # Cleanup happens regardless of outcome.
        shutil.rmtree(str(staging_dir), ignore_errors=True)
        _active_uploads.pop(upload_id, None)
|
||||||
|
|
||||||
|
|
||||||
|
@api_bp.route('/admin/restore/direct', methods=['POST'])
@admin_required
def restore_direct():
    """Direct (non-chunked) restore from a small uploaded backup ZIP.

    The upload is spooled to a temp file, validated as a ZIP, restored via
    :func:`_perform_restore`, and the temp file is always removed.
    """
    if 'file' not in request.files:
        return jsonify({'error': 'Keine Datei gesendet'}), 400

    uploaded = request.files['file']

    with tempfile.NamedTemporaryFile(delete=False, suffix='.zip') as tmp:
        uploaded.save(tmp.name)
        tmp_path = tmp.name

    try:
        if not zipfile.is_zipfile(tmp_path):
            return jsonify({'error': 'Ungueltige ZIP-Datei'}), 400

        return jsonify(_perform_restore(tmp_path)), 200

    except Exception as e:
        return jsonify({'error': f'Restore fehlgeschlagen: {str(e)}'}), 500
    finally:
        os.unlink(tmp_path)
|
||||||
|
|
||||||
|
|
||||||
|
def _perform_restore(zip_path):
    """Perform the actual restore from a validated ZIP file.

    Strategy for the DB merge:
    - Open the backup SQLite DB as a separate connection
    - For each table in the backup, read all rows
    - INSERT OR REPLACE them into the live database
    - This preserves any new tables/columns in the current schema
    - Existing rows with the same primary keys are overwritten by backup data

    Returns a stats dict with per-table row counts, restored file count,
    backup metadata and a success message.
    """
    upload_path = Path(current_app.config['UPLOAD_PATH'])
    # NOTE(review): 'users' and 'files_db' are initialized but never updated
    # below — either populate them or drop them from the stats payload.
    stats = {'users': 0, 'files_db': 0, 'files_disk': 0, 'tables': []}

    with tempfile.TemporaryDirectory() as extract_dir:
        extract_path = Path(extract_dir)

        # Extract the whole backup into a throwaway directory.
        # NOTE(review): extractall() on an attacker-supplied ZIP is exposed to
        # zip-slip path traversal on Python < 3.12 defaults; admin-only
        # endpoint mitigates this, but consider validating member names.
        with zipfile.ZipFile(zip_path, 'r') as zf:
            zf.extractall(str(extract_path))

        # Read metadata (optional — older/foreign backups may lack it).
        metadata_path = extract_path / 'metadata.json'
        metadata = {}
        if metadata_path.exists():
            metadata = json.loads(metadata_path.read_text())
            stats['backup_date'] = metadata.get('created_at', 'Unbekannt')
            stats['backup_users'] = metadata.get('user_count', '?')
            stats['backup_files'] = metadata.get('file_count', '?')

        # Restore database via merge (raw sqlite3, bypassing SQLAlchemy).
        backup_db_path = extract_path / 'database.sqlite3'
        if backup_db_path.exists():
            live_db_uri = current_app.config['SQLALCHEMY_DATABASE_URI']
            live_db_path = live_db_uri.replace('sqlite:///', '')

            backup_conn = sqlite3.connect(str(backup_db_path))
            backup_conn.row_factory = sqlite3.Row
            live_conn = sqlite3.connect(live_db_path)

            try:
                # Get list of user tables in the backup (skip sqlite internals).
                backup_tables = [row[0] for row in
                                 backup_conn.execute(
                                     "SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%'"
                                 ).fetchall()]

                # Get list of user tables in the live DB.
                live_tables = [row[0] for row in
                               live_conn.execute(
                                   "SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%'"
                               ).fetchall()]

                for table in backup_tables:
                    # Never touch migration bookkeeping; skip tables the
                    # current schema no longer has.
                    if table == 'alembic_version':
                        continue
                    if table not in live_tables:
                        continue

                    # Column names on both sides (PRAGMA row: col[1] = name).
                    live_cols = [col[1] for col in
                                 live_conn.execute(f'PRAGMA table_info("{table}")').fetchall()]
                    backup_cols = [col[1] for col in
                                   backup_conn.execute(f'PRAGMA table_info("{table}")').fetchall()]

                    # Use only columns that exist in both schemas, so newly
                    # added columns keep their defaults and dropped columns
                    # are ignored.
                    common_cols = [c for c in backup_cols if c in live_cols]
                    if not common_cols:
                        continue

                    # Identifiers are double-quoted; values go via
                    # placeholders, so no data is interpolated into SQL.
                    cols_str = ', '.join(f'"{c}"' for c in common_cols)
                    placeholders = ', '.join('?' for _ in common_cols)

                    rows = backup_conn.execute(
                        f'SELECT {cols_str} FROM "{table}"'
                    ).fetchall()

                    row_count = 0
                    for row in rows:
                        try:
                            live_conn.execute(
                                f'INSERT OR REPLACE INTO "{table}" ({cols_str}) VALUES ({placeholders})',
                                tuple(row)
                            )
                            row_count += 1
                        except Exception:
                            # Best-effort merge: a single bad row (e.g. FK
                            # violation) must not abort the whole restore.
                            continue

                    if row_count > 0:
                        stats['tables'].append({'name': table, 'rows': row_count})

                # One commit for the whole merge.
                live_conn.commit()
            finally:
                backup_conn.close()
                live_conn.close()

        # Restore uploaded files into the configured upload directory,
        # preserving the relative directory layout from the backup.
        backup_files_dir = extract_path / 'files'
        if backup_files_dir.exists():
            upload_path.mkdir(parents=True, exist_ok=True)
            file_count = 0
            for src_file in backup_files_dir.rglob('*'):
                if src_file.is_file():
                    rel_path = src_file.relative_to(backup_files_dir)
                    dest = upload_path / rel_path
                    dest.parent.mkdir(parents=True, exist_ok=True)
                    shutil.copy2(str(src_file), str(dest))
                    file_count += 1
            stats['files_disk'] = file_count

    stats['success'] = True
    stats['message'] = 'Restore erfolgreich abgeschlossen'
    return stats
|
||||||
|
|
@ -75,6 +75,70 @@
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
<!-- Backup & Restore -->
|
||||||
|
<div class="admin-section">
|
||||||
|
<h3>Backup & Restore</h3>
|
||||||
|
|
||||||
|
<div class="backup-grid">
|
||||||
|
<!-- Backup -->
|
||||||
|
<div class="backup-card">
|
||||||
|
<h4><i class="pi pi-download"></i> Backup erstellen</h4>
|
||||||
|
<p>Erstellt eine ZIP-Datei mit der kompletten Datenbank und allen hochgeladenen Dateien.</p>
|
||||||
|
<Button label="Backup herunterladen" icon="pi pi-download" @click="createBackup" :loading="backupLoading" />
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Restore -->
|
||||||
|
<div class="backup-card">
|
||||||
|
<h4><i class="pi pi-upload"></i> Restore</h4>
|
||||||
|
|
||||||
|
<div class="restore-instructions">
|
||||||
|
<strong>Anleitung:</strong>
|
||||||
|
<ol>
|
||||||
|
<li>Neue Mini-Cloud-Instanz aufsetzen (Docker oder manuell)</li>
|
||||||
|
<li>Admin-Benutzer registrieren</li>
|
||||||
|
<li><strong>Wichtig:</strong> In der <code>.env</code> muessen <code>SECRET_KEY</code> und <code>JWT_SECRET_KEY</code> identisch zur alten Instanz sein, sonst koennen verschluesselte Daten (E-Mail-Passwoerter, Passwort-Manager) nicht entschluesselt werden!</li>
|
||||||
|
<li>Backup-ZIP hier hochladen</li>
|
||||||
|
<li>Alle Benutzer, Dateien, Kalender, Kontakte und Einstellungen werden wiederhergestellt</li>
|
||||||
|
</ol>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<Message v-if="!restoreInProgress" severity="warn" :closable="false">
|
||||||
|
Die SECRET_KEY und JWT_SECRET_KEY in der .env muessen mit dem Backup uebereinstimmen!
|
||||||
|
</Message>
|
||||||
|
|
||||||
|
<div v-if="!restoreInProgress" class="field">
|
||||||
|
<label>Backup-ZIP auswaehlen</label>
|
||||||
|
<input ref="restoreFileInput" type="file" accept=".zip" @change="onRestoreFileSelected" />
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div v-if="restoreFile && !restoreInProgress" class="restore-info">
|
||||||
|
<p>Datei: <strong>{{ restoreFile.name }}</strong> ({{ formatSize(restoreFile.size) }})</p>
|
||||||
|
<Button label="Restore starten" icon="pi pi-upload" severity="warn" @click="startRestore" />
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div v-if="restoreInProgress" class="restore-progress">
|
||||||
|
<p><i class="pi pi-spin pi-spinner"></i> Restore laeuft...</p>
|
||||||
|
<ProgressBar :value="restoreProgress" />
|
||||||
|
<p class="progress-text">{{ restoreStatus }}</p>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div v-if="restoreResult" class="restore-result">
|
||||||
|
<Message :severity="restoreResult.success ? 'success' : 'error'" :closable="false">
|
||||||
|
{{ restoreResult.message }}
|
||||||
|
</Message>
|
||||||
|
<div v-if="restoreResult.tables?.length" class="result-details">
|
||||||
|
<strong>Wiederhergestellte Tabellen:</strong>
|
||||||
|
<ul>
|
||||||
|
<li v-for="t in restoreResult.tables" :key="t.name">{{ t.name }}: {{ t.rows }} Eintraege</li>
|
||||||
|
</ul>
|
||||||
|
<p v-if="restoreResult.files_disk">Dateien auf Festplatte: {{ restoreResult.files_disk }}</p>
|
||||||
|
<p>Backup vom: {{ restoreResult.backup_date }}</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
<!-- User Management -->
|
<!-- User Management -->
|
||||||
<div class="admin-section">
|
<div class="admin-section">
|
||||||
<div class="section-header">
|
<div class="section-header">
|
||||||
|
|
@ -257,6 +321,7 @@ import InputSwitch from 'primevue/inputswitch'
|
||||||
import Message from 'primevue/message'
|
import Message from 'primevue/message'
|
||||||
import TabView from 'primevue/tabview'
|
import TabView from 'primevue/tabview'
|
||||||
import TabPanel from 'primevue/tabpanel'
|
import TabPanel from 'primevue/tabpanel'
|
||||||
|
import ProgressBar from 'primevue/progressbar'
|
||||||
|
|
||||||
const toast = useToast()
|
const toast = useToast()
|
||||||
const auth = useAuthStore()
|
const auth = useAuthStore()
|
||||||
|
|
@ -278,6 +343,16 @@ const smtpForm = ref({
|
||||||
const smtpPasswordSet = ref(false)
|
const smtpPasswordSet = ref(false)
|
||||||
const smtpTesting = ref(false)
|
const smtpTesting = ref(false)
|
||||||
|
|
||||||
|
// --- Backup & Restore state ---
const backupLoading = ref(false)      // true while the backup ZIP download is running
const restoreFileInput = ref(null)    // template ref to the <input type="file">, cleared after restore
const restoreFile = ref(null)         // currently selected backup File object (null = none)
const restoreInProgress = ref(false)  // true from restore start until success/error
const restoreProgress = ref(0)        // 0-100 value driving the ProgressBar
const restoreStatus = ref('')         // human-readable progress message
const restoreResult = ref(null)       // response payload of the restore endpoint (stats / error)
const CHUNK_SIZE = 10 * 1024 * 1024 // 10 MB
|
||||||
|
|
||||||
const showUserDialog = ref(false)
|
const showUserDialog = ref(false)
|
||||||
const editingUser = ref(null)
|
const editingUser = ref(null)
|
||||||
const userForm = ref({ username: '', email: '', password: '', role: 'user', storage_quota_mb: 5120, is_active: true })
|
const userForm = ref({ username: '', email: '', password: '', role: 'user', storage_quota_mb: 5120, is_active: true })
|
||||||
|
|
@ -343,6 +418,124 @@ async function saveSettings() {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// --- Backup & Restore ---
|
||||||
|
// Format a byte count as a human-readable size (binary, 1024-based).
// 0/undefined -> "0 B"; >= 1 KB gets one decimal place, plain bytes none.
function formatSize(bytes) {
  if (!bytes) return '0 B'
  const units = ['B', 'KB', 'MB', 'GB', 'TB']
  let idx = 0
  let value = bytes
  for (; value >= 1024 && idx < units.length - 1; idx++) {
    value /= 1024
  }
  const decimals = idx === 0 ? 0 : 1
  return `${value.toFixed(decimals)} ${units[idx]}`
}
|
||||||
|
|
||||||
|
// Download a full backup ZIP via a blob response and a synthetic <a> click.
// The filename comes from the Content-Disposition header when present,
// otherwise a date-based fallback name is used.
async function createBackup() {
  backupLoading.value = true
  try {
    const res = await apiClient.post('/admin/backup', {}, { responseType: 'blob' })
    const blobUrl = URL.createObjectURL(res.data)
    const link = document.createElement('a')
    link.href = blobUrl
    const cd = res.headers['content-disposition'] || ''
    const nameMatch = cd.match(/filename="?(.+?)"?$/)
    link.download = nameMatch
      ? nameMatch[1]
      : `minicloud_backup_${new Date().toISOString().slice(0,10)}.zip`
    link.click()
    URL.revokeObjectURL(blobUrl)
    toast.add({ severity: 'success', summary: 'Backup heruntergeladen', life: 3000 })
  } catch (err) {
    toast.add({ severity: 'error', summary: 'Backup fehlgeschlagen', detail: err.response?.data?.error, life: 5000 })
  } finally {
    backupLoading.value = false
  }
}
|
||||||
|
|
||||||
|
// File-input change handler: remember the chosen backup file and clear
// any result from a previous restore run.
function onRestoreFileSelected(event) {
  const [selected] = event.target.files
  restoreFile.value = selected || null
  restoreResult.value = null
}
|
||||||
|
|
||||||
|
// Run a restore from the selected backup file.
// Files <= 100 MB go through one direct multipart upload; larger files are
// split into CHUNK_SIZE slices uploaded sequentially (init -> chunk* ->
// finalize). Progress: chunk uploads map to 0-80%, finalize to 85-100%.
async function startRestore() {
  if (!restoreFile.value) return

  restoreInProgress.value = true
  restoreProgress.value = 0
  restoreStatus.value = 'Starte Upload...'
  restoreResult.value = null

  const file = restoreFile.value
  const totalChunks = Math.ceil(file.size / CHUNK_SIZE)

  try {
    if (file.size <= 100 * 1024 * 1024) {
      // Small file: single direct upload (generous 10-min timeout, since
      // the server restores synchronously inside this request).
      restoreStatus.value = 'Lade Datei hoch...'
      restoreProgress.value = 50
      const formData = new FormData()
      formData.append('file', file)
      const res = await apiClient.post('/admin/restore/direct', formData, {
        headers: { 'Content-Type': 'multipart/form-data' },
        timeout: 600000,
      })
      restoreProgress.value = 100
      restoreResult.value = res.data
    } else {
      // Large file: chunked upload
      // 1. Init — server allocates a staging dir and returns an upload id.
      restoreStatus.value = 'Initialisiere Upload...'
      const initRes = await apiClient.post('/admin/restore/init', {
        total_size: file.size,
        total_chunks: totalChunks,
        filename: file.name,
      })
      const uploadId = initRes.data.upload_id

      // 2. Upload chunks sequentially (keeps memory flat; Blob.slice is lazy).
      for (let i = 0; i < totalChunks; i++) {
        const start = i * CHUNK_SIZE
        const end = Math.min(start + CHUNK_SIZE, file.size)
        const chunk = file.slice(start, end)

        const formData = new FormData()
        formData.append('upload_id', uploadId)
        formData.append('chunk_number', i.toString())
        formData.append('chunk', chunk)

        await apiClient.post('/admin/restore/chunk', formData, {
          headers: { 'Content-Type': 'multipart/form-data' },
          timeout: 120000,
        })

        // Chunk phase fills the first 80% of the progress bar.
        restoreProgress.value = Math.round(((i + 1) / totalChunks) * 80)
        restoreStatus.value = `Chunk ${i + 1} / ${totalChunks} hochgeladen (${formatSize(end)} / ${formatSize(file.size)})`
      }

      // 3. Finalize — server reassembles the chunks and performs the restore.
      restoreStatus.value = 'Stelle Daten wieder her...'
      restoreProgress.value = 85
      const finalRes = await apiClient.post('/admin/restore/finalize', {
        upload_id: uploadId,
      }, { timeout: 600000 })

      restoreProgress.value = 100
      restoreResult.value = finalRes.data
    }

    restoreStatus.value = 'Fertig!'
    if (restoreResult.value?.success) {
      toast.add({ severity: 'success', summary: 'Restore erfolgreich', life: 5000 })
      await loadUsers()
    }
  } catch (err) {
    // NOTE(review): on a mid-stream failure there is no abort call, so the
    // server-side staging dir for this upload id is left behind until the
    // process restarts — consider an /admin/restore/abort endpoint.
    restoreResult.value = {
      success: false,
      message: err.response?.data?.error || 'Restore fehlgeschlagen: ' + String(err),
    }
    toast.add({ severity: 'error', summary: 'Restore fehlgeschlagen', life: 5000 })
  } finally {
    restoreInProgress.value = false
    restoreFile.value = null
    if (restoreFileInput.value) restoreFileInput.value.value = ''
  }
}
|
||||||
|
|
||||||
// --- Invite links ---
|
// --- Invite links ---
|
||||||
async function createInvite() {
|
async function createInvite() {
|
||||||
inviteLoading.value = true
|
inviteLoading.value = true
|
||||||
|
|
@ -567,4 +760,21 @@ onMounted(() => {
|
||||||
.acc-actions { display: flex; }
|
.acc-actions { display: flex; }
|
||||||
.empty-hint-small { padding: 1rem; color: var(--p-text-muted-color); font-size: 0.875rem; text-align: center; }
|
.empty-hint-small { padding: 1rem; color: var(--p-text-muted-color); font-size: 0.875rem; text-align: center; }
|
||||||
.section-title { margin: 1rem 0 0.5rem; font-size: 0.95rem; font-weight: 600; }
|
.section-title { margin: 1rem 0 0.5rem; font-size: 0.95rem; font-weight: 600; }
|
||||||
|
.backup-grid { display: grid; grid-template-columns: 1fr 1fr; gap: 1.5rem; }
|
||||||
|
@media (max-width: 900px) { .backup-grid { grid-template-columns: 1fr; } }
|
||||||
|
.backup-card { border: 1px solid var(--p-surface-200); border-radius: 8px; padding: 1.25rem; }
|
||||||
|
.backup-card h4 { margin: 0 0 0.75rem; display: flex; align-items: center; gap: 0.5rem; }
|
||||||
|
.backup-card p { font-size: 0.875rem; color: var(--p-text-muted-color); margin: 0 0 1rem; }
|
||||||
|
.restore-instructions { background: var(--p-surface-50); border-radius: 6px; padding: 1rem; margin-bottom: 1rem; font-size: 0.85rem; }
|
||||||
|
.restore-instructions ol { margin: 0.5rem 0 0; padding-left: 1.25rem; }
|
||||||
|
.restore-instructions li { margin-bottom: 0.375rem; line-height: 1.4; }
|
||||||
|
.restore-instructions code { background: var(--p-surface-200); padding: 0.125rem 0.375rem; border-radius: 3px; font-size: 0.8rem; }
|
||||||
|
.restore-info { margin-top: 1rem; }
|
||||||
|
.restore-info p { margin-bottom: 0.75rem; }
|
||||||
|
.restore-progress { margin-top: 1rem; }
|
||||||
|
.restore-progress p { margin: 0.5rem 0; }
|
||||||
|
.progress-text { font-size: 0.825rem; color: var(--p-text-muted-color); }
|
||||||
|
.restore-result { margin-top: 1rem; }
|
||||||
|
.result-details { font-size: 0.85rem; margin-top: 0.5rem; }
|
||||||
|
.result-details ul { margin: 0.25rem 0; padding-left: 1.25rem; }
|
||||||
</style>
|
</style>
|
||||||
|
|
|
||||||
Loading…
Reference in New Issue