feat: SFTP-Backup mit Scheduler, Versionierung und Multi-Target

Mehrere SFTP-Backup-Ziele konfigurierbar mit:
- Host, Port, Benutzername, Passwort, Remote-Pfad
- Konfigurierbares Intervall (15 Min. bis woechentlich oder deaktiviert)
- Maximale Anzahl aufbewahrter Versionen (aeltere werden automatisch geloescht)
- Aktiv/Inaktiv-Toggle pro Ziel

Features:
- Automatischer Hintergrund-Scheduler prueft alle 60 Sekunden ob
  Backups faellig sind und fuehrt sie aus
- Manuelles Backup per Klick ("Jetzt sichern")
- SFTP-Verbindungstest-Button
- Versionen-Dialog: Alle Backup-Versionen auf dem SFTP-Server auflisten
  mit Groesse und Datum
- Restore direkt von SFTP: Version auswaehlen -> wird heruntergeladen
  und ueber die bestehende DB-Merge-Logik wiederhergestellt
- Chunked Upload zum SFTP in 16MB-Bloecken (fuer grosse Backups)
- Status-Anzeige: Letztes Backup, Erfolg/Fehler, Nachricht

Backend: BackupTarget Model, SFTP-Service (paramiko), Backup-Scheduler
API: /admin/backup/targets CRUD, /test, /run, /versions, /restore

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
Stefan Hacker
2026-04-11 18:07:28 +02:00
parent c6fe2c590f
commit d42d6d5d96
8 changed files with 817 additions and 1 deletions
+5
View File
@@ -71,4 +71,9 @@ def create_app(config_class=Config):
conn.execute(db.text('PRAGMA journal_mode=WAL'))
conn.commit()
# Start backup scheduler (only in main process, not reloader)
if not app.debug or os.environ.get('WERKZEUG_RUN_MAIN') == 'true':
from app.services.backup_scheduler import start_backup_scheduler
start_backup_scheduler(app)
return app
+176
View File
@@ -353,3 +353,179 @@ def _perform_restore(zip_path):
stats['success'] = True
stats['message'] = 'Restore erfolgreich abgeschlossen'
return stats
# ========== SFTP Backup Targets ==========
@api_bp.route('/admin/backup/targets', methods=['GET'])
@admin_required
def list_backup_targets():
    """Return all configured SFTP backup targets, oldest first."""
    from app.models.backup_target import BackupTarget

    all_targets = BackupTarget.query.order_by(BackupTarget.created_at).all()
    payload = [target.to_dict() for target in all_targets]
    return jsonify(payload), 200
@api_bp.route('/admin/backup/targets', methods=['POST'])
@admin_required
def create_backup_target():
    """Create a new SFTP backup target.

    Expects a JSON body with at least ``name``, ``host`` and ``username``
    plus either ``password`` or ``private_key``.  Credentials are stored
    encrypted only.  Returns the created target as JSON with HTTP 201.
    """
    from app.models.backup_target import BackupTarget
    from app.services.crypto_service import encrypt_field

    # silent=True + fallback: a missing or non-JSON body yields a clean
    # 400 below instead of an AttributeError on None (HTTP 500).
    data = request.get_json(silent=True) or {}
    for field in ['name', 'host', 'username']:
        if not data.get(field):
            return jsonify({'error': f'{field} erforderlich'}), 400
    if not data.get('password') and not data.get('private_key'):
        return jsonify({'error': 'Passwort oder Private Key erforderlich'}), 400
    target = BackupTarget(
        name=data['name'],
        host=data['host'],
        port=data.get('port', 22),
        username=data['username'],
        remote_path=data.get('remote_path', '/backups/minicloud'),
        is_active=data.get('is_active', True),
        backup_interval_minutes=data.get('backup_interval_minutes', 1440),
        max_versions=data.get('max_versions', 10),
    )
    # Credentials are only ever persisted encrypted.
    if data.get('password'):
        target.password_encrypted = encrypt_field(data['password'], 'backup-key')
    if data.get('private_key'):
        target.private_key_encrypted = encrypt_field(data['private_key'], 'backup-key')
    db.session.add(target)
    db.session.commit()
    return jsonify(target.to_dict()), 201
@api_bp.route('/admin/backup/targets/<int:target_id>', methods=['PUT'])
@admin_required
def update_backup_target(target_id):
    """Update an existing SFTP backup target.

    Plain fields are patched when present in the JSON body; a password or
    private key is only replaced when a non-empty value is sent, so
    existing credentials survive partial updates.
    """
    from app.models.backup_target import BackupTarget
    from app.services.crypto_service import encrypt_field

    target = db.session.get(BackupTarget, target_id)
    if not target:
        return jsonify({'error': 'Nicht gefunden'}), 404
    # silent=True + fallback: a missing or non-JSON body becomes an empty
    # patch instead of an AttributeError on None (HTTP 500).
    data = request.get_json(silent=True) or {}
    for field in ['name', 'host', 'port', 'username', 'remote_path',
                  'is_active', 'backup_interval_minutes', 'max_versions']:
        if field in data:
            setattr(target, field, data[field])
    if data.get('password'):
        target.password_encrypted = encrypt_field(data['password'], 'backup-key')
    if data.get('private_key'):
        target.private_key_encrypted = encrypt_field(data['private_key'], 'backup-key')
    db.session.commit()
    return jsonify(target.to_dict()), 200
@api_bp.route('/admin/backup/targets/<int:target_id>', methods=['DELETE'])
@admin_required
def delete_backup_target(target_id):
    """Delete a backup target configuration (remote files are not touched)."""
    from app.models.backup_target import BackupTarget

    target = db.session.get(BackupTarget, target_id)
    if target is None:
        return jsonify({'error': 'Nicht gefunden'}), 404
    db.session.delete(target)
    db.session.commit()
    return jsonify({'message': 'Backup-Ziel geloescht'}), 200
@api_bp.route('/admin/backup/targets/<int:target_id>/test', methods=['POST'])
@admin_required
def test_backup_target(target_id):
    """Probe the SFTP connection of a target and report success or failure."""
    from app.models.backup_target import BackupTarget
    from app.services.sftp_backup import test_sftp_connection

    target = db.session.get(BackupTarget, target_id)
    if target is None:
        return jsonify({'error': 'Nicht gefunden'}), 404
    try:
        test_sftp_connection(target)
    except Exception as exc:
        return jsonify({'error': f'Verbindungsfehler: {str(exc)}'}), 400
    return jsonify({'message': 'SFTP-Verbindung erfolgreich'}), 200
@api_bp.route('/admin/backup/targets/<int:target_id>/run', methods=['POST'])
@admin_required
def run_backup_now(target_id):
    """Manually trigger a backup to this target."""
    from app.models.backup_target import BackupTarget
    from app.services.sftp_backup import create_backup_zip, upload_backup_to_sftp

    target = db.session.get(BackupTarget, target_id)
    if target is None:
        return jsonify({'error': 'Nicht gefunden'}), 404
    # NOTE(review): assumes a sqlite:/// URI; other schemes would yield a
    # bogus path here (same as the scheduler does).
    db_path = current_app.config['SQLALCHEMY_DATABASE_URI'].replace('sqlite:///', '')
    upload_path = current_app.config['UPLOAD_PATH']
    zip_path = None
    try:
        zip_path = create_backup_zip(db_path, upload_path)
        version = upload_backup_to_sftp(target, zip_path, current_app)
        target.last_backup_at = datetime.now(timezone.utc)
        target.last_backup_status = 'success'
        target.last_backup_message = f'Version {version} hochgeladen'
        db.session.commit()
        return jsonify({'message': f'Backup {version} erfolgreich', 'version': version}), 200
    except Exception as exc:
        # Record the failure on the target so the UI can display it.
        target.last_backup_at = datetime.now(timezone.utc)
        target.last_backup_status = 'error'
        target.last_backup_message = str(exc)[:500]
        db.session.commit()
        return jsonify({'error': f'Backup fehlgeschlagen: {str(exc)}'}), 500
    finally:
        # The local ZIP is transient; remove it whether or not the upload worked.
        if zip_path and os.path.exists(zip_path):
            os.unlink(zip_path)
@api_bp.route('/admin/backup/targets/<int:target_id>/versions', methods=['GET'])
@admin_required
def list_backup_versions(target_id):
    """List the backup versions stored on the target's SFTP server."""
    from app.models.backup_target import BackupTarget
    from app.services.sftp_backup import list_sftp_versions

    target = db.session.get(BackupTarget, target_id)
    if target is None:
        return jsonify({'error': 'Nicht gefunden'}), 404
    try:
        return jsonify(list_sftp_versions(target)), 200
    except Exception as exc:
        return jsonify({'error': str(exc)}), 500
@api_bp.route('/admin/backup/targets/<int:target_id>/restore/<version_name>', methods=['POST'])
@admin_required
def restore_from_sftp(target_id, version_name):
    """Download a backup version from SFTP and restore it."""
    from app.models.backup_target import BackupTarget
    from app.services.sftp_backup import download_version_from_sftp

    target = db.session.get(BackupTarget, target_id)
    if target is None:
        return jsonify({'error': 'Nicht gefunden'}), 404
    zip_path = None
    try:
        zip_path = download_version_from_sftp(target, version_name)
        return jsonify(_perform_restore(zip_path)), 200
    except Exception as exc:
        return jsonify({'error': f'Restore fehlgeschlagen: {str(exc)}'}), 500
    finally:
        # Always discard the downloaded archive, even after a failed restore.
        if zip_path and os.path.exists(zip_path):
            os.unlink(zip_path)
+1
View File
@@ -5,6 +5,7 @@ from app.models.contact import AddressBook, Contact, AddressBookShare
from app.models.email_account import EmailAccount
from app.models.password_vault import PasswordFolder, PasswordEntry, PasswordShare
from app.models.settings import AppSettings
from app.models.backup_target import BackupTarget
__all__ = [
'User',
+42
View File
@@ -0,0 +1,42 @@
from datetime import datetime, timezone
from app.extensions import db
class BackupTarget(db.Model):
    """A configured SFTP destination for scheduled backups.

    Credentials are stored encrypted only; at least one of password or
    private key must be set for the target to be usable.
    """
    __tablename__ = 'backup_targets'

    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(100), nullable=False)
    host = db.Column(db.String(255), nullable=False)
    port = db.Column(db.Integer, default=22)
    username = db.Column(db.String(100), nullable=False)
    # Encrypted credential blobs (see crypto_service); never stored in plaintext.
    password_encrypted = db.Column(db.LargeBinary, nullable=True)
    private_key_encrypted = db.Column(db.LargeBinary, nullable=True)
    remote_path = db.Column(db.String(500), default='/backups/minicloud')
    is_active = db.Column(db.Boolean, default=True)
    backup_interval_minutes = db.Column(db.Integer, default=1440)  # default: daily
    max_versions = db.Column(db.Integer, default=10)
    # Status of the most recent backup run (filled by the scheduler/API).
    last_backup_at = db.Column(db.DateTime, nullable=True)
    last_backup_status = db.Column(db.String(20), nullable=True)  # 'success' or 'error'
    last_backup_message = db.Column(db.Text, nullable=True)
    created_at = db.Column(db.DateTime, default=lambda: datetime.now(timezone.utc))

    def __repr__(self):
        # Debug-friendly representation; deliberately omits credentials.
        return f'<BackupTarget {self.id} {self.name!r} {self.host}:{self.port}>'

    def to_dict(self):
        """Serialize for the API; exposes only has_* flags, never credentials."""
        return {
            'id': self.id,
            'name': self.name,
            'host': self.host,
            'port': self.port,
            'username': self.username,
            'has_password': bool(self.password_encrypted),
            'has_private_key': bool(self.private_key_encrypted),
            'remote_path': self.remote_path,
            'is_active': self.is_active,
            'backup_interval_minutes': self.backup_interval_minutes,
            'max_versions': self.max_versions,
            'last_backup_at': self.last_backup_at.isoformat() if self.last_backup_at else None,
            'last_backup_status': self.last_backup_status,
            'last_backup_message': self.last_backup_message,
            'created_at': self.created_at.isoformat() if self.created_at else None,
        }
+87
View File
@@ -0,0 +1,87 @@
"""Background scheduler for periodic SFTP backups."""
import os
import threading
import time
from datetime import datetime, timezone, timedelta
_scheduler_thread = None
_scheduler_running = False
def start_backup_scheduler(app):
    """Start the background backup scheduler (idempotent; daemon thread)."""
    global _scheduler_thread, _scheduler_running
    if _scheduler_running:
        return  # already running; never spawn a second loop
    _scheduler_running = True

    def scheduler_loop():
        while _scheduler_running:
            try:
                with app.app_context():
                    _check_and_run_backups(app)
            except Exception as exc:
                print(f'[Backup Scheduler] Error: {exc}')
            # Sleep up to 60s in one-second slices so a stop request is
            # honoured quickly instead of blocking a full minute.
            remaining = 60
            while remaining > 0 and _scheduler_running:
                time.sleep(1)
                remaining -= 1

    _scheduler_thread = threading.Thread(target=scheduler_loop, daemon=True)
    _scheduler_thread.start()
def stop_backup_scheduler():
    """Signal the scheduler loop to stop; the daemon thread exits within ~1s."""
    global _scheduler_running
    _scheduler_running = False
def _check_and_run_backups(app):
    """Check all active backup targets and run any whose interval has elapsed.

    Runs inside an app context (called by the scheduler loop).  A failure
    for one target is recorded on that target and does not stop the rest.
    """
    from app.extensions import db
    from app.models.backup_target import BackupTarget
    from app.services.sftp_backup import create_backup_zip, upload_backup_to_sftp

    targets = BackupTarget.query.filter_by(is_active=True).all()
    now = datetime.now(timezone.utc)
    for target in targets:
        # interval <= 0 (or None) means scheduling is disabled for this target
        if not target.backup_interval_minutes or target.backup_interval_minutes <= 0:
            continue
        # Check if a backup is due.
        if target.last_backup_at:
            last = target.last_backup_at
            # BUGFIX: SQLAlchemy/SQLite returns naive datetimes, while `now`
            # is timezone-aware; comparing the two raises TypeError, which
            # previously made every backup after the first one fail.
            # Normalize the stored value to UTC before comparing.
            if last.tzinfo is None:
                last = last.replace(tzinfo=timezone.utc)
            if now < last + timedelta(minutes=target.backup_interval_minutes):
                continue
        # Run the backup.
        db_uri = app.config['SQLALCHEMY_DATABASE_URI']
        db_path = db_uri.replace('sqlite:///', '')
        upload_path = app.config['UPLOAD_PATH']
        zip_path = None
        try:
            zip_path = create_backup_zip(db_path, upload_path)
            version = upload_backup_to_sftp(target, zip_path, app)
            target.last_backup_at = now
            target.last_backup_status = 'success'
            target.last_backup_message = f'Version {version} erfolgreich hochgeladen'
            db.session.commit()
            print(f'[Backup] {target.name}: {version} OK')
        except Exception as e:
            target.last_backup_at = now
            target.last_backup_status = 'error'
            target.last_backup_message = str(e)[:500]
            db.session.commit()
            print(f'[Backup] {target.name}: FEHLER - {e}')
        finally:
            # Never leave the local ZIP behind, success or failure.
            if zip_path and os.path.exists(zip_path):
                os.unlink(zip_path)
+213
View File
@@ -0,0 +1,213 @@
"""SFTP Backup Service - handles backup upload/download/versioning."""
import io
import json
import os
import sqlite3
import stat
import tempfile
import zipfile
from datetime import datetime, timezone
from pathlib import Path
import paramiko
def get_sftp_connection(target, app_config=None):
    """Create an SFTP connection from a BackupTarget.

    Returns a ``(sftp_client, transport)`` tuple; the caller must close
    both.  Raises ValueError when the target has neither a password nor a
    private key configured, and propagates paramiko errors otherwise.
    """
    from app.services.crypto_service import decrypt_field
    transport = paramiko.Transport((target.host, target.port))
    try:
        if target.private_key_encrypted:
            key_pem = decrypt_field(target.private_key_encrypted, 'backup-key')
            pkey = _load_private_key(key_pem)
            transport.connect(username=target.username, pkey=pkey)
        elif target.password_encrypted:
            password = decrypt_field(target.password_encrypted, 'backup-key')
            transport.connect(username=target.username, password=password)
        else:
            raise ValueError('Weder Passwort noch Private Key konfiguriert')
        return paramiko.SFTPClient.from_transport(transport), transport
    except Exception:
        # BUGFIX: don't leak the TCP transport when auth/negotiation fails.
        transport.close()
        raise


def _load_private_key(key_pem):
    """Parse a PEM private key, trying each key type paramiko supports.

    The original code only accepted RSA keys; Ed25519/ECDSA keys failed.
    Raises the last paramiko.SSHException if no key class can parse it.
    """
    last_error = None
    for key_cls in (paramiko.Ed25519Key, paramiko.ECDSAKey, paramiko.RSAKey):
        try:
            return key_cls.from_private_key(io.StringIO(key_pem))
        except paramiko.SSHException as exc:
            last_error = exc
    raise last_error
def ensure_remote_dir(sftp, path):
    """Recursively create remote directories (like ``mkdir -p``)."""
    # Walk upwards collecting every missing ancestor, then create them
    # top-down.  Stops at the first directory that already exists.
    missing = []
    current = path
    while current and current not in ('/', '.'):
        try:
            sftp.stat(current)
        except FileNotFoundError:
            missing.append(current)
            current = os.path.dirname(current)
        else:
            break
    for directory in reversed(missing):
        try:
            sftp.mkdir(directory)
        except IOError:
            # Another client may have created it concurrently; that's fine.
            pass
def create_backup_zip(db_path, upload_path):
    """Create a backup ZIP (metadata + DB snapshot + uploads); return its path.

    The SQLite database is copied via the sqlite3 online backup API so a
    live database is snapshotted consistently.  The caller must delete
    the returned temp file when done.  On failure the temp file is
    removed before the exception propagates (previously it leaked,
    because the caller never learned its path).
    """
    tmp = tempfile.NamedTemporaryFile(delete=False, suffix='.zip')
    tmp_path = tmp.name
    tmp.close()
    metadata = {
        'version': '1.0',
        'created_at': datetime.now(timezone.utc).isoformat(),
        'description': 'Mini-Cloud SFTP Backup',
    }
    try:
        with zipfile.ZipFile(tmp_path, 'w', zipfile.ZIP_DEFLATED, allowZip64=True) as zf:
            zf.writestr('metadata.json', json.dumps(metadata, indent=2))
            # Consistent DB snapshot via sqlite3's backup API, staged to a
            # sibling temp file that is always cleaned up.
            db_backup_path = tmp_path + '.db'
            try:
                source = sqlite3.connect(db_path)
                dest = sqlite3.connect(db_backup_path)
                source.backup(dest)
                source.close()
                dest.close()
                zf.write(db_backup_path, 'database.sqlite3')
            finally:
                if os.path.exists(db_backup_path):
                    os.unlink(db_backup_path)
            # All uploaded files, preserving their relative layout.
            upload_dir = Path(upload_path)
            if upload_dir.exists():
                for fp in upload_dir.rglob('*'):
                    if fp.is_file():
                        zf.write(str(fp), 'files/' + str(fp.relative_to(upload_dir)))
    except Exception:
        if os.path.exists(tmp_path):
            os.unlink(tmp_path)
        raise
    return tmp_path
def upload_backup_to_sftp(target, zip_path, app):
    """Upload a backup ZIP to the SFTP target in chunks; return the version name.

    Streams in 16 MB chunks so large archives are never read fully into
    memory, refreshes the ``latest.txt`` marker and prunes versions
    beyond ``target.max_versions``.  ``app`` is currently unused here but
    kept for interface stability with the callers.
    """
    timestamp = datetime.now(timezone.utc).strftime('%Y%m%d_%H%M%S')
    version_name = f'backup_{timestamp}.zip'
    sftp, transport = get_sftp_connection(target)
    try:
        remote_dir = target.remote_path.rstrip('/')
        ensure_remote_dir(sftp, remote_dir)
        remote_file = f'{remote_dir}/{version_name}'
        chunk_size = 16 * 1024 * 1024  # 16 MB
        with open(zip_path, 'rb') as local_file:
            with sftp.open(remote_file, 'wb') as remote:
                # Pipelined writes avoid a round-trip per chunk.
                remote.set_pipelined(True)
                while True:
                    data = local_file.read(chunk_size)
                    if not data:
                        break
                    remote.write(data)
        # BUGFIX: use a context manager so the remote latest.txt handle is
        # closed; the original `sftp.open(...).write(...)` leaked it.
        with sftp.open(f'{remote_dir}/latest.txt', 'w') as marker:
            marker.write(version_name)
        # Cleanup old versions (best effort).
        _cleanup_old_versions(sftp, remote_dir, target.max_versions)
        return version_name
    finally:
        sftp.close()
        transport.close()
def _cleanup_old_versions(sftp, remote_dir, max_versions):
    """Best-effort pruning: keep only the newest ``max_versions`` archives."""
    try:
        archives = [
            entry for entry in sftp.listdir_attr(remote_dir)
            if entry.filename.startswith('backup_') and entry.filename.endswith('.zip')
        ]
        # Newest first by mtime; entries without an mtime sort last.
        archives.sort(key=lambda entry: entry.st_mtime or 0, reverse=True)
        for stale in archives[max_versions:]:
            try:
                sftp.remove(f'{remote_dir}/{stale.filename}')
            except Exception:
                # Leave stragglers behind rather than failing the backup.
                pass
    except Exception:
        # Pruning must never break the upload that just succeeded.
        pass
def list_sftp_versions(target):
    """List available backup versions on the SFTP target, newest name first.

    Each entry is a dict with ``name``, ``size`` and ISO-8601 ``modified``.
    An absent remote directory simply yields an empty list.
    """
    sftp, transport = get_sftp_connection(target)
    try:
        remote_dir = target.remote_path.rstrip('/')
        try:
            entries = sftp.listdir_attr(remote_dir)
        except FileNotFoundError:
            # No remote directory means no backups have been made yet.
            return []
        versions = [
            {
                'name': entry.filename,
                'size': entry.st_size,
                'modified': (
                    datetime.fromtimestamp(entry.st_mtime, tz=timezone.utc).isoformat()
                    if entry.st_mtime else None
                ),
            }
            for entry in entries
            if entry.filename.startswith('backup_') and entry.filename.endswith('.zip')
        ]
        # Names embed the UTC timestamp, so lexical order == chronological.
        return sorted(versions, key=lambda v: v['name'], reverse=True)
    finally:
        sftp.close()
        transport.close()
def download_version_from_sftp(target, version_name):
    """Download a specific backup version from SFTP; return the temp file path.

    Streams in 16 MB chunks.  The caller must delete the returned file.
    On a download error the partially written temp file is removed before
    the exception propagates (previously it leaked, since the caller only
    cleans up paths it actually received).
    """
    sftp, transport = get_sftp_connection(target)
    try:
        remote_dir = target.remote_path.rstrip('/')
        remote_file = f'{remote_dir}/{version_name}'
        tmp = tempfile.NamedTemporaryFile(delete=False, suffix='.zip')
        tmp_path = tmp.name
        tmp.close()
        try:
            # Download in chunks so large archives don't need to fit in memory.
            with sftp.open(remote_file, 'rb') as remote:
                with open(tmp_path, 'wb') as local:
                    while True:
                        data = remote.read(16 * 1024 * 1024)
                        if not data:
                            break
                        local.write(data)
        except Exception:
            if os.path.exists(tmp_path):
                os.unlink(tmp_path)
            raise
        return tmp_path
    finally:
        sftp.close()
        transport.close()
def test_sftp_connection(target):
    """Test SFTP connectivity and write access; True on success, raises on error.

    Round-trips a small marker file to prove create/write/delete
    permission on the configured remote path.
    """
    sftp, transport = get_sftp_connection(target)
    try:
        remote_dir = target.remote_path.rstrip('/')
        ensure_remote_dir(sftp, remote_dir)
        test_file = f'{remote_dir}/.minicloud_test'
        # BUGFIX: close the handle before removing — the original
        # `sftp.open(...).write('ok')` left it open, leaking the handle and
        # breaking the remove() on servers that forbid delete-while-open.
        with sftp.open(test_file, 'w') as fh:
            fh.write('ok')
        sftp.remove(test_file)
        return True
    finally:
        sftp.close()
        transport.close()
+3
View File
@@ -27,5 +27,8 @@ cryptography==44.0.3
# KeePass Import
pykeepass==4.1.0
# SFTP Backup
paramiko==4.0.0
# Utilities
Pillow==11.1.0