# minmal-file-cloud-email-pim.../backend/app/services/sftp_backup.py

"""SFTP Backup Service - handles backup upload/download/versioning."""
import io
import json
import os
import sqlite3
import stat
import tempfile
import zipfile
from datetime import datetime, timezone
from pathlib import Path
import paramiko
def get_sftp_connection(target, app_config=None):
    """Create an SFTP connection from a BackupTarget.

    Prefers key-based auth (decrypted RSA private key), falling back to
    password auth. Returns a ``(sftp_client, transport)`` tuple; the caller
    is responsible for closing both.

    Args:
        target: BackupTarget with host/port/username and encrypted credentials.
        app_config: unused; kept for interface compatibility.

    Raises:
        ValueError: if neither a password nor a private key is configured.
    """
    from app.services.crypto_service import decrypt_field

    transport = paramiko.Transport((target.host, target.port))
    try:
        if target.private_key_encrypted:
            key_pem = decrypt_field(target.private_key_encrypted, 'backup-key')
            # NOTE(review): only RSA keys are supported here; Ed25519/ECDSA
            # keys would need paramiko's other key classes — confirm intent.
            pkey = paramiko.RSAKey.from_private_key(io.StringIO(key_pem))
            transport.connect(username=target.username, pkey=pkey)
        elif target.password_encrypted:
            password = decrypt_field(target.password_encrypted, 'backup-key')
            transport.connect(username=target.username, password=password)
        else:
            raise ValueError('Weder Passwort noch Private Key konfiguriert')
        return paramiko.SFTPClient.from_transport(transport), transport
    except Exception:
        # Fix: the original leaked the open transport (socket) whenever
        # decryption, authentication, or credential validation failed.
        transport.close()
        raise
def ensure_remote_dir(sftp, path):
    """Recursively create remote directories."""
    # Walk upward from `path` until an existing ancestor (or the root) is
    # found, collecting every missing component on the way.
    missing = []
    current = path
    while current and current not in ('/', '.'):
        try:
            sftp.stat(current)
        except FileNotFoundError:
            missing.append(current)
            current = os.path.dirname(current)
        else:
            break
    # Create the missing directories shallowest-first. A failing mkdir
    # (e.g. the directory appeared concurrently) is deliberately ignored.
    for directory in reversed(missing):
        try:
            sftp.mkdir(directory)
        except IOError:
            pass
def create_backup_zip(db_path, upload_path):
    """Create a backup ZIP (metadata + SQLite snapshot + uploaded files).

    Args:
        db_path: path to the live SQLite database file.
        upload_path: directory containing uploaded files (may not exist).

    Returns:
        Path of a temporary ZIP file; the caller is responsible for
        deleting it.
    """
    tmp = tempfile.NamedTemporaryFile(delete=False, suffix='.zip')
    tmp_path = tmp.name
    tmp.close()
    metadata = {
        'version': '1.0',
        'created_at': datetime.now(timezone.utc).isoformat(),
        'description': 'Mini-Cloud SFTP Backup',
    }
    with zipfile.ZipFile(tmp_path, 'w', zipfile.ZIP_DEFLATED, allowZip64=True) as zf:
        zf.writestr('metadata.json', json.dumps(metadata, indent=2))
        # Consistent DB snapshot via sqlite3's online backup API (safe even
        # while the app holds the database open).
        db_backup_path = tmp_path + '.db'
        try:
            source = sqlite3.connect(db_path)
            try:
                dest = sqlite3.connect(db_backup_path)
                try:
                    source.backup(dest)
                finally:
                    # Fix: connections were leaked when backup() raised.
                    dest.close()
            finally:
                source.close()
            zf.write(db_backup_path, 'database.sqlite3')
        finally:
            # The temp snapshot file is removed on every path.
            if os.path.exists(db_backup_path):
                os.unlink(db_backup_path)
        # Uploaded files, archived under files/ with their relative paths.
        upload_dir = Path(upload_path)
        if upload_dir.exists():
            for fp in upload_dir.rglob('*'):
                if fp.is_file():
                    zf.write(str(fp), 'files/' + str(fp.relative_to(upload_dir)))
    return tmp_path
def upload_backup_to_sftp(target, zip_path, app):
    """Upload a backup ZIP to the SFTP target in chunks.

    Args:
        target: BackupTarget describing the SFTP destination.
        zip_path: local path of the ZIP produced by create_backup_zip().
        app: unused here; kept for interface compatibility.

    Returns:
        The version name (``backup_<UTC timestamp>.zip``) written remotely.
    """
    timestamp = datetime.now(timezone.utc).strftime('%Y%m%d_%H%M%S')
    version_name = f'backup_{timestamp}.zip'
    sftp, transport = get_sftp_connection(target)
    try:
        remote_dir = target.remote_path.rstrip('/')
        ensure_remote_dir(sftp, remote_dir)
        remote_file = f'{remote_dir}/{version_name}'
        # Upload in 16 MiB chunks; pipelining overlaps writes with ACKs.
        chunk_size = 16 * 1024 * 1024
        with open(zip_path, 'rb') as local_file:
            with sftp.open(remote_file, 'wb') as remote:
                remote.set_pipelined(True)
                while True:
                    data = local_file.read(chunk_size)
                    if not data:
                        break
                    remote.write(data)
        # Fix: the original leaked the remote handle for latest.txt
        # (sftp.open(...).write(...) without close); also dropped the
        # unused file_size local.
        with sftp.open(f'{remote_dir}/latest.txt', 'w') as marker:
            marker.write(version_name)
        # Prune versions beyond the configured retention count.
        _cleanup_old_versions(sftp, remote_dir, target.max_versions)
        return version_name
    finally:
        sftp.close()
        transport.close()
def _cleanup_old_versions(sftp, remote_dir, max_versions):
    """Remove old backup versions exceeding max_versions."""
    try:
        candidates = [
            entry
            for entry in sftp.listdir_attr(remote_dir)
            if entry.filename.startswith('backup_') and entry.filename.endswith('.zip')
        ]
        # Newest first by mtime; everything past max_versions gets deleted.
        candidates.sort(key=lambda entry: entry.st_mtime or 0, reverse=True)
        for stale in candidates[max_versions:]:
            try:
                sftp.remove(f'{remote_dir}/{stale.filename}')
            except Exception:
                # A single failed delete must not abort the cleanup pass.
                pass
    except Exception:
        # Cleanup is best effort; never let it break the backup run.
        pass
def list_sftp_versions(target):
    """List available backup versions on SFTP target."""
    sftp, transport = get_sftp_connection(target)
    try:
        remote_dir = target.remote_path.rstrip('/')
        try:
            entries = sftp.listdir_attr(remote_dir)
        except FileNotFoundError:
            # A missing remote directory simply means no backups exist yet.
            return []

        def _describe(entry):
            # mtime may be absent in the SFTP attributes; report None then.
            modified = None
            if entry.st_mtime:
                modified = datetime.fromtimestamp(entry.st_mtime, tz=timezone.utc).isoformat()
            return {'name': entry.filename, 'size': entry.st_size, 'modified': modified}

        versions = [
            _describe(entry)
            for entry in entries
            if entry.filename.startswith('backup_') and entry.filename.endswith('.zip')
        ]
        # Version names embed a UTC timestamp, so lexicographic descending
        # order is newest-first.
        return sorted(versions, key=lambda item: item['name'], reverse=True)
    finally:
        sftp.close()
        transport.close()
def download_version_from_sftp(target, version_name):
    """Download a specific backup version from SFTP.

    Args:
        target: BackupTarget describing the SFTP source.
        version_name: remote file name, e.g. ``backup_20240101_120000.zip``.

    Returns:
        Path of a temporary local ZIP file; the caller must delete it.
    """
    sftp, transport = get_sftp_connection(target)
    try:
        remote_dir = target.remote_path.rstrip('/')
        remote_file = f'{remote_dir}/{version_name}'
        tmp = tempfile.NamedTemporaryFile(delete=False, suffix='.zip')
        tmp_path = tmp.name
        tmp.close()
        try:
            # Stream in 16 MiB chunks to bound memory use.
            with sftp.open(remote_file, 'rb') as remote:
                with open(tmp_path, 'wb') as local:
                    while True:
                        data = remote.read(16 * 1024 * 1024)
                        if not data:
                            break
                        local.write(data)
        except Exception:
            # Fix: the original left an orphaned temp file behind whenever
            # the remote read failed (missing version, dropped connection).
            os.unlink(tmp_path)
            raise
        return tmp_path
    finally:
        sftp.close()
        transport.close()
def test_sftp_connection(target):
    """Test SFTP connection. Returns True on success, raises on error."""
    sftp, transport = get_sftp_connection(target)
    try:
        remote_dir = target.remote_path.rstrip('/')
        ensure_remote_dir(sftp, remote_dir)
        # Verify write permission with a throwaway marker file.
        test_file = f'{remote_dir}/.minicloud_test'
        # Fix: close the remote handle before removing the file — the
        # original leaked the handle, and some servers refuse to delete a
        # file that still has an open handle.
        with sftp.open(test_file, 'w') as handle:
            handle.write('ok')
        sftp.remove(test_file)
        return True
    finally:
        sftp.close()
        transport.close()