import os
import time
from pathlib import Path

from flask import Flask, Response, redirect, send_from_directory
from flask_cors import CORS

from app.config import Config
from app.extensions import db, bcrypt, migrate


def _configure_timezone(tz_name: str) -> None:
    """Set the process timezone so datetime.now(), strftime %Z etc. use the
    configured TZ. Safe no-op when tzdata is missing."""
    if not tz_name:
        return
    os.environ['TZ'] = tz_name
    # time.tzset() only exists on POSIX; on other platforms setting the TZ
    # environment variable is the best we can do.
    tzset = getattr(time, 'tzset', None)
    if tzset:
        try:
            tzset()
        except Exception:
            # Deliberate best-effort: a bad/unknown TZ name must not abort
            # application startup.
            pass


def _auto_migrate(db):
    """Add missing columns to existing tables by comparing model definitions
    with the actual database schema.

    This handles the case where new columns are added to models but
    db.create_all() only creates new tables (it never alters existing ones).
    """
    import sqlalchemy

    with db.engine.connect() as conn:
        inspector = sqlalchemy.inspect(db.engine)
        for table_name, table in db.metadata.tables.items():
            # Brand-new tables are handled by db.create_all(); skip them here.
            if not inspector.has_table(table_name):
                continue
            existing_cols = {col['name'] for col in inspector.get_columns(table_name)}
            for column in table.columns:
                if column.name not in existing_cols:
                    # Render the column type for the active dialect.
                    col_type = column.type.compile(db.engine.dialect)
                    # Choose a DEFAULT clause so the column can be added to
                    # tables that already contain rows.
                    default = ''
                    if column.default is not None:
                        if hasattr(column.default, 'arg'):
                            val = column.default.arg
                            if callable(val):
                                # Context-sensitive defaults cannot be
                                # expressed as a DDL literal.
                                default = ''
                            elif isinstance(val, str):
                                default = f" DEFAULT '{val}'"
                            elif isinstance(val, bool):
                                # bool check must precede int: bool is a
                                # subclass of int in Python.
                                default = f" DEFAULT {1 if val else 0}"
                            elif isinstance(val, (int, float)):
                                default = f" DEFAULT {val}"
                    elif column.nullable:
                        default = ' DEFAULT NULL'
                    elif col_type.upper().startswith(('VARCHAR', 'TEXT')):
                        default = " DEFAULT ''"
                    elif col_type.upper().startswith(('INTEGER', 'BIGINT', 'FLOAT')):
                        default = " DEFAULT 0"
                    elif col_type.upper() == 'BOOLEAN':
                        default = " DEFAULT 0"
                    sql = f'ALTER TABLE "{table_name}" ADD COLUMN "{column.name}" {col_type}{default}'
                    try:
                        conn.execute(db.text(sql))
                        print(f'[Auto-Migrate] Added column {table_name}.{column.name} ({col_type})')
                    except Exception as e:
                        # Best-effort migration: log and continue with the
                        # remaining columns rather than failing startup.
                        print(f'[Auto-Migrate] Failed to add {table_name}.{column.name}: {e}')
        conn.commit()


def create_app(config_class=Config):
    """Application factory.

    Configures Flask, initializes extensions, registers blueprints and DAV
    discovery routes, serves the SPA frontend in production mode, runs
    create_all + auto-migration, and starts background services.
    """
    # Set the timezone as early as possible - before any datetime.now() calls.
    _configure_timezone(getattr(config_class, 'TIMEZONE', None) or os.environ.get('TZ'))

    # Check if a static frontend build exists (Docker production mode).
    static_dir = Path(__file__).resolve().parent.parent / 'static'
    if static_dir.exists():
        app = Flask(__name__, static_folder=str(static_dir), static_url_path='')
    else:
        app = Flask(__name__)
    app.config.from_object(config_class)

    # DAV clients are inconsistent about trailing slashes - therefore we
    # disable the strict check app-wide. Affects all blueprints.
    app.url_map.strict_slashes = False

    # Ensure data directories exist.
    Path(app.config['UPLOAD_PATH']).mkdir(parents=True, exist_ok=True)
    db_dir = Path(app.config['SQLALCHEMY_DATABASE_URI'].replace('sqlite:///', '')).parent
    db_dir.mkdir(parents=True, exist_ok=True)

    # Initialize extensions.
    db.init_app(app)
    bcrypt.init_app(app)
    migrate.init_app(app, db)

    # CORS
    CORS(app,
         resources={r'/api/*': {'origins': app.config['FRONTEND_URL']}},
         supports_credentials=True)

    # Register blueprints.
    from app.api import api_bp
    app.register_blueprint(api_bp)
    from app.dav import dav_bp
    app.register_blueprint(dav_bp)

    # Well-known URLs for CalDAV/CardDAV auto-discovery (iOS, DAVx5, etc.).
    # A 301 redirect on PROPFIND upsets some clients, so we delegate
    # internally to the DAV handlers instead of redirecting.
    from flask import request
    from app.dav.caldav import propfind as dav_propfind, options as dav_options

    def _wellknown_dav():
        if request.method == 'PROPFIND':
            return dav_propfind(subpath='')
        if request.method == 'OPTIONS':
            return dav_options()
        return redirect('/dav/', code=301)

    app.add_url_rule(
        '/.well-known/caldav',
        view_func=_wellknown_dav,
        methods=['GET', 'HEAD', 'PROPFIND', 'OPTIONS'],
        provide_automatic_options=False,
    )
    app.add_url_rule(
        '/.well-known/carddav',
        view_func=_wellknown_dav,
        endpoint='_wellknown_carddav',
        methods=['GET', 'HEAD', 'PROPFIND', 'OPTIONS'],
        provide_automatic_options=False,
    )

    # Root DAV discovery: DAVx5 and some other clients first try
    # PROPFIND/OPTIONS on / (hostname only) before using /.well-known.
    # We also respond with DAV properties here.
    def _root_dav():
        if request.method == 'PROPFIND':
            return dav_propfind(subpath='')
        if request.method == 'OPTIONS':
            return dav_options()
        # GET/HEAD: the SPA index is handled elsewhere - this view only
        # matches DAV methods.
        return Response('', 405)

    app.add_url_rule(
        '/',
        view_func=_root_dav,
        endpoint='_root_dav',
        methods=['PROPFIND', 'OPTIONS'],
        provide_automatic_options=False,
    )

    # iCal export (public, no auth).
    # BUG FIX: the rule previously lacked the <token> URL converter
    # ('/ical/'), so Flask could never supply the required `token` argument
    # and every request to this endpoint failed with a TypeError.
    @app.route('/ical/<token>')
    def ical_export_route(token):
        from app.api.calendar import ical_export as _ical_export
        return _ical_export(token)

    # Serve frontend SPA for all non-API routes (production/Docker).
    if static_dir.exists():
        @app.route('/')
        def serve_index():
            return send_from_directory(str(static_dir), 'index.html')

        @app.errorhandler(404)
        def serve_spa(e):
            # Unknown paths fall through to the SPA so client-side routing
            # can handle them.
            return send_from_directory(str(static_dir), 'index.html')

    # Create tables + auto-migrate missing columns.
    with app.app_context():
        from app import models  # noqa: F401
        db.create_all()
        # Auto-migrate: add missing columns to existing tables.
        _auto_migrate(db)
        # Enable WAL mode for SQLite (better read/write concurrency).
        with db.engine.connect() as conn:
            conn.execute(db.text('PRAGMA journal_mode=WAL'))
            conn.commit()

    # Start backup scheduler (only in the main process, not the reloader).
    if not app.debug or os.environ.get('WERKZEUG_RUN_MAIN') == 'true':
        from app.services.backup_scheduler import start_backup_scheduler
        start_backup_scheduler(app)

    # Check the NTP offset against the configured time server (not fatal).
    ntp_server = app.config.get('NTP_SERVER') or ''
    if ntp_server.strip():
        import threading
        from app.services.ntp_check import check_and_log
        threading.Thread(
            target=check_and_log,
            args=(ntp_server.strip(), app.logger),
            daemon=True,
        ).start()

    return app