diff --git a/Dockerfile b/Dockerfile index a54da53..e8a3231 100644 --- a/Dockerfile +++ b/Dockerfile @@ -7,6 +7,8 @@ RUN pip install --no-cache-dir -r requirements.txt COPY app/ ./app/ COPY config/ ./config/ +COPY alembic/ ./alembic/ +COPY alembic.ini . EXPOSE 8000 diff --git a/README.md b/README.md index 551ce5c..bd042e1 100644 --- a/README.md +++ b/README.md @@ -123,6 +123,25 @@ Beim Start wird `config/filters.yaml` automatisch importiert (konfigurierbar via | `YAML_SYNC_ON_STARTUP` | `true` | YAML-Datei beim Start importieren | | `DATABASE_URL` | `sqlite:///data/mailfilter.db` | Datenbank-Pfad | +## Datenbank-Migrationen + +Die Datenbank wird automatisch beim Start per **Alembic** migriert — Konten und Filterregeln bleiben bei Updates erhalten. + +Falls du lokal entwickelst und das Schema änderst: + +```bash +# Neue Migration erstellen (nach Änderung an db_models.py) +.venv/bin/alembic revision --autogenerate -m "beschreibung der änderung" + +# Migration anwenden +.venv/bin/alembic upgrade head + +# Migrationsstatus prüfen +.venv/bin/alembic current +``` + +Im Docker passiert das automatisch beim Container-Start. + ## Projektstruktur ``` @@ -141,6 +160,9 @@ Beim Start wird `config/filters.yaml` automatisch importiert (konfigurierbar via │ │ └── encryption.py # Passwort-Verschlüsselung │ ├── templates/ # Jinja2 HTML-Templates │ └── static/ # CSS + JS +├── alembic/ # Datenbank-Migrationen +│ ├── env.py +│ └── versions/ # Migrations-Skripte ├── config/filters.yaml # YAML-Filterkonfiguration ├── data/ # SQLite-Datenbank (Docker Volume) ├── docker-compose.yml diff --git a/alembic.ini b/alembic.ini new file mode 100644 index 0000000..807ded2 --- /dev/null +++ b/alembic.ini @@ -0,0 +1,149 @@ +# A generic, single database configuration. + +[alembic] +# path to migration scripts. +# this is typically a path given in POSIX (e.g. forward slashes) +# format, relative to the token %(here)s which refers to the location of this +# ini file +script_location = %(here)s/alembic + +# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s +# Uncomment the line below if you want the files to be prepended with date and time +# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file +# for all available tokens +# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s +# Or organize into date-based subdirectories (requires recursive_version_locations = true) +# file_template = %%(year)d/%%(month).2d/%%(day).2d_%%(hour).2d%%(minute).2d_%%(second).2d_%%(rev)s_%%(slug)s + +# sys.path path, will be prepended to sys.path if present. +# defaults to the current working directory. for multiple paths, the path separator +# is defined by "path_separator" below. +prepend_sys_path = . + + +# timezone to use when rendering the date within the migration file +# as well as the filename. +# If specified, requires the tzdata library which can be installed by adding +# `alembic[tz]` to the pip requirements. +# string value is passed to ZoneInfo() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; This defaults +# to /versions. 
When using multiple version +# directories, initial revisions must be specified with --version-path. +# The path separator used here should be the separator specified by "path_separator" +# below. +# version_locations = %(here)s/bar:%(here)s/bat:%(here)s/alembic/versions + +# path_separator; This indicates what character is used to split lists of file +# paths, including version_locations and prepend_sys_path within configparser +# files such as alembic.ini. +# The default rendered in new alembic.ini files is "os", which uses os.pathsep +# to provide os-dependent path splitting. +# +# Note that in order to support legacy alembic.ini files, this default does NOT +# take place if path_separator is not present in alembic.ini. If this +# option is omitted entirely, fallback logic is as follows: +# +# 1. Parsing of the version_locations option falls back to using the legacy +# "version_path_separator" key, which if absent then falls back to the legacy +# behavior of splitting on spaces and/or commas. +# 2. Parsing of the prepend_sys_path option falls back to the legacy +# behavior of splitting on spaces, commas, or colons. +# +# Valid values for path_separator are: +# +# path_separator = : +# path_separator = ; +# path_separator = space +# path_separator = newline +# +# Use os.pathsep. Default configuration used for new projects. +path_separator = os + +# set to 'true' to search source files recursively +# in each "version_locations" directory +# new in Alembic version 1.10 +# recursive_version_locations = false + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +# database URL. This is consumed by the user-maintained env.py script only. +# other means of configuring database URLs may be customized within the env.py +# file. +sqlalchemy.url = driver://user:pass@localhost/dbname + + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. See the documentation for further +# detail and examples + +# format using "black" - use the console_scripts runner, against the "black" entrypoint +# hooks = black +# black.type = console_scripts +# black.entrypoint = black +# black.options = -l 79 REVISION_SCRIPT_FILENAME + +# lint with attempts to fix using "ruff" - use the module runner, against the "ruff" module +# hooks = ruff +# ruff.type = module +# ruff.module = ruff +# ruff.options = check --fix REVISION_SCRIPT_FILENAME + +# Alternatively, use the exec runner to execute a binary found on your PATH +# hooks = ruff +# ruff.type = exec +# ruff.executable = ruff +# ruff.options = check --fix REVISION_SCRIPT_FILENAME + +# Logging configuration. This is also consumed by the user-maintained +# env.py script only. +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARNING +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARNING +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/alembic/README b/alembic/README new file mode 100644 index 0000000..98e4f9c --- /dev/null +++ b/alembic/README @@ -0,0 +1 @@ +Generic single-database configuration. 
\ No newline at end of file diff --git a/alembic/env.py b/alembic/env.py new file mode 100644 index 0000000..eaebda2 --- /dev/null +++ b/alembic/env.py @@ -0,0 +1,59 @@ +from logging.config import fileConfig + +from sqlalchemy import engine_from_config, pool + +from alembic import context + +from app.config import settings +from app.database import Base +from app.models.db_models import Account, FilterRule, FilterCondition, FilterAction # noqa: F401 + +config = context.config + +# Set DB URL from app config +config.set_main_option("sqlalchemy.url", settings.database_url) + +if config.config_file_name is not None: + fileConfig(config.config_file_name) + +target_metadata = Base.metadata + + +def render_item(type_, obj, autogen_context): + """Custom render for SQLite enum handling.""" + return False + + +def run_migrations_offline() -> None: + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + render_as_batch=True, # Required for SQLite ALTER TABLE + ) + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online() -> None: + connectable = engine_from_config( + config.get_section(config.config_ini_section, {}), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + with connectable.connect() as connection: + context.configure( + connection=connection, + target_metadata=target_metadata, + render_as_batch=True, # Required for SQLite ALTER TABLE + ) + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/alembic/script.py.mako b/alembic/script.py.mako new file mode 100644 index 0000000..1101630 --- /dev/null +++ b/alembic/script.py.mako @@ -0,0 +1,28 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision: str = ${repr(up_revision)} +down_revision: Union[str, Sequence[str], None] = ${repr(down_revision)} +branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} +depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} + + +def upgrade() -> None: + """Upgrade schema.""" + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + """Downgrade schema.""" + ${downgrades if downgrades else "pass"} diff --git a/alembic/versions/0ef2a4f77557_add_processed_mails_table.py b/alembic/versions/0ef2a4f77557_add_processed_mails_table.py new file mode 100644 index 0000000..1544671 --- /dev/null +++ b/alembic/versions/0ef2a4f77557_add_processed_mails_table.py @@ -0,0 +1,48 @@ +"""add processed_mails table + +Revision ID: 0ef2a4f77557 +Revises: 4f8cb93713e8 +Create Date: 2026-03-19 13:47:28.217052 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = '0ef2a4f77557' +down_revision: Union[str, Sequence[str], None] = '4f8cb93713e8' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Upgrade schema.""" + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table('processed_mails', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('account_id', sa.Integer(), nullable=False), + sa.Column('folder', sa.String(length=255), nullable=False), + sa.Column('mail_uid', sa.String(length=100), nullable=False), + sa.Column('mail_subject', sa.String(length=500), nullable=True), + sa.Column('mail_from', sa.String(length=255), nullable=True), + sa.Column('processed_at', sa.DateTime(), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False), + sa.ForeignKeyConstraint(['account_id'], ['accounts.id'], ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id') + ) + with op.batch_alter_table('processed_mails', schema=None) as batch_op: + batch_op.create_index(batch_op.f('ix_processed_mails_account_id'), ['account_id'], unique=False) + + # ### end Alembic commands ### + + +def downgrade() -> None: + """Downgrade schema.""" + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('processed_mails', schema=None) as batch_op: + batch_op.drop_index(batch_op.f('ix_processed_mails_account_id')) + + op.drop_table('processed_mails') + # ### end Alembic commands ### diff --git a/alembic/versions/4f8cb93713e8_add_filter_logs_table.py b/alembic/versions/4f8cb93713e8_add_filter_logs_table.py new file mode 100644 index 0000000..3cc26f8 --- /dev/null +++ b/alembic/versions/4f8cb93713e8_add_filter_logs_table.py @@ -0,0 +1,47 @@ +"""add filter_logs table + +Revision ID: 4f8cb93713e8 +Revises: c2a398ed74d6 +Create Date: 2026-03-19 13:37:33.433556 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = '4f8cb93713e8' +down_revision: Union[str, Sequence[str], None] = 'c2a398ed74d6' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Upgrade schema.""" + # ### commands auto generated by Alembic - please adjust! ### + op.create_table('filter_logs', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('account_id', sa.Integer(), nullable=True), + sa.Column('account_name', sa.String(length=100), nullable=False), + sa.Column('level', sa.Enum('INFO', 'WARNING', 'ERROR', 'SUCCESS', name='loglevel'), nullable=False), + sa.Column('message', sa.String(length=1000), nullable=False), + sa.Column('rule_name', sa.String(length=200), nullable=True), + sa.Column('action_type', sa.String(length=50), nullable=True), + sa.Column('mail_uid', sa.String(length=100), nullable=True), + sa.Column('mail_subject', sa.String(length=500), nullable=True), + sa.Column('mail_from', sa.String(length=255), nullable=True), + sa.Column('folder', sa.String(length=255), nullable=True), + sa.Column('details', sa.String(length=2000), nullable=True), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False), + sa.PrimaryKeyConstraint('id') + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + """Downgrade schema.""" + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_table('filter_logs') + # ### end Alembic commands ### diff --git a/alembic/versions/c2a398ed74d6_initial_schema.py b/alembic/versions/c2a398ed74d6_initial_schema.py new file mode 100644 index 0000000..639a419 --- /dev/null +++ b/alembic/versions/c2a398ed74d6_initial_schema.py @@ -0,0 +1,84 @@ +"""initial schema + +Revision ID: c2a398ed74d6 +Revises: +Create Date: 2026-03-19 13:28:01.842649 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = 'c2a398ed74d6' +down_revision: Union[str, Sequence[str], None] = None +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Upgrade schema.""" + # ### commands auto generated by Alembic - please adjust! ### + op.create_table('accounts', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('name', sa.String(length=100), nullable=False), + sa.Column('imap_host', sa.String(length=255), nullable=False), + sa.Column('imap_port', sa.Integer(), nullable=False), + sa.Column('use_ssl', sa.Boolean(), nullable=False), + sa.Column('username', sa.String(length=255), nullable=False), + sa.Column('password', sa.String(length=255), nullable=False), + sa.Column('smtp_host', sa.String(length=255), nullable=True), + sa.Column('smtp_port', sa.Integer(), nullable=True), + sa.Column('smtp_username', sa.String(length=255), nullable=True), + sa.Column('smtp_password', sa.String(length=255), nullable=True), + sa.Column('poll_interval_seconds', sa.Integer(), nullable=False), + sa.Column('enabled', sa.Boolean(), nullable=False), + sa.Column('last_poll_at', sa.DateTime(), nullable=True), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('filter_rules', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('account_id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=200), nullable=False), + sa.Column('priority', sa.Integer(), nullable=False), + sa.Column('enabled', sa.Boolean(), nullable=False), + sa.Column('stop_processing', sa.Boolean(), nullable=False), + sa.Column('source_folder', sa.String(length=255), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False), + sa.ForeignKeyConstraint(['account_id'], ['accounts.id'], ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('filter_actions', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('rule_id', sa.Integer(), nullable=False), + sa.Column('action_type', sa.Enum('MOVE', 'FORWARD', 'DELETE', 'MARK_READ', name='actiontype'), nullable=False), + sa.Column('parameter', sa.String(length=500), nullable=True), + sa.ForeignKeyConstraint(['rule_id'], ['filter_rules.id'], ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('filter_conditions', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('rule_id', sa.Integer(), nullable=False), + sa.Column('field', sa.Enum('FROM', 'TO', 'SUBJECT', 'BODY', 'HAS_ATTACHMENT', 'DATE', name='conditionfield'), nullable=False), + 
sa.Column('match_type', sa.Enum('CONTAINS', 'REGEX', 'EXACT', 'ON_DATE', 'BEFORE', 'AFTER', 'DATE_RANGE', 'YEAR', 'LAST_N_DAYS', 'LAST_N_WEEKS', 'LAST_N_MONTHS', 'OLDER_THAN_DAYS', 'OLDER_THAN_WEEKS', 'OLDER_THAN_MONTHS', name='matchtype'), nullable=False), + sa.Column('value', sa.String(length=500), nullable=False), + sa.Column('negate', sa.Boolean(), nullable=False), + sa.ForeignKeyConstraint(['rule_id'], ['filter_rules.id'], ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id') + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + """Downgrade schema.""" + # ### commands auto generated by Alembic - please adjust! ### + op.drop_table('filter_conditions') + op.drop_table('filter_actions') + op.drop_table('filter_rules') + op.drop_table('accounts') + # ### end Alembic commands ### diff --git a/app/main.py b/app/main.py index f79f2fb..330d93e 100644 --- a/app/main.py +++ b/app/main.py @@ -18,8 +18,12 @@ logger = logging.getLogger(__name__) @asynccontextmanager async def lifespan(app: FastAPI): logger.info("Starte IMAP Mail Filter Service...") - Base.metadata.create_all(bind=engine) - logger.info("Datenbank initialisiert.") + # Datenbank-Migration mit Alembic + from alembic.config import Config + from alembic import command + alembic_cfg = Config("alembic.ini") + command.upgrade(alembic_cfg, "head") + logger.info("Datenbank-Migration abgeschlossen.") if settings.yaml_sync_on_startup: from app.services.yaml_service import import_from_file result = import_from_file() @@ -40,11 +44,12 @@ from sqlalchemy.orm import Session # noqa: E402 from app.database import get_db # noqa: E402 from app.models.db_models import Account # noqa: E402 -from app.routers import accounts, filters, yaml_sync # noqa: E402 +from app.routers import accounts, filters, logs, yaml_sync # noqa: E402 app.include_router(accounts.router) app.include_router(filters.router) app.include_router(yaml_sync.router) +app.include_router(logs.router) # --- Web-UI Routen --- @@ -98,3 +103,9 @@ def filters_page(request: Request, account_id: int = 0, db: Session = Depends(ge @app.get("/yaml") def yaml_page(request: Request): return templates.TemplateResponse("yaml.html", {"request": request}) + + +@app.get("/logs") +def logs_page(request: Request, db: Session = Depends(get_db)): + accs = db.query(Account).order_by(Account.name).all() + return templates.TemplateResponse("logs.html", {"request": request, "accounts": accs}) diff --git a/app/models/db_models.py b/app/models/db_models.py index c25057e..c17c0ff 100644 --- a/app/models/db_models.py +++ b/app/models/db_models.py @@ -13,12 +13,25 @@ class ConditionField(str, enum.Enum): SUBJECT = "subject" BODY = "body" HAS_ATTACHMENT = "has_attachment" + DATE = "date" class MatchType(str, enum.Enum): CONTAINS = "contains" REGEX = "regex" EXACT = "exact" + # Date-specific match types + ON_DATE = "on_date" + BEFORE = "before" + AFTER = "after" + DATE_RANGE = "date_range" + YEAR = "year" + LAST_N_DAYS = "last_n_days" + LAST_N_WEEKS = "last_n_weeks" + LAST_N_MONTHS = "last_n_months" + OLDER_THAN_DAYS = "older_than_days" + OLDER_THAN_WEEKS = "older_than_weeks" + OLDER_THAN_MONTHS = "older_than_months" class ActionType(str, enum.Enum): @@ -101,3 +114,42 @@ class FilterAction(Base): parameter: Mapped[str | None] = mapped_column(String(500), nullable=True) rule: Mapped["FilterRule"] = relationship(back_populates="actions") + + +class LogLevel(str, enum.Enum): + INFO = "info" + WARNING = "warning" + ERROR = "error" + SUCCESS = "success" + + +class FilterLog(Base): + __tablename__ = 
"filter_logs" + + id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True) + account_id: Mapped[int | None] = mapped_column(Integer, nullable=True) + account_name: Mapped[str] = mapped_column(String(100), default="") + level: Mapped[LogLevel] = mapped_column(Enum(LogLevel), default=LogLevel.INFO) + message: Mapped[str] = mapped_column(String(1000)) + rule_name: Mapped[str | None] = mapped_column(String(200), nullable=True) + action_type: Mapped[str | None] = mapped_column(String(50), nullable=True) + mail_uid: Mapped[str | None] = mapped_column(String(100), nullable=True) + mail_subject: Mapped[str | None] = mapped_column(String(500), nullable=True) + mail_from: Mapped[str | None] = mapped_column(String(255), nullable=True) + folder: Mapped[str | None] = mapped_column(String(255), nullable=True) + details: Mapped[str | None] = mapped_column(String(2000), nullable=True) + created_at: Mapped[datetime] = mapped_column(DateTime, server_default=func.now()) + + +class ProcessedMail(Base): + __tablename__ = "processed_mails" + + id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True) + account_id: Mapped[int] = mapped_column(ForeignKey("accounts.id", ondelete="CASCADE"), index=True) + folder: Mapped[str] = mapped_column(String(255)) + mail_uid: Mapped[str] = mapped_column(String(100)) + mail_subject: Mapped[str | None] = mapped_column(String(500), nullable=True) + mail_from: Mapped[str | None] = mapped_column(String(255), nullable=True) + processed_at: Mapped[datetime] = mapped_column(DateTime, server_default=func.now()) + + account: Mapped["Account"] = relationship() diff --git a/app/routers/accounts.py b/app/routers/accounts.py index f98a2d8..b49fc86 100644 --- a/app/routers/accounts.py +++ b/app/routers/accounts.py @@ -2,15 +2,17 @@ from fastapi import APIRouter, Depends, HTTPException from sqlalchemy.orm import Session from app.database import get_db -from app.models.db_models import Account +from app.models.db_models import Account, ProcessedMail from app.schemas.schemas import ( AccountCreate, AccountListResponse, AccountResponse, AccountUpdate, ) +from pydantic import BaseModel + from app.services.encryption import decrypt, encrypt -from app.services.imap_client import async_test_connection +from app.services.imap_client import IMAPClient, async_test_connection router = APIRouter(prefix="/api/accounts", tags=["accounts"]) @@ -87,6 +89,26 @@ async def test_account_connection(account_id: int, db: Session = Depends(get_db) return {"success": success, "message": "Verbindung erfolgreich" if success else "Verbindung fehlgeschlagen"} +class TestConnectionRequest(BaseModel): + imap_host: str + imap_port: int = 993 + use_ssl: bool = True + username: str + password: str + + +@router.post("/test-connection") +async def test_connection_direct(data: TestConnectionRequest): + success = await async_test_connection( + host=data.imap_host, + port=data.imap_port, + username=data.username, + password=data.password, + use_ssl=data.use_ssl, + ) + return {"success": success, "message": "Verbindung erfolgreich" if success else "Verbindung fehlgeschlagen"} + + @router.post("/{account_id}/poll-now") async def poll_now(account_id: int, db: Session = Depends(get_db)): account = db.get(Account, account_id) @@ -95,3 +117,90 @@ async def poll_now(account_id: int, db: Session = Depends(get_db)): from app.services.scheduler import poll_account await poll_account(account_id) return {"message": f"Polling für '{account.name}' durchgeführt"} + + 
+@router.get("/{account_id}/processed") +def get_processed_stats(account_id: int, db: Session = Depends(get_db)): + account = db.get(Account, account_id) + if not account: + raise HTTPException(404, "Konto nicht gefunden") + total = db.query(ProcessedMail).filter(ProcessedMail.account_id == account_id).count() + return {"account_id": account_id, "processed_count": total} + + +@router.delete("/{account_id}/processed") +def reset_processed(account_id: int, folder: str | None = None, db: Session = Depends(get_db)): + account = db.get(Account, account_id) + if not account: + raise HTTPException(404, "Konto nicht gefunden") + query = db.query(ProcessedMail).filter(ProcessedMail.account_id == account_id) + if folder: + query = query.filter(ProcessedMail.folder == folder) + count = query.delete() + db.commit() + scope = f"Ordner '{folder}'" if folder else "alle Ordner" + return {"message": f"Verarbeitung zurückgesetzt für {scope} ({count} Einträge)", "deleted": count} + + +def _get_imap_client(account: Account) -> IMAPClient: + return IMAPClient( + host=account.imap_host, + port=account.imap_port, + username=account.username, + password=decrypt(account.password), + use_ssl=account.use_ssl, + ) + + +@router.get("/{account_id}/folders") +async def list_folders(account_id: int, debug: bool = False, db: Session = Depends(get_db)): + account = db.get(Account, account_id) + if not account: + raise HTTPException(404, "Konto nicht gefunden") + + import asyncio + def _list(): + client = _get_imap_client(account) + with client: + folders = client.list_folders() + raw = None + if debug: + status, data = client.conn.list() + raw = [item.decode("utf-8", errors="replace") if isinstance(item, bytes) else str(item) for item in (data or [])] + return folders, raw + + try: + folders, raw = await asyncio.to_thread(_list) + result = {"folders": folders} + if debug and raw is not None: + result["raw"] = raw + return result + except Exception as e: + raise HTTPException(500, f"Fehler beim Abrufen der Ordner: {e}") + + +class CreateFolderRequest(BaseModel): + folder_name: str + + +@router.post("/{account_id}/folders") +async def create_folder(account_id: int, data: CreateFolderRequest, db: Session = Depends(get_db)): + account = db.get(Account, account_id) + if not account: + raise HTTPException(404, "Konto nicht gefunden") + + import asyncio + def _create(): + client = _get_imap_client(account) + with client: + return client.create_folder(data.folder_name) + + try: + success = await asyncio.to_thread(_create) + if success: + return {"success": True, "message": f"Ordner '{data.folder_name}' erstellt"} + raise HTTPException(400, f"Ordner '{data.folder_name}' konnte nicht erstellt werden") + except HTTPException: + raise + except Exception as e: + raise HTTPException(500, f"Fehler: {e}") diff --git a/app/routers/filters.py b/app/routers/filters.py index 74bb151..fc83267 100644 --- a/app/routers/filters.py +++ b/app/routers/filters.py @@ -1,10 +1,26 @@ from fastapi import APIRouter, Depends, HTTPException from sqlalchemy.orm import Session +import logging + from app.database import get_db -from app.models.db_models import Account, FilterAction, FilterCondition, FilterRule +from app.models.db_models import Account, FilterAction, FilterCondition, FilterRule, ProcessedMail from app.schemas.schemas import FilterRuleCreate, FilterRuleResponse, FilterRuleUpdate +logger = logging.getLogger(__name__) + + +def _reset_processed_for_folder(db: Session, account_id: int, folder: str) -> int: + """Reset processed mails for a specific 
account/folder so they get re-evaluated.""" + count = ( + db.query(ProcessedMail) + .filter(ProcessedMail.account_id == account_id, ProcessedMail.folder == folder) + .delete() + ) + if count: + logger.info("Filter geändert: %d verarbeitete Mails in '%s' zurückgesetzt (Account %d)", count, folder, account_id) + return count + router = APIRouter(prefix="/api/filters", tags=["filters"]) @@ -54,6 +70,9 @@ def create_filter(data: FilterRuleCreate, db: Session = Depends(get_db)): action = FilterAction(rule_id=rule.id, **action_data.model_dump()) db.add(action) + # Neue Regel → Ordner zurücksetzen damit bestehende Mails geprüft werden + _reset_processed_for_folder(db, data.account_id, data.source_folder) + db.commit() db.refresh(rule) return rule @@ -65,6 +84,7 @@ def update_filter(rule_id: int, data: FilterRuleUpdate, db: Session = Depends(ge if not rule: raise HTTPException(404, "Filterregel nicht gefunden") + old_folder = rule.source_folder update_data = data.model_dump(exclude_unset=True) # Update conditions if provided @@ -88,6 +108,12 @@ def update_filter(rule_id: int, data: FilterRuleUpdate, db: Session = Depends(ge for key, value in update_data.items(): setattr(rule, key, value) + # Regel geändert → betroffene Ordner zurücksetzen + _reset_processed_for_folder(db, rule.account_id, old_folder) + new_folder = rule.source_folder + if new_folder != old_folder: + _reset_processed_for_folder(db, rule.account_id, new_folder) + db.commit() db.refresh(rule) return rule @@ -98,6 +124,8 @@ def delete_filter(rule_id: int, db: Session = Depends(get_db)): rule = db.get(FilterRule, rule_id) if not rule: raise HTTPException(404, "Filterregel nicht gefunden") + # Ordner zurücksetzen — andere Regeln könnten jetzt anders greifen + _reset_processed_for_folder(db, rule.account_id, rule.source_folder) db.delete(rule) db.commit() diff --git a/app/routers/logs.py b/app/routers/logs.py new file mode 100644 index 0000000..c9dd7aa --- /dev/null +++ b/app/routers/logs.py @@ -0,0 +1,55 @@ +from fastapi import APIRouter, Depends, Query +from sqlalchemy.orm import Session + +from app.database import get_db +from app.models.db_models import FilterLog + +router = APIRouter(prefix="/api/logs", tags=["logs"]) + + +@router.get("/") +def get_logs( + account_id: int | None = None, + level: str | None = None, + limit: int = Query(default=100, le=500), + offset: int = 0, + db: Session = Depends(get_db), +): + query = db.query(FilterLog).order_by(FilterLog.created_at.desc()) + if account_id: + query = query.filter(FilterLog.account_id == account_id) + if level: + query = query.filter(FilterLog.level == level) + total = query.count() + logs = query.offset(offset).limit(limit).all() + return { + "total": total, + "logs": [ + { + "id": log.id, + "account_id": log.account_id, + "account_name": log.account_name, + "level": log.level.value if log.level else "info", + "message": log.message, + "rule_name": log.rule_name, + "action_type": log.action_type, + "mail_uid": log.mail_uid, + "mail_subject": log.mail_subject, + "mail_from": log.mail_from, + "folder": log.folder, + "details": log.details, + "created_at": log.created_at.isoformat() if log.created_at else None, + } + for log in logs + ], + } + + +@router.delete("/") +def clear_logs(account_id: int | None = None, db: Session = Depends(get_db)): + query = db.query(FilterLog) + if account_id: + query = query.filter(FilterLog.account_id == account_id) + count = query.delete() + db.commit() + return {"deleted": count} diff --git a/app/routers/yaml_sync.py b/app/routers/yaml_sync.py 
index 3dd5f7e..cf0e373 100644 --- a/app/routers/yaml_sync.py +++ b/app/routers/yaml_sync.py @@ -1,8 +1,9 @@ from fastapi import APIRouter, Depends, UploadFile -from fastapi.responses import PlainTextResponse +from fastapi.responses import PlainTextResponse, Response from sqlalchemy.orm import Session from app.database import get_db +from app.services.backup_service import export_backup, import_backup from app.services.yaml_service import export_to_yaml, import_from_yaml router = APIRouter(prefix="/api/yaml", tags=["yaml"]) @@ -19,3 +20,21 @@ async def yaml_import(file: UploadFile, db: Session = Depends(get_db)): yaml_str = content.decode("utf-8") result = import_from_yaml(yaml_str, db) return result + + +@router.get("/backup") +def backup_export(db: Session = Depends(get_db)): + content = export_backup(db) + return Response( + content=content, + media_type="application/json", + headers={"Content-Disposition": "attachment; filename=mailfilter-backup.json"}, + ) + + +@router.post("/backup") +async def backup_import(file: UploadFile, db: Session = Depends(get_db)): + content = await file.read() + json_str = content.decode("utf-8") + result = import_backup(json_str, db) + return result diff --git a/app/services/backup_service.py b/app/services/backup_service.py new file mode 100644 index 0000000..0f9eee9 --- /dev/null +++ b/app/services/backup_service.py @@ -0,0 +1,217 @@ +import json +import logging +from datetime import datetime + +from sqlalchemy.orm import Session + +from app.database import SessionLocal +from app.models.db_models import Account, FilterAction, FilterCondition, FilterRule, ProcessedMail + +logger = logging.getLogger(__name__) + +BACKUP_VERSION = 1 + + +def export_backup(db: Session | None = None) -> str: + close_db = False + if db is None: + db = SessionLocal() + close_db = True + + try: + accounts = db.query(Account).order_by(Account.id).all() + data = { + "version": BACKUP_VERSION, + "exported_at": datetime.utcnow().isoformat(), + "accounts": [], + } + + for acc in accounts: + account_data = { + "name": acc.name, + "imap_host": acc.imap_host, + "imap_port": acc.imap_port, + "use_ssl": acc.use_ssl, + "username": acc.username, + "password": acc.password, # verschlüsselt + "smtp_host": acc.smtp_host, + "smtp_port": acc.smtp_port, + "smtp_username": acc.smtp_username, + "smtp_password": acc.smtp_password, # verschlüsselt + "poll_interval_seconds": acc.poll_interval_seconds, + "enabled": acc.enabled, + "filter_rules": [], + "processed_mails": [], + } + + for rule in sorted(acc.filter_rules, key=lambda r: r.priority): + rule_data = { + "name": rule.name, + "priority": rule.priority, + "enabled": rule.enabled, + "stop_processing": rule.stop_processing, + "source_folder": rule.source_folder, + "conditions": [ + { + "field": cond.field.value, + "match_type": cond.match_type.value, + "value": cond.value, + "negate": cond.negate, + } + for cond in rule.conditions + ], + "actions": [ + { + "action_type": action.action_type.value, + "parameter": action.parameter, + } + for action in rule.actions + ], + } + account_data["filter_rules"].append(rule_data) + + # Verarbeitete Mails exportieren + processed = ( + db.query(ProcessedMail) + .filter(ProcessedMail.account_id == acc.id) + .all() + ) + for pm in processed: + account_data["processed_mails"].append({ + "folder": pm.folder, + "mail_uid": pm.mail_uid, + "mail_subject": pm.mail_subject, + "mail_from": pm.mail_from, + "processed_at": pm.processed_at.isoformat() if pm.processed_at else None, + }) + + data["accounts"].append(account_data) 
+ + return json.dumps(data, ensure_ascii=False, indent=2) + finally: + if close_db: + db.close() + + +def import_backup(json_content: str, db: Session | None = None) -> dict: + close_db = False + if db is None: + db = SessionLocal() + close_db = True + + try: + data = json.loads(json_content) + + if "version" not in data or "accounts" not in data: + return {"error": "Ungültiges Backup-Format"} + + stats = { + "accounts_created": 0, + "accounts_updated": 0, + "rules_created": 0, + "processed_restored": 0, + } + + for acc_data in data["accounts"]: + # Konto suchen oder erstellen (Match über username + imap_host) + existing = ( + db.query(Account) + .filter( + Account.username == acc_data["username"], + Account.imap_host == acc_data["imap_host"], + ) + .first() + ) + + if existing: + account = existing + account.name = acc_data["name"] + account.imap_port = acc_data["imap_port"] + account.use_ssl = acc_data["use_ssl"] + account.password = acc_data["password"] + account.smtp_host = acc_data.get("smtp_host") + account.smtp_port = acc_data.get("smtp_port") + account.smtp_username = acc_data.get("smtp_username") + account.smtp_password = acc_data.get("smtp_password") + account.poll_interval_seconds = acc_data.get("poll_interval_seconds", 120) + account.enabled = acc_data.get("enabled", True) + stats["accounts_updated"] += 1 + + # Alte Regeln und processed löschen + db.query(FilterRule).filter(FilterRule.account_id == account.id).delete() + db.query(ProcessedMail).filter(ProcessedMail.account_id == account.id).delete() + db.flush() + else: + account = Account( + name=acc_data["name"], + imap_host=acc_data["imap_host"], + imap_port=acc_data["imap_port"], + use_ssl=acc_data["use_ssl"], + username=acc_data["username"], + password=acc_data["password"], + smtp_host=acc_data.get("smtp_host"), + smtp_port=acc_data.get("smtp_port"), + smtp_username=acc_data.get("smtp_username"), + smtp_password=acc_data.get("smtp_password"), + poll_interval_seconds=acc_data.get("poll_interval_seconds", 120), + enabled=acc_data.get("enabled", True), + ) + db.add(account) + stats["accounts_created"] += 1 + + db.flush() + + # Filterregeln importieren + for rule_data in acc_data.get("filter_rules", []): + rule = FilterRule( + account_id=account.id, + name=rule_data["name"], + priority=rule_data.get("priority", 100), + enabled=rule_data.get("enabled", True), + stop_processing=rule_data.get("stop_processing", False), + source_folder=rule_data.get("source_folder", "INBOX"), + ) + db.add(rule) + db.flush() + + for cond_data in rule_data.get("conditions", []): + db.add(FilterCondition( + rule_id=rule.id, + field=cond_data["field"], + match_type=cond_data["match_type"], + value=cond_data["value"], + negate=cond_data.get("negate", False), + )) + + for action_data in rule_data.get("actions", []): + db.add(FilterAction( + rule_id=rule.id, + action_type=action_data["action_type"], + parameter=action_data.get("parameter"), + )) + + stats["rules_created"] += 1 + + # Verarbeitete Mails wiederherstellen + for pm_data in acc_data.get("processed_mails", []): + db.add(ProcessedMail( + account_id=account.id, + folder=pm_data["folder"], + mail_uid=pm_data["mail_uid"], + mail_subject=pm_data.get("mail_subject"), + mail_from=pm_data.get("mail_from"), + )) + stats["processed_restored"] += 1 + + db.commit() + logger.info("Backup-Import abgeschlossen: %s", stats) + return stats + except json.JSONDecodeError: + return {"error": "Ungültiges JSON"} + except Exception as e: + db.rollback() + logger.error("Backup-Import fehlgeschlagen: %s", e) + return 
{"error": str(e)} + finally: + if close_db: + db.close() diff --git a/app/services/filter_engine.py b/app/services/filter_engine.py index 6206d50..a757079 100644 --- a/app/services/filter_engine.py +++ b/app/services/filter_engine.py @@ -1,5 +1,6 @@ import logging import re +from datetime import datetime, timedelta from app.models.db_models import ActionType, ConditionField, FilterAction, FilterCondition, FilterRule, MatchType from app.services.imap_client import IMAPClient, MailMessage @@ -19,10 +20,12 @@ def _get_field_value(mail: MailMessage, field: ConditionField) -> str: return mail.body case ConditionField.HAS_ATTACHMENT: return str(mail.has_attachment).lower() + case ConditionField.DATE: + return "" # Date is handled separately return "" -def _match(value: str, pattern: str, match_type: MatchType) -> bool: +def _match_text(value: str, pattern: str, match_type: MatchType) -> bool: match match_type: case MatchType.CONTAINS: return pattern.lower() in value.lower() @@ -37,17 +40,137 @@ def _match(value: str, pattern: str, match_type: MatchType) -> bool: return False -def evaluate_conditions(mail: MailMessage, conditions: list[FilterCondition]) -> bool: - if not conditions: +def _parse_date_value(value: str) -> datetime | None: + for fmt in ("%Y-%m-%d", "%d.%m.%Y", "%Y/%m/%d"): + try: + return datetime.strptime(value.strip(), fmt) + except ValueError: + continue + return None + + +def _match_date(mail_date: datetime | None, pattern: str, match_type: MatchType) -> bool: + if mail_date is None: return False + + now = datetime.utcnow() + mail_day = mail_date.replace(hour=0, minute=0, second=0, microsecond=0) + + match match_type: + case MatchType.ON_DATE: + target = _parse_date_value(pattern) + if not target: + return False + return mail_day == target + + case MatchType.BEFORE: + target = _parse_date_value(pattern) + if not target: + return False + return mail_day < target + + case MatchType.AFTER: + target = _parse_date_value(pattern) + if not target: + return False + return mail_day >= target + + case MatchType.DATE_RANGE: + # Format: "2024-01-01,2024-12-31" + parts = pattern.split(",") + if len(parts) != 2: + logger.warning("Ungültiges Datumsbereich-Format: %s (erwartet: start,ende)", pattern) + return False + start = _parse_date_value(parts[0]) + end = _parse_date_value(parts[1]) + if not start or not end: + return False + return start <= mail_day <= end + + case MatchType.YEAR: + try: + return mail_date.year == int(pattern.strip()) + except ValueError: + return False + + case MatchType.LAST_N_DAYS: + try: + n = int(pattern.strip()) + cutoff = now - timedelta(days=n) + return mail_date >= cutoff + except ValueError: + return False + + case MatchType.LAST_N_WEEKS: + try: + n = int(pattern.strip()) + cutoff = now - timedelta(weeks=n) + return mail_date >= cutoff + except ValueError: + return False + + case MatchType.LAST_N_MONTHS: + try: + n = int(pattern.strip()) + # Approximate: 30 days per month + cutoff = now - timedelta(days=n * 30) + return mail_date >= cutoff + except ValueError: + return False + + case MatchType.OLDER_THAN_DAYS: + try: + n = int(pattern.strip()) + cutoff = now - timedelta(days=n) + return mail_date < cutoff + except ValueError: + return False + + case MatchType.OLDER_THAN_WEEKS: + try: + n = int(pattern.strip()) + cutoff = now - timedelta(weeks=n) + return mail_date < cutoff + except ValueError: + return False + + case MatchType.OLDER_THAN_MONTHS: + try: + n = int(pattern.strip()) + cutoff = now - timedelta(days=n * 30) + return mail_date < cutoff + except 
ValueError: + return False + + return False + + +def evaluate_conditions(mail: MailMessage, conditions: list[FilterCondition]) -> tuple[bool, list[str]]: + """Returns (matched, details) where details explains each condition check.""" + details = [] + if not conditions: + return False, ["Keine Bedingungen definiert"] for cond in conditions: - field_value = _get_field_value(mail, cond.field) - result = _match(field_value, cond.value, cond.match_type) + if cond.field == ConditionField.DATE: + mail_date_str = mail.date.strftime("%Y-%m-%d %H:%M") if mail.date else "KEIN DATUM" + result = _match_date(mail.date, cond.value, cond.match_type) + neg = "NOT " if cond.negate else "" + details.append( + f"{neg}Datum({mail_date_str}) {cond.match_type.value} '{cond.value}' → {'JA' if result else 'NEIN'}" + ) + else: + field_value = _get_field_value(mail, cond.field) + result = _match_text(field_value, cond.value, cond.match_type) + neg = "NOT " if cond.negate else "" + short_val = field_value[:80] + "…" if len(field_value) > 80 else field_value + details.append( + f"{neg}{cond.field.value}('{short_val}') {cond.match_type.value} '{cond.value}' → {'JA' if result else 'NEIN'}" + ) if cond.negate: result = not result if not result: - return False - return True + return False, details + return True, details def execute_action( @@ -89,12 +212,18 @@ def apply_rules( smtp_config: dict | None = None, ) -> list[dict]: results = [] + eval_details = [] sorted_rules = sorted(rules, key=lambda r: r.priority) for rule in sorted_rules: if not rule.enabled: continue - if not evaluate_conditions(mail, rule.conditions): + + matched, details = evaluate_conditions(mail, rule.conditions) + eval_details.append({"rule": rule.name, "matched": matched, "details": details}) + + if not matched: + logger.debug("Regel '%s' trifft NICHT zu auf Mail %s: %s", rule.name, mail.uid, details) continue logger.info("Regel '%s' trifft auf Mail %s zu (Betreff: %s)", rule.name, mail.uid, mail.subject) @@ -113,4 +242,4 @@ def apply_rules( logger.info("stop_processing aktiv — keine weiteren Regeln für Mail %s", mail.uid) break - return results + return results, eval_details diff --git a/app/services/imap_client.py b/app/services/imap_client.py index 40027e3..96fccd3 100644 --- a/app/services/imap_client.py +++ b/app/services/imap_client.py @@ -1,9 +1,11 @@ import asyncio import email +import email.utils import imaplib import logging import smtplib from dataclasses import dataclass, field +from datetime import datetime from email.header import decode_header from email.message import Message from email.mime.text import MIMEText @@ -19,6 +21,7 @@ class MailMessage: subject: str = "" body: str = "" has_attachment: bool = False + date: datetime | None = None raw: Message | None = field(default=None, repr=False) @@ -35,6 +38,21 @@ def _decode_header_value(value: str | None) -> str: return " ".join(decoded) +def _parse_date(msg: Message) -> datetime | None: + date_str = msg.get("Date") + if not date_str: + return None + try: + parsed = email.utils.parsedate_to_datetime(date_str) + # Convert to naive UTC datetime for consistent comparison + if parsed.tzinfo is not None: + from datetime import timezone + parsed = parsed.astimezone(timezone.utc).replace(tzinfo=None) + return parsed + except Exception: + return None + + def _has_attachment(msg: Message) -> bool: if not msg.is_multipart(): return False @@ -120,44 +138,75 @@ class IMAPClient: return False def list_folders(self) -> list[str]: + import re status, data = self.conn.list() if status != "OK": return 
[] + # IMAP LIST response format: (\Flags) "delimiter" "folder name" + # Delimiter can be ".", "/", or other characters + pattern = re.compile(r'\(.*?\)\s+"(.?)"\s+(.*)') folders = [] for item in data: if isinstance(item, bytes): - parts = item.decode("utf-8", errors="replace").split(' "/" ') - if len(parts) >= 2: - folder_name = parts[-1].strip().strip('"') + line = item.decode("utf-8", errors="replace") + match = pattern.match(line) + if match: + folder_name = match.group(2).strip().strip('"') folders.append(folder_name) - return folders + else: + logger.debug("Konnte IMAP LIST Zeile nicht parsen: %s", line) + logger.info("IMAP Ordner geladen: %d Ordner gefunden", len(folders)) + return sorted(folders) + + def create_folder(self, folder_name: str) -> bool: + try: + status, _ = self.conn.create(folder_name) + if status == "OK": + logger.info("Ordner erstellt: %s", folder_name) + return True + logger.error("Ordner erstellen fehlgeschlagen: %s", folder_name) + return False + except Exception as e: + logger.error("Fehler beim Erstellen von Ordner '%s': %s", folder_name, e) + return False def fetch_unseen(self, folder: str = "INBOX") -> list[MailMessage]: + """Legacy: Fetch unseen mails. Use fetch_all_uids + fetch_mail for processed-tracking.""" + return self.fetch_mails_by_uids(folder, self.get_all_uids(folder, search="UNSEEN")) + + def get_all_uids(self, folder: str = "INBOX", search: str = "ALL") -> list[str]: self.conn.select(folder) - status, data = self.conn.uid("SEARCH", None, "UNSEEN") + status, data = self.conn.uid("SEARCH", None, search) if status != "OK" or not data[0]: return [] + return [uid.decode() if isinstance(uid, bytes) else str(uid) for uid in data[0].split()] - uids = data[0].split() + def fetch_mail(self, uid: str) -> MailMessage | None: + status, msg_data = self.conn.uid("FETCH", uid, "(RFC822)") + if status != "OK" or not msg_data[0]: + return None + raw_email = msg_data[0][1] + msg = email.message_from_bytes(raw_email) + return MailMessage( + uid=uid, + from_addr=_decode_header_value(msg.get("From")), + to_addr=_decode_header_value(msg.get("To")), + subject=_decode_header_value(msg.get("Subject")), + body=_extract_body(msg), + has_attachment=_has_attachment(msg), + date=_parse_date(msg), + raw=msg, + ) + + def fetch_mails_by_uids(self, folder: str, uids: list[str]) -> list[MailMessage]: + if not uids: + return [] + self.conn.select(folder) messages = [] for uid in uids: - uid_str = uid.decode() if isinstance(uid, bytes) else str(uid) - status, msg_data = self.conn.uid("FETCH", uid_str, "(RFC822)") - if status != "OK" or not msg_data[0]: - continue - raw_email = msg_data[0][1] - msg = email.message_from_bytes(raw_email) - messages.append( - MailMessage( - uid=uid_str, - from_addr=_decode_header_value(msg.get("From")), - to_addr=_decode_header_value(msg.get("To")), - subject=_decode_header_value(msg.get("Subject")), - body=_extract_body(msg), - has_attachment=_has_attachment(msg), - raw=msg, - ) - ) + mail = self.fetch_mail(uid) + if mail: + messages.append(mail) return messages def move_mail(self, uid: str, target_folder: str) -> bool: diff --git a/app/services/log_service.py b/app/services/log_service.py new file mode 100644 index 0000000..d80f3d2 --- /dev/null +++ b/app/services/log_service.py @@ -0,0 +1,66 @@ +import logging +from datetime import datetime, timedelta + +from sqlalchemy.orm import Session + +from app.database import SessionLocal +from app.models.db_models import FilterLog, LogLevel + +logger = logging.getLogger(__name__) + + +def write_log( + message: 
str, + level: LogLevel = LogLevel.INFO, + account_id: int | None = None, + account_name: str = "", + rule_name: str | None = None, + action_type: str | None = None, + mail_uid: str | None = None, + mail_subject: str | None = None, + mail_from: str | None = None, + folder: str | None = None, + details: str | None = None, + db: Session | None = None, +) -> None: + close_db = False + if db is None: + db = SessionLocal() + close_db = True + try: + entry = FilterLog( + account_id=account_id, + account_name=account_name, + level=level, + message=message, + rule_name=rule_name, + action_type=action_type, + mail_uid=mail_uid, + mail_subject=mail_subject[:500] if mail_subject else None, + mail_from=mail_from[:255] if mail_from else None, + folder=folder, + details=details[:2000] if details else None, + ) + db.add(entry) + db.commit() + except Exception as e: + logger.error("Fehler beim Schreiben des Logs: %s", e) + db.rollback() + finally: + if close_db: + db.close() + + +def cleanup_old_logs(days: int = 30, db: Session | None = None) -> int: + close_db = False + if db is None: + db = SessionLocal() + close_db = True + try: + cutoff = datetime.utcnow() - timedelta(days=days) + count = db.query(FilterLog).filter(FilterLog.created_at < cutoff).delete() + db.commit() + return count + finally: + if close_db: + db.close() diff --git a/app/services/scheduler.py b/app/services/scheduler.py index 874f947..0ecf0ce 100644 --- a/app/services/scheduler.py +++ b/app/services/scheduler.py @@ -6,10 +6,11 @@ from apscheduler.schedulers.asyncio import AsyncIOScheduler from sqlalchemy.orm import Session from app.database import SessionLocal -from app.models.db_models import Account, FilterRule +from app.models.db_models import Account, FilterRule, LogLevel, ProcessedMail from app.services.encryption import decrypt from app.services.filter_engine import apply_rules from app.services.imap_client import IMAPClient +from app.services.log_service import cleanup_old_logs, write_log logger = logging.getLogger(__name__) @@ -40,11 +41,21 @@ def _poll_account_sync(account_id: int) -> None: .order_by(FilterRule.priority) .all() ) + + write_log( + message=f"Poll gestartet ({len(rules)} aktive Regel(n))", + level=LogLevel.INFO, + account_id=account.id, + account_name=account.name, + details=", ".join(r.name for r in rules) if rules else "Keine Regeln konfiguriert", + db=db, + ) + if not rules: - logger.debug("Keine aktiven Regeln für Konto '%s'", account.name) + account.last_poll_at = datetime.utcnow() + db.commit() return - # Collect unique source folders source_folders = list({r.source_folder for r in rules}) smtp_config = _build_smtp_config(account) @@ -56,36 +67,189 @@ def _poll_account_sync(account_id: int) -> None: use_ssl=account.use_ssl, ) + total_mails = 0 + total_new = 0 + total_matched = 0 + total_actions = 0 + total_errors = 0 + with client: for folder in source_folders: folder_rules = [r for r in rules if r.source_folder == folder] + + # Alle UIDs im Ordner holen try: - messages = client.fetch_unseen(folder) + all_uids = client.get_all_uids(folder, search="ALL") except Exception as e: - logger.error("Fehler beim Abrufen von %s/%s: %s", account.name, folder, e) + write_log( + message=f"Fehler beim Abrufen von Ordner '{folder}'", + level=LogLevel.ERROR, + account_id=account.id, + account_name=account.name, + folder=folder, + details=str(e), + db=db, + ) continue - if messages: - logger.info( - "Konto '%s', Ordner '%s': %d ungelesene Mails", - account.name, folder, len(messages), - ) + total_mails += len(all_uids) - 
for mail in messages: - results = apply_rules(client, mail, folder_rules, smtp_config) - for r in results: - level = logging.INFO if r["success"] else logging.ERROR - logger.log( - level, - "Konto '%s': %s %s -> %s (%s)", - account.name, r["action"], r.get("parameter", ""), - "OK" if r["success"] else "FEHLER", r["rule"], + # Bereits verarbeitete UIDs aus DB laden + processed_uids = set( + row[0] for row in db.query(ProcessedMail.mail_uid) + .filter( + ProcessedMail.account_id == account.id, + ProcessedMail.folder == folder, + ) + .all() + ) + + # Neue (unverarbeitete) UIDs ermitteln + new_uids = [uid for uid in all_uids if uid not in processed_uids] + total_new += len(new_uids) + + if not new_uids: + write_log( + message=f"Keine neuen Mails in '{folder}' ({len(all_uids)} gesamt, alle bereits verarbeitet)", + level=LogLevel.INFO, + account_id=account.id, + account_name=account.name, + folder=folder, + db=db, + ) + continue + + write_log( + message=f"{len(new_uids)} neue Mail(s) in '{folder}' ({len(all_uids)} gesamt, {len(processed_uids)} bereits verarbeitet)", + level=LogLevel.INFO, + account_id=account.id, + account_name=account.name, + folder=folder, + db=db, + ) + + # Neue Mails abrufen und verarbeiten + for uid in new_uids: + try: + mail = client.fetch_mail(uid) + except Exception as e: + write_log( + message=f"Fehler beim Abrufen von Mail {uid}", + level=LogLevel.ERROR, + account_id=account.id, + account_name=account.name, + mail_uid=uid, + folder=folder, + details=str(e), + db=db, ) + continue + + if not mail: + continue + + results, eval_details = apply_rules(client, mail, folder_rules, smtp_config) + + # Eval-Details für Log aufbereiten + eval_summary = [] + for ev in eval_details: + status = "TREFFER" if ev["matched"] else "kein Treffer" + checks = " | ".join(ev["details"]) + eval_summary.append(f"Regel '{ev['rule']}': {status} [{checks}]") + + if not results: + write_log( + message=f"Keine Regel trifft zu", + level=LogLevel.INFO, + account_id=account.id, + account_name=account.name, + mail_uid=mail.uid, + mail_subject=mail.subject, + mail_from=mail.from_addr, + folder=folder, + details="\n".join(eval_summary), + db=db, + ) + else: + total_matched += 1 + for r in results: + action_label = r["action"] + param = r.get("parameter", "") + if param: + action_label += f" → {param}" + + if r["success"]: + total_actions += 1 + write_log( + message=f"Aktion ausgeführt: {action_label}", + level=LogLevel.SUCCESS, + account_id=account.id, + account_name=account.name, + rule_name=r["rule"], + action_type=r["action"], + mail_uid=r["mail_uid"], + mail_subject=mail.subject, + mail_from=mail.from_addr, + folder=folder, + details=param, + db=db, + ) + else: + total_errors += 1 + write_log( + message=f"Aktion fehlgeschlagen: {action_label}", + level=LogLevel.ERROR, + account_id=account.id, + account_name=account.name, + rule_name=r["rule"], + action_type=r["action"], + mail_uid=r["mail_uid"], + mail_subject=mail.subject, + mail_from=mail.from_addr, + folder=folder, + details=param, + db=db, + ) + + # Mail als verarbeitet markieren + db.add(ProcessedMail( + account_id=account.id, + folder=folder, + mail_uid=mail.uid, + mail_subject=mail.subject[:500] if mail.subject else None, + mail_from=mail.from_addr[:255] if mail.from_addr else None, + )) + db.flush() + + # Poll-Zusammenfassung + summary_parts = [ + f"{total_mails} Mail(s) im Ordner", + f"{total_new} neu", + f"{total_matched} Treffer", + f"{total_actions} Aktion(en)", + ] + if total_errors > 0: + summary_parts.append(f"{total_errors} Fehler") + + 
write_log( + message=f"Poll abgeschlossen: {', '.join(summary_parts)}", + level=LogLevel.ERROR if total_errors > 0 else LogLevel.SUCCESS if total_actions > 0 else LogLevel.INFO, + account_id=account.id, + account_name=account.name, + db=db, + ) account.last_poll_at = datetime.utcnow() db.commit() except Exception as e: logger.error("Fehler beim Polling von Konto %s: %s", account_id, e) + db.rollback() + write_log( + message=f"Polling fehlgeschlagen", + level=LogLevel.ERROR, + account_id=account_id, + details=str(e), + ) finally: db.close() @@ -128,6 +292,16 @@ def start_scheduler() -> None: add_account_job(account) finally: db.close() + + # Täglicher Cleanup alter Logs + scheduler.add_job( + lambda: asyncio.get_event_loop().run_in_executor(None, cleanup_old_logs, 30), + "interval", + hours=24, + id="cleanup_logs", + replace_existing=True, + ) + scheduler.start() logger.info("Scheduler gestartet mit %d Jobs", len(scheduler.get_jobs())) diff --git a/app/templates/account_form.html b/app/templates/account_form.html index f4ea8c1..c68c2d6 100644 --- a/app/templates/account_form.html +++ b/app/templates/account_form.html @@ -34,6 +34,8 @@ SSL verwenden + +
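As a quick way to exercise the configuration backup endpoints added in `app/routers/yaml_sync.py`, the calls below show one possible smoke test. The host and port are an assumption based on the Dockerfile's `EXPOSE 8000` (adjust to your deployment); the multipart field name `file` follows the `UploadFile` parameter in the router.

```bash
# Export the full configuration (accounts, rules, processed-mail state) to a JSON file
curl -o mailfilter-backup.json http://localhost:8000/api/yaml/backup

# Re-import the backup; the endpoint expects a multipart upload in the "file" field
curl -F "file=@mailfilter-backup.json" http://localhost:8000/api/yaml/backup
```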