adds folder handling, backup, attachments and logging

This commit is contained in:
duffyduck 2026-03-19 15:31:36 +01:00
parent 61c4384111
commit d148248682
29 changed files with 2298 additions and 115 deletions

View File

@ -7,6 +7,8 @@ RUN pip install --no-cache-dir -r requirements.txt
COPY app/ ./app/
COPY config/ ./config/
COPY alembic/ ./alembic/
COPY alembic.ini .
EXPOSE 8000

View File

@ -123,6 +123,25 @@ Beim Start wird `config/filters.yaml` automatisch importiert (konfigurierbar via
| `YAML_SYNC_ON_STARTUP` | `true` | YAML-Datei beim Start importieren |
| `DATABASE_URL` | `sqlite:///data/mailfilter.db` | Datenbank-Pfad |
## Datenbank-Migrationen
Die Datenbank wird automatisch beim Start per **Alembic** migriert — Konten und Filterregeln bleiben bei Updates erhalten.
Falls du lokal entwickelst und das Schema änderst:
```bash
# Neue Migration erstellen (nach Änderung an db_models.py)
.venv/bin/alembic revision --autogenerate -m "beschreibung der änderung"
# Migration anwenden
.venv/bin/alembic upgrade head
# Migrationsstatus prüfen
.venv/bin/alembic current
```
Im Docker passiert das automatisch beim Container-Start.
## Projektstruktur
```
@ -141,6 +160,9 @@ Beim Start wird `config/filters.yaml` automatisch importiert (konfigurierbar via
│ │ └── encryption.py # Passwort-Verschlüsselung
│ ├── templates/ # Jinja2 HTML-Templates
│ └── static/ # CSS + JS
├── alembic/ # Datenbank-Migrationen
│ ├── env.py
│ └── versions/ # Migrations-Skripte
├── config/filters.yaml # YAML-Filterkonfiguration
├── data/ # SQLite-Datenbank (Docker Volume)
├── docker-compose.yml

149
alembic.ini Normal file
View File

@ -0,0 +1,149 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts.
# this is typically a path given in POSIX (e.g. forward slashes)
# format, relative to the token %(here)s which refers to the location of this
# ini file
script_location = %(here)s/alembic
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
# Or organize into date-based subdirectories (requires recursive_version_locations = true)
# file_template = %%(year)d/%%(month).2d/%%(day).2d_%%(hour).2d%%(minute).2d_%%(second).2d_%%(rev)s_%%(slug)s
# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory. for multiple paths, the path separator
# is defined by "path_separator" below.
prepend_sys_path = .
# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the tzdata library which can be installed by adding
# `alembic[tz]` to the pip requirements.
# string value is passed to ZoneInfo()
# leave blank for localtime
# timezone =
# max length of characters to apply to the "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; This defaults
# to <script_location>/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "path_separator"
# below.
# version_locations = %(here)s/bar:%(here)s/bat:%(here)s/alembic/versions
# path_separator; This indicates what character is used to split lists of file
# paths, including version_locations and prepend_sys_path within configparser
# files such as alembic.ini.
# The default rendered in new alembic.ini files is "os", which uses os.pathsep
# to provide os-dependent path splitting.
#
# Note that in order to support legacy alembic.ini files, this default does NOT
# take place if path_separator is not present in alembic.ini. If this
# option is omitted entirely, fallback logic is as follows:
#
# 1. Parsing of the version_locations option falls back to using the legacy
# "version_path_separator" key, which if absent then falls back to the legacy
# behavior of splitting on spaces and/or commas.
# 2. Parsing of the prepend_sys_path option falls back to the legacy
# behavior of splitting on spaces, commas, or colons.
#
# Valid values for path_separator are:
#
# path_separator = :
# path_separator = ;
# path_separator = space
# path_separator = newline
#
# Use os.pathsep. Default configuration used for new projects.
path_separator = os
# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
# database URL. This is consumed by the user-maintained env.py script only.
# other means of configuring database URLs may be customized within the env.py
# file.
sqlalchemy.url = driver://user:pass@localhost/dbname
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME
# lint with attempts to fix using "ruff" - use the module runner, against the "ruff" module
# hooks = ruff
# ruff.type = module
# ruff.module = ruff
# ruff.options = check --fix REVISION_SCRIPT_FILENAME
# Alternatively, use the exec runner to execute a binary found on your PATH
# hooks = ruff
# ruff.type = exec
# ruff.executable = ruff
# ruff.options = check --fix REVISION_SCRIPT_FILENAME
# Logging configuration. This is also consumed by the user-maintained
# env.py script only.
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARNING
handlers = console
qualname =
[logger_sqlalchemy]
level = WARNING
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

1
alembic/README Normal file
View File

@ -0,0 +1 @@
Generic single-database configuration.

59
alembic/env.py Normal file
View File

@ -0,0 +1,59 @@
from logging.config import fileConfig
from sqlalchemy import engine_from_config, pool
from alembic import context
from app.config import settings
from app.database import Base
from app.models.db_models import Account, FilterRule, FilterCondition, FilterAction # noqa: F401
config = context.config
# Set DB URL from app config
config.set_main_option("sqlalchemy.url", settings.database_url)
if config.config_file_name is not None:
fileConfig(config.config_file_name)
target_metadata = Base.metadata
def render_item(type_, obj, autogen_context):
    """Custom render hook for SQLite enum handling.

    Returning False tells Alembic to fall back to its default rendering.
    NOTE(review): this hook is not passed to context.configure() anywhere in
    this file, so it currently has no effect — confirm whether it should be
    wired up via render_item=render_item.
    """
    return False
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode: emit SQL without a live DB connection."""
    opts = dict(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
        # SQLite cannot ALTER TABLE in place; batch mode recreates tables instead.
        render_as_batch=True,
    )
    context.configure(**opts)
    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online() -> None:
    """Run migrations in 'online' mode against a live database connection."""
    ini_section = config.get_section(config.config_ini_section, {})
    engine = engine_from_config(ini_section, prefix="sqlalchemy.", poolclass=pool.NullPool)
    with engine.connect() as conn:
        context.configure(
            connection=conn,
            target_metadata=target_metadata,
            render_as_batch=True,  # required for SQLite ALTER TABLE support
        )
        with context.begin_transaction():
            context.run_migrations()
# Entry point: Alembic selects offline mode for `alembic ... --sql` invocations.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

28
alembic/script.py.mako Normal file
View File

@ -0,0 +1,28 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, Sequence[str], None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
def upgrade() -> None:
"""Upgrade schema."""
${upgrades if upgrades else "pass"}
def downgrade() -> None:
"""Downgrade schema."""
${downgrades if downgrades else "pass"}

View File

@ -0,0 +1,48 @@
"""add processed_mails table
Revision ID: 0ef2a4f77557
Revises: 4f8cb93713e8
Create Date: 2026-03-19 13:47:28.217052
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '0ef2a4f77557'
down_revision: Union[str, Sequence[str], None] = '4f8cb93713e8'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Upgrade schema: add the processed_mails tracking table.

    Records which mail UID in which folder has already been handled per
    account, so already-seen mails are not re-processed.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('processed_mails',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('account_id', sa.Integer(), nullable=False),
        sa.Column('folder', sa.String(length=255), nullable=False),
        sa.Column('mail_uid', sa.String(length=100), nullable=False),
        sa.Column('mail_subject', sa.String(length=500), nullable=True),
        sa.Column('mail_from', sa.String(length=255), nullable=True),
        # Timestamp is filled in by the database on insert.
        sa.Column('processed_at', sa.DateTime(), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False),
        # Rows disappear automatically when their account is deleted.
        sa.ForeignKeyConstraint(['account_id'], ['accounts.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )
    # batch_alter_table because SQLite has limited ALTER TABLE support.
    with op.batch_alter_table('processed_mails', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('ix_processed_mails_account_id'), ['account_id'], unique=False)
    # ### end Alembic commands ###
def downgrade() -> None:
    """Downgrade schema: drop the processed_mails index and table."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('processed_mails', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_processed_mails_account_id'))
    op.drop_table('processed_mails')
    # ### end Alembic commands ###

View File

@ -0,0 +1,47 @@
"""add filter_logs table
Revision ID: 4f8cb93713e8
Revises: c2a398ed74d6
Create Date: 2026-03-19 13:37:33.433556
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '4f8cb93713e8'
down_revision: Union[str, Sequence[str], None] = 'c2a398ed74d6'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Upgrade schema: add the filter_logs audit table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('filter_logs',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        # NOTE(review): deliberately no FK here (unlike processed_mails) —
        # presumably so log rows survive account deletion; confirm.
        sa.Column('account_id', sa.Integer(), nullable=True),
        sa.Column('account_name', sa.String(length=100), nullable=False),
        sa.Column('level', sa.Enum('INFO', 'WARNING', 'ERROR', 'SUCCESS', name='loglevel'), nullable=False),
        sa.Column('message', sa.String(length=1000), nullable=False),
        # Optional context about the rule/action that triggered the entry.
        sa.Column('rule_name', sa.String(length=200), nullable=True),
        sa.Column('action_type', sa.String(length=50), nullable=True),
        # Optional context about the mail being processed.
        sa.Column('mail_uid', sa.String(length=100), nullable=True),
        sa.Column('mail_subject', sa.String(length=500), nullable=True),
        sa.Column('mail_from', sa.String(length=255), nullable=True),
        sa.Column('folder', sa.String(length=255), nullable=True),
        sa.Column('details', sa.String(length=2000), nullable=True),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    # ### end Alembic commands ###
def downgrade() -> None:
    """Downgrade schema: drop the filter_logs table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('filter_logs')
    # ### end Alembic commands ###

View File

@ -0,0 +1,84 @@
"""initial schema
Revision ID: c2a398ed74d6
Revises:
Create Date: 2026-03-19 13:28:01.842649
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = 'c2a398ed74d6'
down_revision: Union[str, Sequence[str], None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Upgrade schema: create the initial tables.

    accounts -> filter_rules -> (filter_actions, filter_conditions), with
    ON DELETE CASCADE so rules and their parts vanish with their parent.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('accounts',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('name', sa.String(length=100), nullable=False),
        sa.Column('imap_host', sa.String(length=255), nullable=False),
        sa.Column('imap_port', sa.Integer(), nullable=False),
        sa.Column('use_ssl', sa.Boolean(), nullable=False),
        sa.Column('username', sa.String(length=255), nullable=False),
        # NOTE(review): password columns appear to hold encrypted values
        # (see encryption service usage elsewhere); confirm.
        sa.Column('password', sa.String(length=255), nullable=False),
        sa.Column('smtp_host', sa.String(length=255), nullable=True),
        sa.Column('smtp_port', sa.Integer(), nullable=True),
        sa.Column('smtp_username', sa.String(length=255), nullable=True),
        sa.Column('smtp_password', sa.String(length=255), nullable=True),
        sa.Column('poll_interval_seconds', sa.Integer(), nullable=False),
        sa.Column('enabled', sa.Boolean(), nullable=False),
        sa.Column('last_poll_at', sa.DateTime(), nullable=True),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table('filter_rules',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('account_id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=200), nullable=False),
        sa.Column('priority', sa.Integer(), nullable=False),
        sa.Column('enabled', sa.Boolean(), nullable=False),
        sa.Column('stop_processing', sa.Boolean(), nullable=False),
        sa.Column('source_folder', sa.String(length=255), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False),
        sa.ForeignKeyConstraint(['account_id'], ['accounts.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table('filter_actions',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('rule_id', sa.Integer(), nullable=False),
        sa.Column('action_type', sa.Enum('MOVE', 'FORWARD', 'DELETE', 'MARK_READ', name='actiontype'), nullable=False),
        sa.Column('parameter', sa.String(length=500), nullable=True),
        sa.ForeignKeyConstraint(['rule_id'], ['filter_rules.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table('filter_conditions',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('rule_id', sa.Integer(), nullable=False),
        sa.Column('field', sa.Enum('FROM', 'TO', 'SUBJECT', 'BODY', 'HAS_ATTACHMENT', 'DATE', name='conditionfield'), nullable=False),
        sa.Column('match_type', sa.Enum('CONTAINS', 'REGEX', 'EXACT', 'ON_DATE', 'BEFORE', 'AFTER', 'DATE_RANGE', 'YEAR', 'LAST_N_DAYS', 'LAST_N_WEEKS', 'LAST_N_MONTHS', 'OLDER_THAN_DAYS', 'OLDER_THAN_WEEKS', 'OLDER_THAN_MONTHS', name='matchtype'), nullable=False),
        sa.Column('value', sa.String(length=500), nullable=False),
        sa.Column('negate', sa.Boolean(), nullable=False),
        sa.ForeignKeyConstraint(['rule_id'], ['filter_rules.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )
    # ### end Alembic commands ###
def downgrade() -> None:
    """Downgrade schema: drop all initial tables (children before parents)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('filter_conditions')
    op.drop_table('filter_actions')
    op.drop_table('filter_rules')
    op.drop_table('accounts')
    # ### end Alembic commands ###

View File

@ -18,8 +18,12 @@ logger = logging.getLogger(__name__)
@asynccontextmanager
async def lifespan(app: FastAPI):
logger.info("Starte IMAP Mail Filter Service...")
Base.metadata.create_all(bind=engine)
logger.info("Datenbank initialisiert.")
# Datenbank-Migration mit Alembic
from alembic.config import Config
from alembic import command
alembic_cfg = Config("alembic.ini")
command.upgrade(alembic_cfg, "head")
logger.info("Datenbank-Migration abgeschlossen.")
if settings.yaml_sync_on_startup:
from app.services.yaml_service import import_from_file
result = import_from_file()
@ -40,11 +44,12 @@ from sqlalchemy.orm import Session # noqa: E402
from app.database import get_db # noqa: E402
from app.models.db_models import Account # noqa: E402
from app.routers import accounts, filters, yaml_sync # noqa: E402
from app.routers import accounts, filters, logs, yaml_sync # noqa: E402
app.include_router(accounts.router)
app.include_router(filters.router)
app.include_router(yaml_sync.router)
app.include_router(logs.router)
# --- Web-UI Routen ---
@ -98,3 +103,9 @@ def filters_page(request: Request, account_id: int = 0, db: Session = Depends(ge
@app.get("/yaml")
def yaml_page(request: Request):
return templates.TemplateResponse("yaml.html", {"request": request})
@app.get("/logs")
def logs_page(request: Request, db: Session = Depends(get_db)):
    """Render the log page; accounts are passed for the account filter UI."""
    accounts = db.query(Account).order_by(Account.name).all()
    template_ctx = {"request": request, "accounts": accounts}
    return templates.TemplateResponse("logs.html", template_ctx)

View File

@ -13,12 +13,25 @@ class ConditionField(str, enum.Enum):
SUBJECT = "subject"
BODY = "body"
HAS_ATTACHMENT = "has_attachment"
DATE = "date"
class MatchType(str, enum.Enum):
    """How a filter condition's value is compared against the mail."""
    # Text match types
    CONTAINS = "contains"
    REGEX = "regex"
    EXACT = "exact"
    # Date-specific match types
    ON_DATE = "on_date"
    BEFORE = "before"
    AFTER = "after"
    DATE_RANGE = "date_range"
    YEAR = "year"
    # Relative date windows
    LAST_N_DAYS = "last_n_days"
    LAST_N_WEEKS = "last_n_weeks"
    LAST_N_MONTHS = "last_n_months"
    # Age thresholds
    OLDER_THAN_DAYS = "older_than_days"
    OLDER_THAN_WEEKS = "older_than_weeks"
    OLDER_THAN_MONTHS = "older_than_months"
class ActionType(str, enum.Enum):
@ -101,3 +114,42 @@ class FilterAction(Base):
parameter: Mapped[str | None] = mapped_column(String(500), nullable=True)
rule: Mapped["FilterRule"] = relationship(back_populates="actions")
class LogLevel(str, enum.Enum):
    """Severity of a filter log entry."""
    INFO = "info"
    WARNING = "warning"
    ERROR = "error"
    SUCCESS = "success"
class FilterLog(Base):
    """One persisted log event produced while processing mail filters."""
    __tablename__ = "filter_logs"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    # NOTE(review): no ForeignKey to accounts (unlike ProcessedMail) — presumably
    # intentional so log rows survive account deletion; confirm.
    account_id: Mapped[int | None] = mapped_column(Integer, nullable=True)
    account_name: Mapped[str] = mapped_column(String(100), default="")
    level: Mapped[LogLevel] = mapped_column(Enum(LogLevel), default=LogLevel.INFO)
    message: Mapped[str] = mapped_column(String(1000))
    # Optional context about the rule/action that produced the entry.
    rule_name: Mapped[str | None] = mapped_column(String(200), nullable=True)
    action_type: Mapped[str | None] = mapped_column(String(50), nullable=True)
    # Optional context about the mail that was being processed.
    mail_uid: Mapped[str | None] = mapped_column(String(100), nullable=True)
    mail_subject: Mapped[str | None] = mapped_column(String(500), nullable=True)
    mail_from: Mapped[str | None] = mapped_column(String(255), nullable=True)
    folder: Mapped[str | None] = mapped_column(String(255), nullable=True)
    details: Mapped[str | None] = mapped_column(String(2000), nullable=True)
    # Set by the database on insert (CURRENT_TIMESTAMP).
    created_at: Mapped[datetime] = mapped_column(DateTime, server_default=func.now())
class ProcessedMail(Base):
    """Marker that a mail UID in a folder has already been evaluated for an account."""
    __tablename__ = "processed_mails"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    # CASCADE: markers are dropped with their account; indexed for per-account lookups.
    account_id: Mapped[int] = mapped_column(ForeignKey("accounts.id", ondelete="CASCADE"), index=True)
    folder: Mapped[str] = mapped_column(String(255))
    # IMAP UID stored as string. NOTE(review): UIDs are only unique per
    # folder/UIDVALIDITY generation — confirm the poller accounts for that.
    mail_uid: Mapped[str] = mapped_column(String(100))
    mail_subject: Mapped[str | None] = mapped_column(String(500), nullable=True)
    mail_from: Mapped[str | None] = mapped_column(String(255), nullable=True)
    # Set by the database on insert.
    processed_at: Mapped[datetime] = mapped_column(DateTime, server_default=func.now())
    account: Mapped["Account"] = relationship()

View File

@ -2,15 +2,17 @@ from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.orm import Session
from app.database import get_db
from app.models.db_models import Account
from app.models.db_models import Account, ProcessedMail
from app.schemas.schemas import (
AccountCreate,
AccountListResponse,
AccountResponse,
AccountUpdate,
)
from pydantic import BaseModel
from app.services.encryption import decrypt, encrypt
from app.services.imap_client import async_test_connection
from app.services.imap_client import IMAPClient, async_test_connection
router = APIRouter(prefix="/api/accounts", tags=["accounts"])
@ -87,6 +89,26 @@ async def test_account_connection(account_id: int, db: Session = Depends(get_db)
return {"success": success, "message": "Verbindung erfolgreich" if success else "Verbindung fehlgeschlagen"}
class TestConnectionRequest(BaseModel):
    """Request body for testing IMAP credentials without a stored account."""
    imap_host: str
    imap_port: int = 993  # default IMAPS port
    use_ssl: bool = True
    username: str
    password: str  # plaintext in the request; used only for the probe
@router.post("/test-connection")
async def test_connection_direct(data: TestConnectionRequest):
    """Probe an IMAP server with ad-hoc credentials; nothing is persisted."""
    ok = await async_test_connection(
        host=data.imap_host,
        port=data.imap_port,
        username=data.username,
        password=data.password,
        use_ssl=data.use_ssl,
    )
    if ok:
        message = "Verbindung erfolgreich"
    else:
        message = "Verbindung fehlgeschlagen"
    return {"success": ok, "message": message}
@router.post("/{account_id}/poll-now")
async def poll_now(account_id: int, db: Session = Depends(get_db)):
account = db.get(Account, account_id)
@ -95,3 +117,90 @@ async def poll_now(account_id: int, db: Session = Depends(get_db)):
from app.services.scheduler import poll_account
await poll_account(account_id)
return {"message": f"Polling für '{account.name}' durchgeführt"}
@router.get("/{account_id}/processed")
def get_processed_stats(account_id: int, db: Session = Depends(get_db)):
    """Return how many mails have been recorded as processed for this account."""
    if db.get(Account, account_id) is None:
        raise HTTPException(404, "Konto nicht gefunden")
    processed = db.query(ProcessedMail).filter(ProcessedMail.account_id == account_id)
    return {"account_id": account_id, "processed_count": processed.count()}
@router.delete("/{account_id}/processed")
def reset_processed(account_id: int, folder: str | None = None, db: Session = Depends(get_db)):
    """Forget processed-mail markers for an account (optionally one folder only)."""
    if db.get(Account, account_id) is None:
        raise HTTPException(404, "Konto nicht gefunden")
    criteria = [ProcessedMail.account_id == account_id]
    if folder:
        criteria.append(ProcessedMail.folder == folder)
    deleted = db.query(ProcessedMail).filter(*criteria).delete()
    db.commit()
    if folder:
        scope = f"Ordner '{folder}'"
    else:
        scope = "alle Ordner"
    return {"message": f"Verarbeitung zurückgesetzt für {scope} ({deleted} Einträge)", "deleted": deleted}
def _get_imap_client(account: Account) -> IMAPClient:
    """Build an IMAPClient from a stored account, decrypting its password."""
    params = {
        "host": account.imap_host,
        "port": account.imap_port,
        "username": account.username,
        "password": decrypt(account.password),
        "use_ssl": account.use_ssl,
    }
    return IMAPClient(**params)
@router.get("/{account_id}/folders")
async def list_folders(account_id: int, debug: bool = False, db: Session = Depends(get_db)):
    """List the IMAP folders of an account.

    With debug=True the raw LIST response lines are additionally returned
    under "raw" to help diagnose folder-name/encoding issues.

    Raises:
        HTTPException 404: unknown account.
        HTTPException 500: IMAP connection or listing failed.
    """
    account = db.get(Account, account_id)
    if not account:
        raise HTTPException(404, "Konto nicht gefunden")
    import asyncio

    def _list():
        # Blocking imaplib work — run in a worker thread below.
        client = _get_imap_client(account)
        with client:
            folders = client.list_folders()
            raw = None
            if debug:
                status, data = client.conn.list()
                raw = [
                    item.decode("utf-8", errors="replace") if isinstance(item, bytes) else str(item)
                    for item in (data or [])
                ]
            return folders, raw

    # Keep the try body minimal and chain the cause so the original IMAP
    # error is preserved in server-side tracebacks (was: bare re-raise).
    try:
        folders, raw = await asyncio.to_thread(_list)
    except Exception as e:
        raise HTTPException(500, f"Fehler beim Abrufen der Ordner: {e}") from e
    result = {"folders": folders}
    if debug and raw is not None:
        result["raw"] = raw
    return result
class CreateFolderRequest(BaseModel):
    """Request body for creating a new IMAP folder on an account."""
    folder_name: str
@router.post("/{account_id}/folders")
async def create_folder(account_id: int, data: CreateFolderRequest, db: Session = Depends(get_db)):
    """Create a new folder on the account's IMAP server.

    Raises:
        HTTPException 404: unknown account.
        HTTPException 400: the server refused to create the folder.
        HTTPException 500: connection or protocol error.
    """
    account = db.get(Account, account_id)
    if not account:
        raise HTTPException(404, "Konto nicht gefunden")
    import asyncio

    def _create():
        # Blocking imaplib work — run in a worker thread below.
        client = _get_imap_client(account)
        with client:
            return client.create_folder(data.folder_name)

    # Narrow try body: only the IMAP call can raise here, so the previous
    # `except HTTPException: raise` escape hatch is no longer needed.
    try:
        success = await asyncio.to_thread(_create)
    except Exception as e:
        # Chain the cause so the original error survives in tracebacks.
        raise HTTPException(500, f"Fehler: {e}") from e
    if success:
        return {"success": True, "message": f"Ordner '{data.folder_name}' erstellt"}
    raise HTTPException(400, f"Ordner '{data.folder_name}' konnte nicht erstellt werden")

View File

@ -1,10 +1,26 @@
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.orm import Session
import logging
from app.database import get_db
from app.models.db_models import Account, FilterAction, FilterCondition, FilterRule
from app.models.db_models import Account, FilterAction, FilterCondition, FilterRule, ProcessedMail
from app.schemas.schemas import FilterRuleCreate, FilterRuleResponse, FilterRuleUpdate
logger = logging.getLogger(__name__)
def _reset_processed_for_folder(db: Session, account_id: int, folder: str) -> int:
    """Drop processed-mail markers for one account/folder so mails get re-evaluated.

    Returns the number of deleted rows.
    """
    deleted = (
        db.query(ProcessedMail)
        .filter(ProcessedMail.account_id == account_id)
        .filter(ProcessedMail.folder == folder)
        .delete()
    )
    if deleted:
        logger.info(
            "Filter geändert: %d verarbeitete Mails in '%s' zurückgesetzt (Account %d)",
            deleted,
            folder,
            account_id,
        )
    return deleted
router = APIRouter(prefix="/api/filters", tags=["filters"])
@ -54,6 +70,9 @@ def create_filter(data: FilterRuleCreate, db: Session = Depends(get_db)):
action = FilterAction(rule_id=rule.id, **action_data.model_dump())
db.add(action)
# Neue Regel → Ordner zurücksetzen damit bestehende Mails geprüft werden
_reset_processed_for_folder(db, data.account_id, data.source_folder)
db.commit()
db.refresh(rule)
return rule
@ -65,6 +84,7 @@ def update_filter(rule_id: int, data: FilterRuleUpdate, db: Session = Depends(ge
if not rule:
raise HTTPException(404, "Filterregel nicht gefunden")
old_folder = rule.source_folder
update_data = data.model_dump(exclude_unset=True)
# Update conditions if provided
@ -88,6 +108,12 @@ def update_filter(rule_id: int, data: FilterRuleUpdate, db: Session = Depends(ge
for key, value in update_data.items():
setattr(rule, key, value)
# Regel geändert → betroffene Ordner zurücksetzen
_reset_processed_for_folder(db, rule.account_id, old_folder)
new_folder = rule.source_folder
if new_folder != old_folder:
_reset_processed_for_folder(db, rule.account_id, new_folder)
db.commit()
db.refresh(rule)
return rule
@ -98,6 +124,8 @@ def delete_filter(rule_id: int, db: Session = Depends(get_db)):
rule = db.get(FilterRule, rule_id)
if not rule:
raise HTTPException(404, "Filterregel nicht gefunden")
# Ordner zurücksetzen — andere Regeln könnten jetzt anders greifen
_reset_processed_for_folder(db, rule.account_id, rule.source_folder)
db.delete(rule)
db.commit()

55
app/routers/logs.py Normal file
View File

@ -0,0 +1,55 @@
from fastapi import APIRouter, Depends, Query
from sqlalchemy.orm import Session
from app.database import get_db
from app.models.db_models import FilterLog
router = APIRouter(prefix="/api/logs", tags=["logs"])
@router.get("/")
def get_logs(
    account_id: int | None = None,
    level: str | None = None,
    limit: int = Query(default=100, le=500),
    offset: int = 0,
    db: Session = Depends(get_db),
):
    """Return filter log entries, newest first, with optional filters and paging.

    Args:
        account_id: restrict to one account.
        level: restrict to one log level value (e.g. "error").
        limit: page size, capped at 500.
        offset: number of entries to skip.

    Returns:
        dict with "total" (count of all matching rows) and "logs" (current page).
    """
    query = db.query(FilterLog).order_by(FilterLog.created_at.desc())
    # `is not None` instead of truthiness: account_id=0 must filter by 0,
    # not silently return every account's logs.
    if account_id is not None:
        query = query.filter(FilterLog.account_id == account_id)
    if level:
        query = query.filter(FilterLog.level == level)
    total = query.count()  # total matches, independent of paging
    logs = query.offset(offset).limit(limit).all()
    return {
        "total": total,
        "logs": [
            {
                "id": log.id,
                "account_id": log.account_id,
                "account_name": log.account_name,
                "level": log.level.value if log.level else "info",
                "message": log.message,
                "rule_name": log.rule_name,
                "action_type": log.action_type,
                "mail_uid": log.mail_uid,
                "mail_subject": log.mail_subject,
                "mail_from": log.mail_from,
                "folder": log.folder,
                "details": log.details,
                "created_at": log.created_at.isoformat() if log.created_at else None,
            }
            for log in logs
        ],
    }
@router.delete("/")
def clear_logs(account_id: int | None = None, db: Session = Depends(get_db)):
    """Delete filter log entries — all of them, or only one account's.

    Returns:
        dict with "deleted": number of removed rows.
    """
    query = db.query(FilterLog)
    # `is not None` instead of truthiness: account_id=0 must clear only that
    # account's logs, not wipe the whole table.
    if account_id is not None:
        query = query.filter(FilterLog.account_id == account_id)
    count = query.delete()
    db.commit()
    return {"deleted": count}

View File

@ -1,8 +1,9 @@
from fastapi import APIRouter, Depends, UploadFile
from fastapi.responses import PlainTextResponse
from fastapi.responses import PlainTextResponse, Response
from sqlalchemy.orm import Session
from app.database import get_db
from app.services.backup_service import export_backup, import_backup
from app.services.yaml_service import export_to_yaml, import_from_yaml
router = APIRouter(prefix="/api/yaml", tags=["yaml"])
@ -19,3 +20,21 @@ async def yaml_import(file: UploadFile, db: Session = Depends(get_db)):
yaml_str = content.decode("utf-8")
result = import_from_yaml(yaml_str, db)
return result
@router.get("/backup")
def backup_export(db: Session = Depends(get_db)):
    """Download the full configuration as a JSON backup file."""
    disposition = {"Content-Disposition": "attachment; filename=mailfilter-backup.json"}
    return Response(
        content=export_backup(db),
        media_type="application/json",
        headers=disposition,
    )
@router.post("/backup")
async def backup_import(file: UploadFile, db: Session = Depends(get_db)):
    """Restore configuration from an uploaded JSON backup file."""
    raw = await file.read()
    return import_backup(raw.decode("utf-8"), db)

View File

@ -0,0 +1,217 @@
import json
import logging
from datetime import datetime
from sqlalchemy.orm import Session
from app.database import SessionLocal
from app.models.db_models import Account, FilterAction, FilterCondition, FilterRule, ProcessedMail
logger = logging.getLogger(__name__)
BACKUP_VERSION = 1
def export_backup(db: Session | None = None) -> str:
    """Serialize all accounts, filter rules and processed-mail markers to JSON.

    Passwords are exported exactly as stored (encrypted), so a backup is only
    restorable by an installation sharing the same encryption key.

    Args:
        db: optional existing session; if None a temporary one is opened and
            closed again.

    Returns:
        Pretty-printed JSON string with keys version/exported_at/accounts.
    """
    from datetime import timezone  # local: module top only imports `datetime`

    close_db = False
    if db is None:
        db = SessionLocal()
        close_db = True
    try:
        accounts = db.query(Account).order_by(Account.id).all()
        data = {
            "version": BACKUP_VERSION,
            # Timezone-aware UTC timestamp; datetime.utcnow() is naive and
            # deprecated since Python 3.12.
            "exported_at": datetime.now(timezone.utc).isoformat(),
            "accounts": [],
        }
        for acc in accounts:
            account_data = {
                "name": acc.name,
                "imap_host": acc.imap_host,
                "imap_port": acc.imap_port,
                "use_ssl": acc.use_ssl,
                "username": acc.username,
                "password": acc.password,  # encrypted, as stored
                "smtp_host": acc.smtp_host,
                "smtp_port": acc.smtp_port,
                "smtp_username": acc.smtp_username,
                "smtp_password": acc.smtp_password,  # encrypted, as stored
                "poll_interval_seconds": acc.poll_interval_seconds,
                "enabled": acc.enabled,
                "filter_rules": [],
                "processed_mails": [],
            }
            # Export rules in priority order so a restore preserves ordering.
            for rule in sorted(acc.filter_rules, key=lambda r: r.priority):
                rule_data = {
                    "name": rule.name,
                    "priority": rule.priority,
                    "enabled": rule.enabled,
                    "stop_processing": rule.stop_processing,
                    "source_folder": rule.source_folder,
                    "conditions": [
                        {
                            "field": cond.field.value,
                            "match_type": cond.match_type.value,
                            "value": cond.value,
                            "negate": cond.negate,
                        }
                        for cond in rule.conditions
                    ],
                    "actions": [
                        {
                            "action_type": action.action_type.value,
                            "parameter": action.parameter,
                        }
                        for action in rule.actions
                    ],
                }
                account_data["filter_rules"].append(rule_data)
            # Export processed-mail markers so a restore does not re-process old mail.
            processed = (
                db.query(ProcessedMail)
                .filter(ProcessedMail.account_id == acc.id)
                .all()
            )
            for pm in processed:
                account_data["processed_mails"].append({
                    "folder": pm.folder,
                    "mail_uid": pm.mail_uid,
                    "mail_subject": pm.mail_subject,
                    "mail_from": pm.mail_from,
                    "processed_at": pm.processed_at.isoformat() if pm.processed_at else None,
                })
            data["accounts"].append(account_data)
        return json.dumps(data, ensure_ascii=False, indent=2)
    finally:
        if close_db:
            db.close()
def import_backup(json_content: str, db: Session | None = None) -> dict:
    """Restore accounts, filter rules and processed-mail markers from a
    JSON backup produced by export_backup().

    Accounts are matched by (username, imap_host): existing accounts are
    updated in place (their rules and processed markers are replaced);
    unknown accounts are created. Passwords are taken over in their
    encrypted form.

    Args:
        json_content: JSON document string.
        db: Optional open session; if omitted, one is created and closed here.

    Returns:
        Statistics dict on success, or {"error": ...} on failure.
    """
    close_db = False
    if db is None:
        db = SessionLocal()
        close_db = True
    try:
        data = json.loads(json_content)
        if "version" not in data or "accounts" not in data:
            return {"error": "Ungültiges Backup-Format"}
        stats = {
            "accounts_created": 0,
            "accounts_updated": 0,
            "rules_created": 0,
            "processed_restored": 0,
        }
        for acc_data in data["accounts"]:
            # Find or create the account (matched via username + imap_host)
            existing = (
                db.query(Account)
                .filter(
                    Account.username == acc_data["username"],
                    Account.imap_host == acc_data["imap_host"],
                )
                .first()
            )
            if existing:
                account = existing
                account.name = acc_data["name"]
                account.imap_port = acc_data["imap_port"]
                account.use_ssl = acc_data["use_ssl"]
                account.password = acc_data["password"]
                account.smtp_host = acc_data.get("smtp_host")
                account.smtp_port = acc_data.get("smtp_port")
                account.smtp_username = acc_data.get("smtp_username")
                account.smtp_password = acc_data.get("smtp_password")
                account.poll_interval_seconds = acc_data.get("poll_interval_seconds", 120)
                account.enabled = acc_data.get("enabled", True)
                stats["accounts_updated"] += 1
                # Drop old rules and processed markers before re-importing.
                # NOTE(review): bulk .delete() bypasses ORM cascade — assumes
                # the schema cascades FilterRule -> conditions/actions at the
                # DB level; verify against the migrations.
                db.query(FilterRule).filter(FilterRule.account_id == account.id).delete()
                db.query(ProcessedMail).filter(ProcessedMail.account_id == account.id).delete()
                db.flush()
            else:
                account = Account(
                    name=acc_data["name"],
                    imap_host=acc_data["imap_host"],
                    imap_port=acc_data["imap_port"],
                    use_ssl=acc_data["use_ssl"],
                    username=acc_data["username"],
                    password=acc_data["password"],
                    smtp_host=acc_data.get("smtp_host"),
                    smtp_port=acc_data.get("smtp_port"),
                    smtp_username=acc_data.get("smtp_username"),
                    smtp_password=acc_data.get("smtp_password"),
                    poll_interval_seconds=acc_data.get("poll_interval_seconds", 120),
                    enabled=acc_data.get("enabled", True),
                )
                db.add(account)
                stats["accounts_created"] += 1
            db.flush()  # ensure account.id is available for FK references
            # Import filter rules
            for rule_data in acc_data.get("filter_rules", []):
                rule = FilterRule(
                    account_id=account.id,
                    name=rule_data["name"],
                    priority=rule_data.get("priority", 100),
                    enabled=rule_data.get("enabled", True),
                    stop_processing=rule_data.get("stop_processing", False),
                    source_folder=rule_data.get("source_folder", "INBOX"),
                )
                db.add(rule)
                db.flush()
                for cond_data in rule_data.get("conditions", []):
                    db.add(FilterCondition(
                        rule_id=rule.id,
                        field=cond_data["field"],
                        match_type=cond_data["match_type"],
                        value=cond_data["value"],
                        negate=cond_data.get("negate", False),
                    ))
                for action_data in rule_data.get("actions", []):
                    db.add(FilterAction(
                        rule_id=rule.id,
                        action_type=action_data["action_type"],
                        parameter=action_data.get("parameter"),
                    ))
                stats["rules_created"] += 1
            # Restore processed-mail markers
            for pm_data in acc_data.get("processed_mails", []):
                pm = ProcessedMail(
                    account_id=account.id,
                    folder=pm_data["folder"],
                    mail_uid=pm_data["mail_uid"],
                    mail_subject=pm_data.get("mail_subject"),
                    mail_from=pm_data.get("mail_from"),
                )
                # Bug fix: export_backup writes processed_at, but it was
                # previously discarded on restore; keep the original
                # timestamp when it parses (otherwise the column default).
                ts = pm_data.get("processed_at")
                if ts:
                    try:
                        pm.processed_at = datetime.fromisoformat(ts)
                    except ValueError:
                        pass
                db.add(pm)
                stats["processed_restored"] += 1
        db.commit()
        logger.info("Backup-Import abgeschlossen: %s", stats)
        return stats
    except json.JSONDecodeError:
        return {"error": "Ungültiges JSON"}
    except Exception as e:
        db.rollback()
        logger.error("Backup-Import fehlgeschlagen: %s", e)
        return {"error": str(e)}
    finally:
        if close_db:
            db.close()

View File

@ -1,5 +1,6 @@
import logging
import re
from datetime import datetime, timedelta
from app.models.db_models import ActionType, ConditionField, FilterAction, FilterCondition, FilterRule, MatchType
from app.services.imap_client import IMAPClient, MailMessage
@ -19,10 +20,12 @@ def _get_field_value(mail: MailMessage, field: ConditionField) -> str:
return mail.body
case ConditionField.HAS_ATTACHMENT:
return str(mail.has_attachment).lower()
case ConditionField.DATE:
return "" # Date is handled separately
return ""
def _match(value: str, pattern: str, match_type: MatchType) -> bool:
def _match_text(value: str, pattern: str, match_type: MatchType) -> bool:
match match_type:
case MatchType.CONTAINS:
return pattern.lower() in value.lower()
@ -37,17 +40,137 @@ def _match(value: str, pattern: str, match_type: MatchType) -> bool:
return False
def evaluate_conditions(mail: MailMessage, conditions: list[FilterCondition]) -> bool:
if not conditions:
def _parse_date_value(value: str) -> datetime | None:
for fmt in ("%Y-%m-%d", "%d.%m.%Y", "%Y/%m/%d"):
try:
return datetime.strptime(value.strip(), fmt)
except ValueError:
continue
return None
def _match_date(mail_date: datetime | None, pattern: str, match_type: MatchType) -> bool:
if mail_date is None:
return False
now = datetime.utcnow()
mail_day = mail_date.replace(hour=0, minute=0, second=0, microsecond=0)
match match_type:
case MatchType.ON_DATE:
target = _parse_date_value(pattern)
if not target:
return False
return mail_day == target
case MatchType.BEFORE:
target = _parse_date_value(pattern)
if not target:
return False
return mail_day < target
case MatchType.AFTER:
target = _parse_date_value(pattern)
if not target:
return False
return mail_day >= target
case MatchType.DATE_RANGE:
# Format: "2024-01-01,2024-12-31"
parts = pattern.split(",")
if len(parts) != 2:
logger.warning("Ungültiges Datumsbereich-Format: %s (erwartet: start,ende)", pattern)
return False
start = _parse_date_value(parts[0])
end = _parse_date_value(parts[1])
if not start or not end:
return False
return start <= mail_day <= end
case MatchType.YEAR:
try:
return mail_date.year == int(pattern.strip())
except ValueError:
return False
case MatchType.LAST_N_DAYS:
try:
n = int(pattern.strip())
cutoff = now - timedelta(days=n)
return mail_date >= cutoff
except ValueError:
return False
case MatchType.LAST_N_WEEKS:
try:
n = int(pattern.strip())
cutoff = now - timedelta(weeks=n)
return mail_date >= cutoff
except ValueError:
return False
case MatchType.LAST_N_MONTHS:
try:
n = int(pattern.strip())
# Approximate: 30 days per month
cutoff = now - timedelta(days=n * 30)
return mail_date >= cutoff
except ValueError:
return False
case MatchType.OLDER_THAN_DAYS:
try:
n = int(pattern.strip())
cutoff = now - timedelta(days=n)
return mail_date < cutoff
except ValueError:
return False
case MatchType.OLDER_THAN_WEEKS:
try:
n = int(pattern.strip())
cutoff = now - timedelta(weeks=n)
return mail_date < cutoff
except ValueError:
return False
case MatchType.OLDER_THAN_MONTHS:
try:
n = int(pattern.strip())
cutoff = now - timedelta(days=n * 30)
return mail_date < cutoff
except ValueError:
return False
return False
def evaluate_conditions(mail: MailMessage, conditions: list[FilterCondition]) -> tuple[bool, list[str]]:
    """Check all conditions (AND semantics) against a mail.

    Returns:
        (matched, details): matched is True only when every condition holds;
        details contains one human-readable line per evaluated condition.

    Note: the stale pre-refactor lines that had leaked into this function
    were removed, and the lost unicode arrow/ellipsis in the detail strings
    were restored as ASCII ("->", "...").
    """
    details: list[str] = []
    if not conditions:
        # A rule without conditions never matches (deliberate: avoids a
        # misconfigured rule acting on every mail).
        return False, ["Keine Bedingungen definiert"]
    for cond in conditions:
        if cond.field == ConditionField.DATE:
            mail_date_str = mail.date.strftime("%Y-%m-%d %H:%M") if mail.date else "KEIN DATUM"
            result = _match_date(mail.date, cond.value, cond.match_type)
            neg = "NOT " if cond.negate else ""
            details.append(
                f"{neg}Datum({mail_date_str}) {cond.match_type.value} '{cond.value}' -> {'JA' if result else 'NEIN'}"
            )
        else:
            field_value = _get_field_value(mail, cond.field)
            result = _match_text(field_value, cond.value, cond.match_type)
            neg = "NOT " if cond.negate else ""
            short_val = field_value[:80] + "..." if len(field_value) > 80 else field_value
            details.append(
                f"{neg}{cond.field.value}('{short_val}') {cond.match_type.value} '{cond.value}' -> {'JA' if result else 'NEIN'}"
            )
        # The detail line shows the raw match result; negation flips the
        # effective outcome afterwards.
        if cond.negate:
            result = not result
        if not result:
            return False, details
    return True, details
def execute_action(
@ -89,12 +212,18 @@ def apply_rules(
smtp_config: dict | None = None,
) -> list[dict]:
results = []
eval_details = []
sorted_rules = sorted(rules, key=lambda r: r.priority)
for rule in sorted_rules:
if not rule.enabled:
continue
if not evaluate_conditions(mail, rule.conditions):
matched, details = evaluate_conditions(mail, rule.conditions)
eval_details.append({"rule": rule.name, "matched": matched, "details": details})
if not matched:
logger.debug("Regel '%s' trifft NICHT zu auf Mail %s: %s", rule.name, mail.uid, details)
continue
logger.info("Regel '%s' trifft auf Mail %s zu (Betreff: %s)", rule.name, mail.uid, mail.subject)
@ -113,4 +242,4 @@ def apply_rules(
logger.info("stop_processing aktiv — keine weiteren Regeln für Mail %s", mail.uid)
break
return results
return results, eval_details

View File

@ -1,9 +1,11 @@
import asyncio
import email
import email.utils
import imaplib
import logging
import smtplib
from dataclasses import dataclass, field
from datetime import datetime
from email.header import decode_header
from email.message import Message
from email.mime.text import MIMEText
@ -19,6 +21,7 @@ class MailMessage:
subject: str = ""
body: str = ""
has_attachment: bool = False
date: datetime | None = None
raw: Message | None = field(default=None, repr=False)
@ -35,6 +38,21 @@ def _decode_header_value(value: str | None) -> str:
return " ".join(decoded)
def _parse_date(msg: Message) -> datetime | None:
date_str = msg.get("Date")
if not date_str:
return None
try:
parsed = email.utils.parsedate_to_datetime(date_str)
# Convert to naive UTC datetime for consistent comparison
if parsed.tzinfo is not None:
from datetime import timezone
parsed = parsed.astimezone(timezone.utc).replace(tzinfo=None)
return parsed
except Exception:
return None
def _has_attachment(msg: Message) -> bool:
if not msg.is_multipart():
return False
@ -120,44 +138,75 @@ class IMAPClient:
return False
def list_folders(self) -> list[str]:
    """Return all folder names on the server, sorted alphabetically.

    Parses IMAP LIST responses of the form
    (\\Flags) "delimiter" "folder name"; the delimiter can be ".", "/",
    or another single character. (Stale pre-refactor lines that parsed the
    response by splitting on ' "/" ' were removed.)
    """
    import re
    status, data = self.conn.list()
    if status != "OK":
        return []
    pattern = re.compile(r'\(.*?\)\s+"(.?)"\s+(.*)')
    folders = []
    for item in data:
        if isinstance(item, bytes):
            line = item.decode("utf-8", errors="replace")
            match = pattern.match(line)
            if match:
                folder_name = match.group(2).strip().strip('"')
                folders.append(folder_name)
            else:
                logger.debug("Konnte IMAP LIST Zeile nicht parsen: %s", line)
    logger.info("IMAP Ordner geladen: %d Ordner gefunden", len(folders))
    return sorted(folders)
def create_folder(self, folder_name: str) -> bool:
    """Create an IMAP folder; returns True on success, False otherwise.

    Never raises — connection errors are logged and reported as False.
    """
    try:
        status, _ = self.conn.create(folder_name)
        created = status == "OK"
        if created:
            logger.info("Ordner erstellt: %s", folder_name)
        else:
            logger.error("Ordner erstellen fehlgeschlagen: %s", folder_name)
        return created
    except Exception as e:
        logger.error("Fehler beim Erstellen von Ordner '%s': %s", folder_name, e)
        return False
def fetch_unseen(self, folder: str = "INBOX") -> list[MailMessage]:
    """Legacy: Fetch unseen mails. Use get_all_uids + fetch_mail for processed-tracking."""
    # Kept for backward compatibility: delegates to the UID-based path,
    # restricted to the IMAP "UNSEEN" search criterion.
    return self.fetch_mails_by_uids(folder, self.get_all_uids(folder, search="UNSEEN"))
def get_all_uids(self, folder: str = "INBOX", search: str = "ALL") -> list[str]:
    """Return the UIDs of all messages in *folder* matching *search*.

    Args:
        folder: Mailbox to select.
        search: IMAP SEARCH criterion, e.g. "ALL" or "UNSEEN".

    Returns:
        UIDs as decoded strings; empty list on error or no matches.
        (A stale hard-coded "UNSEEN" search line and an orphaned
        `uids = data[0].split()` left over from the refactor were removed.)
    """
    self.conn.select(folder)
    status, data = self.conn.uid("SEARCH", None, search)
    if status != "OK" or not data[0]:
        return []
    return [uid.decode() if isinstance(uid, bytes) else str(uid) for uid in data[0].split()]
def fetch_mail(self, uid: str) -> MailMessage | None:
    """Fetch a single message by UID and parse it into a MailMessage.

    Returns None when the FETCH fails or returns no data. The currently
    selected mailbox is used; callers select the folder beforehand.
    """
    status, msg_data = self.conn.uid("FETCH", uid, "(RFC822)")
    if status != "OK" or not msg_data[0]:
        return None
    msg = email.message_from_bytes(msg_data[0][1])
    return MailMessage(
        uid=uid,
        from_addr=_decode_header_value(msg.get("From")),
        to_addr=_decode_header_value(msg.get("To")),
        subject=_decode_header_value(msg.get("Subject")),
        body=_extract_body(msg),
        has_attachment=_has_attachment(msg),
        date=_parse_date(msg),
        raw=msg,
    )
def fetch_mails_by_uids(self, folder: str, uids: list[str]) -> list[MailMessage]:
    """Fetch the given UIDs from *folder*, parsed into MailMessage objects.

    UIDs that cannot be fetched are skipped. (The old inline FETCH/parse
    loop that had been left interleaved with the new fetch_mail-delegating
    version was removed.)
    """
    if not uids:
        return []
    self.conn.select(folder)
    messages = []
    for uid in uids:
        mail = self.fetch_mail(uid)
        if mail:
            messages.append(mail)
    return messages
def move_mail(self, uid: str, target_folder: str) -> bool:

View File

@ -0,0 +1,66 @@
import logging
from datetime import datetime, timedelta
from sqlalchemy.orm import Session
from app.database import SessionLocal
from app.models.db_models import FilterLog, LogLevel
logger = logging.getLogger(__name__)
def write_log(
    message: str,
    level: LogLevel = LogLevel.INFO,
    account_id: int | None = None,
    account_name: str = "",
    rule_name: str | None = None,
    action_type: str | None = None,
    mail_uid: str | None = None,
    mail_subject: str | None = None,
    mail_from: str | None = None,
    folder: str | None = None,
    details: str | None = None,
    db: Session | None = None,
) -> None:
    """Append one FilterLog row; best-effort (failures are logged, never raised).

    Long text fields are truncated to their column limits (subject 500,
    sender 255, details 2000) before insert. If no session is supplied,
    a private one is opened and closed here.
    """
    owns_session = db is None
    session = SessionLocal() if owns_session else db
    try:
        session.add(FilterLog(
            account_id=account_id,
            account_name=account_name,
            level=level,
            message=message,
            rule_name=rule_name,
            action_type=action_type,
            mail_uid=mail_uid,
            mail_subject=mail_subject[:500] if mail_subject else None,
            mail_from=mail_from[:255] if mail_from else None,
            folder=folder,
            details=details[:2000] if details else None,
        ))
        session.commit()
    except Exception as e:
        logger.error("Fehler beim Schreiben des Logs: %s", e)
        session.rollback()
    finally:
        if owns_session:
            session.close()
def cleanup_old_logs(days: int = 30, db: Session | None = None) -> int:
    """Delete FilterLog rows older than *days* days.

    Args:
        days: Retention period in days.
        db: Optional open session; if omitted, one is created and closed here.

    Returns:
        Number of deleted rows.

    Raises:
        Re-raises database errors after rolling back (previously the session
        was closed dirty without a rollback on failure).
    """
    close_db = False
    if db is None:
        db = SessionLocal()
        close_db = True
    try:
        cutoff = datetime.utcnow() - timedelta(days=days)
        count = db.query(FilterLog).filter(FilterLog.created_at < cutoff).delete()
        db.commit()
        return count
    except Exception:
        db.rollback()  # leave the (possibly caller-owned) session clean
        raise
    finally:
        if close_db:
            db.close()

View File

@ -6,10 +6,11 @@ from apscheduler.schedulers.asyncio import AsyncIOScheduler
from sqlalchemy.orm import Session
from app.database import SessionLocal
from app.models.db_models import Account, FilterRule
from app.models.db_models import Account, FilterRule, LogLevel, ProcessedMail
from app.services.encryption import decrypt
from app.services.filter_engine import apply_rules
from app.services.imap_client import IMAPClient
from app.services.log_service import cleanup_old_logs, write_log
logger = logging.getLogger(__name__)
@ -40,11 +41,21 @@ def _poll_account_sync(account_id: int) -> None:
.order_by(FilterRule.priority)
.all()
)
write_log(
message=f"Poll gestartet ({len(rules)} aktive Regel(n))",
level=LogLevel.INFO,
account_id=account.id,
account_name=account.name,
details=", ".join(r.name for r in rules) if rules else "Keine Regeln konfiguriert",
db=db,
)
if not rules:
logger.debug("Keine aktiven Regeln für Konto '%s'", account.name)
account.last_poll_at = datetime.utcnow()
db.commit()
return
# Collect unique source folders
source_folders = list({r.source_folder for r in rules})
smtp_config = _build_smtp_config(account)
@ -56,36 +67,189 @@ def _poll_account_sync(account_id: int) -> None:
use_ssl=account.use_ssl,
)
total_mails = 0
total_new = 0
total_matched = 0
total_actions = 0
total_errors = 0
with client:
for folder in source_folders:
folder_rules = [r for r in rules if r.source_folder == folder]
# Alle UIDs im Ordner holen
try:
messages = client.fetch_unseen(folder)
all_uids = client.get_all_uids(folder, search="ALL")
except Exception as e:
logger.error("Fehler beim Abrufen von %s/%s: %s", account.name, folder, e)
write_log(
message=f"Fehler beim Abrufen von Ordner '{folder}'",
level=LogLevel.ERROR,
account_id=account.id,
account_name=account.name,
folder=folder,
details=str(e),
db=db,
)
continue
if messages:
logger.info(
"Konto '%s', Ordner '%s': %d ungelesene Mails",
account.name, folder, len(messages),
)
total_mails += len(all_uids)
for mail in messages:
results = apply_rules(client, mail, folder_rules, smtp_config)
for r in results:
level = logging.INFO if r["success"] else logging.ERROR
logger.log(
level,
"Konto '%s': %s %s -> %s (%s)",
account.name, r["action"], r.get("parameter", ""),
"OK" if r["success"] else "FEHLER", r["rule"],
# Bereits verarbeitete UIDs aus DB laden
processed_uids = set(
row[0] for row in db.query(ProcessedMail.mail_uid)
.filter(
ProcessedMail.account_id == account.id,
ProcessedMail.folder == folder,
)
.all()
)
# Neue (unverarbeitete) UIDs ermitteln
new_uids = [uid for uid in all_uids if uid not in processed_uids]
total_new += len(new_uids)
if not new_uids:
write_log(
message=f"Keine neuen Mails in '{folder}' ({len(all_uids)} gesamt, alle bereits verarbeitet)",
level=LogLevel.INFO,
account_id=account.id,
account_name=account.name,
folder=folder,
db=db,
)
continue
write_log(
message=f"{len(new_uids)} neue Mail(s) in '{folder}' ({len(all_uids)} gesamt, {len(processed_uids)} bereits verarbeitet)",
level=LogLevel.INFO,
account_id=account.id,
account_name=account.name,
folder=folder,
db=db,
)
# Neue Mails abrufen und verarbeiten
for uid in new_uids:
try:
mail = client.fetch_mail(uid)
except Exception as e:
write_log(
message=f"Fehler beim Abrufen von Mail {uid}",
level=LogLevel.ERROR,
account_id=account.id,
account_name=account.name,
mail_uid=uid,
folder=folder,
details=str(e),
db=db,
)
continue
if not mail:
continue
results, eval_details = apply_rules(client, mail, folder_rules, smtp_config)
# Eval-Details für Log aufbereiten
eval_summary = []
for ev in eval_details:
status = "TREFFER" if ev["matched"] else "kein Treffer"
checks = " | ".join(ev["details"])
eval_summary.append(f"Regel '{ev['rule']}': {status} [{checks}]")
if not results:
write_log(
message=f"Keine Regel trifft zu",
level=LogLevel.INFO,
account_id=account.id,
account_name=account.name,
mail_uid=mail.uid,
mail_subject=mail.subject,
mail_from=mail.from_addr,
folder=folder,
details="\n".join(eval_summary),
db=db,
)
else:
total_matched += 1
for r in results:
action_label = r["action"]
param = r.get("parameter", "")
if param:
action_label += f"{param}"
if r["success"]:
total_actions += 1
write_log(
message=f"Aktion ausgeführt: {action_label}",
level=LogLevel.SUCCESS,
account_id=account.id,
account_name=account.name,
rule_name=r["rule"],
action_type=r["action"],
mail_uid=r["mail_uid"],
mail_subject=mail.subject,
mail_from=mail.from_addr,
folder=folder,
details=param,
db=db,
)
else:
total_errors += 1
write_log(
message=f"Aktion fehlgeschlagen: {action_label}",
level=LogLevel.ERROR,
account_id=account.id,
account_name=account.name,
rule_name=r["rule"],
action_type=r["action"],
mail_uid=r["mail_uid"],
mail_subject=mail.subject,
mail_from=mail.from_addr,
folder=folder,
details=param,
db=db,
)
# Mail als verarbeitet markieren
db.add(ProcessedMail(
account_id=account.id,
folder=folder,
mail_uid=mail.uid,
mail_subject=mail.subject[:500] if mail.subject else None,
mail_from=mail.from_addr[:255] if mail.from_addr else None,
))
db.flush()
# Poll-Zusammenfassung
summary_parts = [
f"{total_mails} Mail(s) im Ordner",
f"{total_new} neu",
f"{total_matched} Treffer",
f"{total_actions} Aktion(en)",
]
if total_errors > 0:
summary_parts.append(f"{total_errors} Fehler")
write_log(
message=f"Poll abgeschlossen: {', '.join(summary_parts)}",
level=LogLevel.ERROR if total_errors > 0 else LogLevel.SUCCESS if total_actions > 0 else LogLevel.INFO,
account_id=account.id,
account_name=account.name,
db=db,
)
account.last_poll_at = datetime.utcnow()
db.commit()
except Exception as e:
logger.error("Fehler beim Polling von Konto %s: %s", account_id, e)
db.rollback()
write_log(
message=f"Polling fehlgeschlagen",
level=LogLevel.ERROR,
account_id=account_id,
details=str(e),
)
finally:
db.close()
@ -128,6 +292,16 @@ def start_scheduler() -> None:
add_account_job(account)
finally:
db.close()
# Täglicher Cleanup alter Logs
scheduler.add_job(
lambda: asyncio.get_event_loop().run_in_executor(None, cleanup_old_logs, 30),
"interval",
hours=24,
id="cleanup_logs",
replace_existing=True,
)
scheduler.start()
logger.info("Scheduler gestartet mit %d Jobs", len(scheduler.get_jobs()))

View File

@ -34,6 +34,8 @@
<input type="checkbox" name="use_ssl" {{ 'checked' if (not account or account.use_ssl) else '' }}>
SSL verwenden
</label>
<button type="button" class="outline" onclick="testConnection()" id="test-btn">Verbindung testen</button>
<div id="test-result" style="display:none; margin-bottom:1rem; padding:0.75rem; border-radius:4px;"></div>
<label>
Polling-Intervall (Sekunden)
<input type="number" name="poll_interval_seconds" value="{{ account.poll_interval_seconds if account else 120 }}" min="30">
@ -77,6 +79,81 @@
{% block scripts %}
<script>
async function testConnection() {
const form = document.getElementById('account-form');
const host = form.imap_host.value;
const port = parseInt(form.imap_port.value);
const username = form.username.value;
const password = form.password.value;
const use_ssl = form.use_ssl.checked;
const resultDiv = document.getElementById('test-result');
const btn = document.getElementById('test-btn');
if (!host || !username) {
resultDiv.style.display = 'block';
resultDiv.style.background = '#fff3cd';
resultDiv.style.color = '#856404';
resultDiv.textContent = 'Bitte Server und Benutzername ausfüllen.';
return;
}
{% if account %}
// Bestehendes Konto: wenn kein neues Passwort eingegeben, über Account-ID testen
if (!password) {
btn.setAttribute('aria-busy', 'true');
btn.textContent = 'Teste...';
resultDiv.style.display = 'none';
try {
const resp = await fetch('/api/accounts/{{ account.id }}/test', {method: 'POST'});
const data = await resp.json();
resultDiv.style.display = 'block';
resultDiv.style.background = data.success ? '#d4edda' : '#f8d7da';
resultDiv.style.color = data.success ? '#155724' : '#721c24';
resultDiv.textContent = data.success ? 'Verbindung erfolgreich!' : 'Verbindung fehlgeschlagen.';
} catch(e) {
resultDiv.style.display = 'block';
resultDiv.style.background = '#f8d7da';
resultDiv.style.color = '#721c24';
resultDiv.textContent = 'Fehler: ' + e.message;
}
btn.removeAttribute('aria-busy');
btn.textContent = 'Verbindung testen';
return;
}
{% else %}
if (!password) {
resultDiv.style.display = 'block';
resultDiv.style.background = '#fff3cd';
resultDiv.style.color = '#856404';
resultDiv.textContent = 'Bitte Passwort eingeben.';
return;
}
{% endif %}
btn.setAttribute('aria-busy', 'true');
btn.textContent = 'Teste...';
resultDiv.style.display = 'none';
try {
const resp = await fetch('/api/accounts/test-connection', {
method: 'POST',
headers: {'Content-Type': 'application/json'},
body: JSON.stringify({imap_host: host, imap_port: port, username, password, use_ssl}),
});
const data = await resp.json();
resultDiv.style.display = 'block';
resultDiv.style.background = data.success ? '#d4edda' : '#f8d7da';
resultDiv.style.color = data.success ? '#155724' : '#721c24';
resultDiv.textContent = data.success ? 'Verbindung erfolgreich!' : 'Verbindung fehlgeschlagen.';
} catch(e) {
resultDiv.style.display = 'block';
resultDiv.style.background = '#f8d7da';
resultDiv.style.color = '#721c24';
resultDiv.textContent = 'Fehler: ' + e.message;
}
btn.removeAttribute('aria-busy');
btn.textContent = 'Verbindung testen';
}
document.getElementById('account-form').addEventListener('submit', async (e) => {
e.preventDefault();
const form = e.target;

View File

@ -16,6 +16,7 @@
<li><a href="/">Dashboard</a></li>
<li><a href="/accounts">Konten</a></li>
<li><a href="/filters">Filter</a></li>
<li><a href="/logs">Log</a></li>
<li><a href="/yaml">YAML</a></li>
</ul>
</nav>

View File

@ -24,10 +24,12 @@
<p><strong>Polling:</strong> alle {{ acc.poll_interval_seconds }}s</p>
<p><strong>Letzter Poll:</strong> {{ acc.last_poll_at or "Noch nie" }}</p>
<p><strong>Filterregeln:</strong> {{ acc.filter_rule_count }}</p>
<p><strong>Verarbeitet:</strong> <span id="processed-{{ acc.id }}">...</span> Mails</p>
<footer>
<div role="group">
<button class="outline" onclick="pollNow({{ acc.id }})">Jetzt prüfen</button>
<button class="outline" onclick="testConnection({{ acc.id }})">Verbindungstest</button>
<button class="outline contrast" onclick="resetProcessed({{ acc.id }}, '{{ acc.name }}')">Zurücksetzen</button>
</div>
</footer>
</article>
@ -71,5 +73,33 @@ async function testConnection(accountId) {
msg.textContent = 'Fehler: ' + e.message;
}
}
// Clear the processed-mail markers for one account after user confirmation,
// so the next poll re-evaluates every mail against the filter rules.
async function resetProcessed(accountId, name) {
    if (!confirm(`Verarbeitung für "${name}" zurücksetzen?\n\nAlle Mails werden beim nächsten Poll erneut gegen die Filterregeln geprüft.`)) return;
    const msg = document.getElementById('status-message');
    msg.style.display = 'block';
    try {
        const resp = await fetch(`/api/accounts/${accountId}/processed`, {method: 'DELETE'});
        const data = await resp.json();
        msg.textContent = data.message;
        // Refresh the per-account "processed" counters shown on the cards.
        loadProcessedCounts();
    } catch(e) {
        msg.textContent = 'Fehler: ' + e.message;
    }
}
async function loadProcessedCounts() {
{% for acc in accounts %}
fetch(`/api/accounts/{{ acc.id }}/processed`)
.then(r => r.json())
.then(d => {
const el = document.getElementById('processed-{{ acc.id }}');
if (el) el.textContent = d.processed_count;
})
.catch(() => {});
{% endfor %}
}
loadProcessedCounts();
</script>
{% endblock %}

View File

@ -34,7 +34,10 @@
</label>
<label>
Quellordner
<input type="text" id="filter-source-folder" value="INBOX">
<div role="group">
<input type="text" id="filter-source-folder" value="INBOX" readonly>
<button type="button" class="outline" onclick="openFolderBrowser('filter-source-folder')">Durchsuchen</button>
</div>
</label>
</div>
<label>
@ -59,27 +62,121 @@
</form>
</article>
</dialog>
<dialog id="folder-browser">
<article style="min-width: 50vw; max-width: 700px;">
<header>
<button aria-label="Close" rel="prev" onclick="document.getElementById('folder-browser').close()"></button>
<h3>Ordner auswählen</h3>
</header>
<div id="folder-tree" style="max-height: 400px; overflow-y: auto;">
<p aria-busy="true">Ordner werden geladen...</p>
</div>
<div id="folder-status" style="display:none; margin-top:0.5rem; padding:0.5rem; border-radius:4px; background: var(--pico-muted-border-color);"></div>
</article>
</dialog>
{% endblock %}
{% block scripts %}
<style>
.folder-row {
display: flex;
align-items: center;
padding: 0.35rem 0.5rem;
border-radius: 4px;
cursor: pointer;
transition: background 0.15s;
}
.folder-row:hover {
background: var(--pico-primary-focus);
}
.folder-row .folder-icon {
flex-shrink: 0;
width: 1.4em;
margin-right: 0.4rem;
}
.folder-row .folder-label {
flex: 1;
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
}
.folder-row .folder-path {
font-size: 0.75rem;
opacity: 0.45;
margin-left: 0.5rem;
white-space: nowrap;
}
.folder-row .folder-add-btn {
flex-shrink: 0;
background: none;
border: none;
cursor: pointer;
font-size: 1.1rem;
padding: 0.1rem 0.3rem;
border-radius: 4px;
opacity: 0.5;
transition: opacity 0.15s;
color: var(--pico-color);
}
.folder-row .folder-add-btn:hover {
opacity: 1;
background: var(--pico-primary-focus);
}
.folder-new-input {
display: flex;
align-items: center;
gap: 0.3rem;
padding: 0.25rem 0;
margin-bottom: 0;
}
.folder-new-input input {
margin-bottom: 0;
padding: 0.3rem 0.5rem;
font-size: 0.9rem;
height: auto;
}
.folder-new-input button {
margin-bottom: 0;
padding: 0.3rem 0.6rem;
font-size: 0.85rem;
white-space: nowrap;
}
</style>
<script>
const FIELDS = [
{value: 'from', label: 'Von'},
{value: 'to', label: 'An'},
{value: 'subject', label: 'Betreff'},
{value: 'body', label: 'Text'},
{value: 'has_attachment', label: 'Hat Anhang'},
{value: 'from', label: 'Von', type: 'text'},
{value: 'to', label: 'An', type: 'text'},
{value: 'subject', label: 'Betreff', type: 'text'},
{value: 'body', label: 'Text', type: 'text'},
{value: 'has_attachment', label: 'Hat Anhang', type: 'bool'},
{value: 'date', label: 'Datum', type: 'date'},
];
const MATCH_TYPES = [
const TEXT_MATCH_TYPES = [
{value: 'contains', label: 'enthält'},
{value: 'regex', label: 'Regex'},
{value: 'exact', label: 'exakt'},
];
const DATE_MATCH_TYPES = [
{value: 'on_date', label: 'am (exakt)', inputType: 'date', placeholder: ''},
{value: 'before', label: 'vor', inputType: 'date', placeholder: ''},
{value: 'after', label: 'ab / nach', inputType: 'date', placeholder: ''},
{value: 'date_range', label: 'Zeitraum (vonbis)', inputType: 'daterange', placeholder: ''},
{value: 'year', label: 'im Jahr', inputType: 'number', placeholder: 'z.B. 2025'},
{value: 'last_n_days', label: 'letzte N Tage', inputType: 'number', placeholder: 'Anzahl Tage'},
{value: 'last_n_weeks', label: 'letzte N Wochen', inputType: 'number', placeholder: 'Anzahl Wochen'},
{value: 'last_n_months', label: 'letzte N Monate', inputType: 'number', placeholder: 'Anzahl Monate'},
{value: 'older_than_days', label: 'älter als N Tage', inputType: 'number', placeholder: 'Anzahl Tage'},
{value: 'older_than_weeks', label: 'älter als N Wochen', inputType: 'number', placeholder: 'Anzahl Wochen'},
{value: 'older_than_months', label: 'älter als N Monate', inputType: 'number', placeholder: 'Anzahl Monate'},
];
// Combined for display in filter list
const ALL_MATCH_TYPES = [...TEXT_MATCH_TYPES, ...DATE_MATCH_TYPES];
const ACTION_TYPES = [
{value: 'move', label: 'Verschieben in Ordner', needsParam: true, paramLabel: 'Zielordner'},
{value: 'forward', label: 'Weiterleiten an', needsParam: true, paramLabel: 'E-Mail-Adresse'},
{value: 'delete', label: 'Löschen', needsParam: false},
{value: 'mark_read', label: 'Als gelesen markieren', needsParam: false},
{value: 'move', label: 'Verschieben in Ordner', needsParam: true, needsFolder: true, paramLabel: 'Zielordner'},
{value: 'forward', label: 'Weiterleiten an', needsParam: true, needsFolder: false, paramLabel: 'E-Mail-Adresse'},
{value: 'delete', label: 'Löschen', needsParam: false, needsFolder: false},
{value: 'mark_read', label: 'Als gelesen markieren', needsParam: false, needsFolder: false},
];
async function loadFilters() {
@ -102,7 +199,7 @@ async function loadFilters() {
for (const f of filters) {
const conds = f.conditions.map(c => {
const fieldLabel = FIELDS.find(x => x.value === c.field)?.label || c.field;
const matchLabel = MATCH_TYPES.find(x => x.value === c.match_type)?.label || c.match_type;
const matchLabel = ALL_MATCH_TYPES.find(x => x.value === c.match_type)?.label || c.match_type;
return `${c.negate ? 'NICHT ' : ''}${fieldLabel} ${matchLabel} "${c.value}"`;
}).join('<br>');
const acts = f.actions.map(a => {
@ -129,51 +226,143 @@ async function loadFilters() {
function addCondition(data = null) {
const list = document.getElementById('conditions-list');
const idx = list.children.length;
const uid = Date.now() + '_' + list.children.length;
const div = document.createElement('div');
div.className = 'grid';
div.className = 'cond-row';
div.style.marginBottom = '0.5rem';
div.dataset.uid = uid;
const selectedField = FIELDS.find(f => f.value === (data?.field || 'from')) || FIELDS[0];
div.innerHTML = `
<select name="cond_field_${idx}">
${FIELDS.map(f => `<option value="${f.value}" ${data?.field === f.value ? 'selected' : ''}>${f.label}</option>`).join('')}
</select>
<select name="cond_match_${idx}">
${MATCH_TYPES.map(m => `<option value="${m.value}" ${data?.match_type === m.value ? 'selected' : ''}>${m.label}</option>`).join('')}
</select>
<input type="text" name="cond_value_${idx}" value="${data?.value || ''}" placeholder="Wert" required>
<label style="white-space:nowrap"><input type="checkbox" name="cond_negate_${idx}" ${data?.negate ? 'checked' : ''}> NOT</label>
<button type="button" class="outline small contrast" onclick="this.parentElement.remove()">X</button>
<div style="display:flex; gap:0.4rem; align-items:center; flex-wrap:wrap;">
<select class="cond-field" style="flex:0 0 auto; width:auto; margin-bottom:0;" onchange="onCondFieldChange(this)">
${FIELDS.map(f => `<option value="${f.value}" data-type="${f.type}" ${data?.field === f.value ? 'selected' : ''}>${f.label}</option>`).join('')}
</select>
<div class="cond-match-container" style="flex:0 0 auto;"></div>
<div class="cond-value-container" style="flex:1; min-width:150px;"></div>
<label style="white-space:nowrap; margin-bottom:0;"><input type="checkbox" class="cond-negate" ${data?.negate ? 'checked' : ''} style="margin-bottom:0;"> NOT</label>
<button type="button" class="outline small contrast" onclick="this.closest('.cond-row').remove()" style="margin-bottom:0;">X</button>
</div>
`;
list.appendChild(div);
updateConditionInputs(div, selectedField.type, data);
}
function onCondFieldChange(select) {
    // Rebuild the match-type and value widgets for this condition row
    // whenever a different field is selected.
    const selectedOption = select.options[select.selectedIndex];
    updateConditionInputs(select.closest('.cond-row'), selectedOption.dataset.type, null);
}
function updateConditionInputs(row, fieldType, data) {
    // Render the match-type <select> and the value widget of a condition
    // row according to the selected field's type (date / bool / text).
    // `data` optionally pre-selects the match type and value when editing.
    // Fix: removed the unused `matchSelect` local (dead DOM query with no
    // side effects).
    const matchContainer = row.querySelector('.cond-match-container');
    const valueContainer = row.querySelector('.cond-value-container');
    if (fieldType === 'date') {
        // Date field: date-specific match types; each declares which kind
        // of value input it needs via data-input-type.
        matchContainer.innerHTML = `
        <select class="cond-match" style="margin-bottom:0; width:auto;" onchange="onDateMatchChange(this)">
        ${DATE_MATCH_TYPES.map(m => `<option value="${m.value}" data-input-type="${m.inputType}" data-placeholder="${m.placeholder}" ${data?.match_type === m.value ? 'selected' : ''}>${m.label}</option>`).join('')}
        </select>
        `;
        // Default to 'on_date' when nothing is pre-selected.
        const selectedMatch = DATE_MATCH_TYPES.find(m => m.value === (data?.match_type || 'on_date')) || DATE_MATCH_TYPES[0];
        renderDateValueInput(valueContainer, selectedMatch, data?.value || '');
    } else if (fieldType === 'bool') {
        // Boolean (has_attachment): just true/false
        matchContainer.innerHTML = `
        <select class="cond-match" style="margin-bottom:0; width:auto;">
        <option value="exact" selected>ist</option>
        </select>
        `;
        valueContainer.innerHTML = `
        <select class="cond-value" style="margin-bottom:0;">
        <option value="true" ${data?.value === 'true' ? 'selected' : ''}>Ja</option>
        <option value="false" ${data?.value === 'false' ? 'selected' : ''}>Nein</option>
        </select>
        `;
    } else {
        // Text fields: generic text match types with a free-form value.
        matchContainer.innerHTML = `
        <select class="cond-match" style="margin-bottom:0; width:auto;">
        ${TEXT_MATCH_TYPES.map(m => `<option value="${m.value}" ${data?.match_type === m.value ? 'selected' : ''}>${m.label}</option>`).join('')}
        </select>
        `;
        valueContainer.innerHTML = `
        <input type="text" class="cond-value" value="${data?.value || ''}" placeholder="Wert" required style="margin-bottom:0;">
        `;
    }
}
function onDateMatchChange(select) {
    // When the date match type changes (e.g. single date -> range), swap
    // the value widget for the input kind that match type declares.
    // Fix: removed dead locals `opt`, `inputType` and `placeholder` —
    // they were computed but never used; the widget is rendered from the
    // DATE_MATCH_TYPES entry instead.
    const row = select.closest('.cond-row');
    const matchInfo = DATE_MATCH_TYPES.find(m => m.value === select.value);
    const valueContainer = row.querySelector('.cond-value-container');
    // Reset the value when switching types: the old value's format is
    // unlikely to fit the new input (date vs. range vs. number).
    renderDateValueInput(valueContainer, matchInfo, '');
}
function renderDateValueInput(container, matchInfo, currentValue) {
    // Render the value widget for a date condition: either a from/to date
    // pair (daterange), a single date picker, or a number-of-units field.
    if (matchInfo.inputType === 'daterange') {
        // Two date inputs for range
        // Range values are stored as a single "from,to" string.
        const parts = currentValue ? currentValue.split(',') : ['', ''];
        container.innerHTML = `
        <div style="display:flex; gap:0.3rem; align-items:center;">
            <input type="date" class="cond-value-from" value="${parts[0] || ''}" style="margin-bottom:0; flex:1;">
            <span></span>
            <input type="date" class="cond-value-to" value="${parts[1] || ''}" style="margin-bottom:0; flex:1;">
        </div>
        `;
    } else if (matchInfo.inputType === 'date') {
        container.innerHTML = `<input type="date" class="cond-value" value="${currentValue || ''}" style="margin-bottom:0;">`;
    } else if (matchInfo.inputType === 'number') {
        // e.g. "last N days": positive integer count of days/weeks/months.
        container.innerHTML = `<input type="number" class="cond-value" value="${currentValue || ''}" placeholder="${matchInfo.placeholder}" min="1" style="margin-bottom:0;">`;
    }
}
function addAction(data = null) {
    // Append one action row (action type + optional parameter) to the
    // filter editor; `data` pre-fills the row when editing.
    // NOTE(review): this view interleaves pre- and post-change lines of a
    // diff — some statements and markup appear in both old and new form.
    const list = document.getElementById('actions-list');
    const idx = list.children.length;
    const div = document.createElement('div');
    div.className = 'grid';
    div.style.marginBottom = '0.5rem';
    const selectedType = ACTION_TYPES.find(a => a.value === data?.action_type);
    const selectedType = ACTION_TYPES.find(a => a.value === (data?.action_type || 'move'));
    // Hide the parameter input when the action type needs none and no
    // value is pre-filled.
    const hideParam = selectedType && !selectedType.needsParam && !data?.parameter;
    // Unique input name so the folder browser can target this row's field.
    const inputId = `act_param_${Date.now()}_${idx}`;
    div.innerHTML = `
    <select name="act_type_${idx}" onchange="toggleActionParam(this)">
    ${ACTION_TYPES.map(a => `<option value="${a.value}" data-needs-param="${a.needsParam}" data-param-label="${a.paramLabel || ''}" ${data?.action_type === a.value ? 'selected' : ''}>${a.label}</option>`).join('')}
    </select>
    <input type="text" name="act_param_${idx}" value="${data?.parameter || ''}" placeholder="${selectedType?.paramLabel || 'Parameter'}" ${selectedType?.needsParam === false && !data?.parameter ? 'style=display:none' : ''}>
    <button type="button" class="outline small contrast" onclick="this.parentElement.remove()">X</button>
    <div style="display:flex; gap:0.5rem; align-items:center;">
        <select name="act_type_${idx}" style="flex:0 0 auto; width:auto;" onchange="toggleActionParam(this)">
        ${ACTION_TYPES.map(a => `<option value="${a.value}" data-needs-param="${a.needsParam}" data-needs-folder="${a.needsFolder}" data-param-label="${a.paramLabel || ''}" ${data?.action_type === a.value ? 'selected' : ''}>${a.label}</option>`).join('')}
        </select>
        <div class="act-param-group" style="flex:1; display:${hideParam ? 'none' : 'flex'}; gap:0.5rem; align-items:center;">
            <input type="text" name="${inputId}" value="${data?.parameter || ''}" placeholder="${selectedType?.paramLabel || 'Parameter'}" style="margin-bottom:0; flex:1;" ${selectedType?.needsFolder ? 'readonly' : ''}>
            <button type="button" class="outline act-browse-btn" onclick="openFolderBrowser('${inputId}')" style="${selectedType?.needsFolder ? '' : 'display:none;'} margin-bottom:0; white-space:nowrap;">Durchsuchen</button>
        </div>
        <button type="button" class="outline small contrast" onclick="this.closest('[style*=margin-bottom]').remove()" style="margin-bottom:0;">X</button>
    </div>
    `;
    list.appendChild(div);
}
function toggleActionParam(select) {
    // Show/hide the parameter input of an action row depending on whether
    // the selected action type declares data-needs-param, and toggle the
    // folder-browse button for folder-taking actions.
    // NOTE(review): this view interleaves pre- and post-change diff lines;
    // `input` is declared twice below because both the old and the new
    // lookup are shown — only one exists in the real file.
    const opt = select.options[select.selectedIndex];
    const input = select.parentElement.querySelector('input[type=text]');
    const row = select.closest('div');
    const group = row.querySelector('.act-param-group');
    const browseBtn = group.querySelector('.act-browse-btn');
    const input = group.querySelector('input[type=text]');
    if (opt.dataset.needsParam === 'true') {
        input.style.display = '';
        group.style.display = 'flex';
        input.placeholder = opt.dataset.paramLabel;
        // Folder parameters are filled via the browser dialog, not typed.
        input.readOnly = opt.dataset.needsFolder === 'true';
    } else {
        input.style.display = 'none';
        group.style.display = 'none';
        // Clear any stale parameter so it is not submitted.
        input.value = '';
    }
    browseBtn.style.display = opt.dataset.needsFolder === 'true' ? '' : 'none';
}
function openNewFilter() {
@ -210,14 +399,26 @@ function collectFormData() {
const condRows = document.getElementById('conditions-list').children;
for (let i = 0; i < condRows.length; i++) {
const row = condRows[i];
const selects = row.querySelectorAll('select');
const input = row.querySelector('input[type=text]');
const checkbox = row.querySelector('input[type=checkbox]');
const fieldSelect = row.querySelector('.cond-field');
const matchSelect = row.querySelector('.cond-match');
const negate = row.querySelector('.cond-negate');
// Get value based on input type
let value = '';
const rangeFrom = row.querySelector('.cond-value-from');
const rangeTo = row.querySelector('.cond-value-to');
if (rangeFrom && rangeTo) {
value = rangeFrom.value + ',' + rangeTo.value;
} else {
const valInput = row.querySelector('.cond-value');
value = valInput ? valInput.value : '';
}
conditions.push({
field: selects[0].value,
match_type: selects[1].value,
value: input.value,
negate: checkbox.checked,
field: fieldSelect.value,
match_type: matchSelect.value,
value: value,
negate: negate ? negate.checked : false,
});
}
const actions = [];
@ -225,10 +426,10 @@ function collectFormData() {
for (let i = 0; i < actRows.length; i++) {
const row = actRows[i];
const select = row.querySelector('select');
const input = row.querySelector('input[type=text]');
const input = row.querySelector('.act-param-group input[type=text]');
actions.push({
action_type: select.value,
parameter: input.value || null,
parameter: input ? (input.value || null) : null,
});
}
return {
@ -277,6 +478,157 @@ async function deleteFilter(id) {
loadFilters();
}
// --- Ordner-Browser (Baumansicht) ---
let folderTargetInputName = null;
let folderSeparator = '.';
async function openFolderBrowser(targetInputName) {
    // Open the folder-browser dialog for the currently selected account.
    // The folder picked later is written into the input identified by
    // targetInputName (see selectFolder).
    const selectedAccount = document.getElementById('account-select').value;
    if (!selectedAccount) {
        alert('Bitte zuerst ein Konto auswählen');
        return;
    }
    folderTargetInputName = targetInputName;
    const statusBox = document.getElementById('folder-status');
    statusBox.style.display = 'none';
    document.getElementById('folder-browser').showModal();
    await refreshFolderList(selectedAccount);
}
async function refreshFolderList(accountId) {
    // Fetch the account's IMAP folder list from the API and render it as
    // a tree; shows a busy indicator while loading.
    const treeContainer = document.getElementById('folder-tree');
    treeContainer.innerHTML = '<p aria-busy="true">Ordner werden geladen...</p>';
    try {
        const response = await fetch(`/api/accounts/${accountId}/folders`);
        const payload = await response.json();
        // Heuristic hierarchy-separator detection: if any folder name
        // contains ".", assume dot-separated paths, otherwise "/".
        const usesDot = payload.folders.some(name => name.includes('.'));
        folderSeparator = usesDot ? '.' : '/';
        renderFolderTree(payload.folders);
    } catch(e) {
        treeContainer.innerHTML = `<p>Fehler beim Laden: ${e.message}</p>`;
    }
}
function buildTree(folders) {
    // Turn a flat list of folder paths into a nested tree: each node is
    // keyed by path segment and records its name, full path and depth.
    // Splitting uses the module-level folderSeparator detected earlier.
    const root = {children: {}, folders: []};
    for (const path of folders) {
        const segments = path.split(folderSeparator);
        let cursor = root;
        segments.forEach((segment, position) => {
            if (!cursor.children[segment]) {
                cursor.children[segment] = {
                    name: segment,
                    fullPath: segments.slice(0, position + 1).join(folderSeparator),
                    children: {},
                    depth: position,
                };
            }
            cursor = cursor.children[segment];
        });
    }
    return root;
}
function renderFolderTree(folders) {
    // Render the folder list into the dialog; shows a hint when the
    // account has no folders at all.
    const container = document.getElementById('folder-tree');
    if (folders.length === 0) {
        container.innerHTML = '<p>Keine Ordner gefunden.</p>';
        return;
    }
    container.innerHTML = renderNode(buildTree(folders), -1);
}
function renderNode(node, depth) {
    // Recursively render a tree node's children as flat, indented rows
    // (depth drives the left padding). Returns an HTML string. The root
    // is rendered with depth -1 so first-level rows hide the path hint.
    let html = '';
    const sortedKeys = Object.keys(node.children).sort();
    for (const key of sortedKeys) {
        const child = node.children[key];
        const indent = (depth + 1) * 1.2;
        // Escape quotes so fullPath can be embedded in the inline onclick:
        // single quotes for the JS string, double quotes for the attribute.
        const esc = child.fullPath.replace(/'/g, "\\'").replace(/"/g, '&quot;');
        const hasChildren = Object.keys(child.children).length > 0;
        html += `<div class="folder-row" style="padding-left:${indent + 0.5}rem;" onclick="selectFolder('${esc}')">
            <span class="folder-icon">${hasChildren ? '📂' : '📁'}</span>
            <span class="folder-label">${child.name}</span>
            ${depth >= 0 ? `<span class="folder-path">${child.fullPath}</span>` : ''}
            <button class="folder-add-btn" title="Unterordner erstellen" onclick="event.stopPropagation(); showNewSubfolder('${esc}', this)">📁+</button>
        </div>`;
        if (hasChildren) {
            html += renderNode(child, depth + 1);
        }
    }
    return html;
}
function selectFolder(folderName) {
    // Write the picked folder path into the registered target input
    // (looked up by name attribute first, then by element id) and close
    // the browser dialog.
    let target = document.querySelector(`[name="${folderTargetInputName}"]`);
    if (!target) {
        target = document.getElementById(folderTargetInputName);
    }
    if (target) {
        target.value = folderName;
    }
    document.getElementById('folder-browser').close();
}
function showNewSubfolder(parentPath, btn) {
    // Show an inline "create subfolder" input directly below the clicked
    // folder row; only one such input is open at a time.
    // Remove any existing new-folder input
    document.querySelectorAll('.folder-new-input').forEach(el => el.remove());
    const row = btn.closest('.folder-row');
    const inputRow = document.createElement('div');
    inputRow.className = 'folder-new-input';
    // Align with the parent row, shifted right to read as a child entry.
    inputRow.style.paddingLeft = row.style.paddingLeft;
    inputRow.style.marginLeft = '1.8rem';
    inputRow.innerHTML = `
    <span style="margin-right:0.3rem;">📁</span>
    <input type="text" placeholder="Neuer Ordnername" autofocus>
    <button type="button" class="outline small" onclick="doCreateSubfolder('${parentPath.replace(/'/g, "\\'")}', this)">Erstellen</button>
    <button type="button" class="outline small contrast" onclick="this.parentElement.remove()">X</button>
    `;
    row.after(inputRow);
    inputRow.querySelector('input').focus();
    // Keyboard shortcuts: Enter creates the folder, Escape cancels.
    inputRow.querySelector('input').addEventListener('keydown', (e) => {
        if (e.key === 'Enter') {
            e.preventDefault();
            // '.outline.small' resolves to the "Erstellen" button (first match).
            doCreateSubfolder(parentPath, inputRow.querySelector('.outline.small'));
        }
        if (e.key === 'Escape') inputRow.remove();
    });
}
async function doCreateSubfolder(parentPath, btn) {
    // Create a new IMAP subfolder below parentPath via the API, then
    // refresh the tree so the new folder becomes selectable.
    const input = btn.parentElement.querySelector('input');
    const name = input.value.trim();
    // Silently ignore empty names — the input simply stays open.
    if (!name) return;
    const fullPath = parentPath + folderSeparator + name;
    const accountId = document.getElementById('account-select').value;
    const status = document.getElementById('folder-status');
    // aria-busy gives visual feedback while the request is in flight.
    btn.setAttribute('aria-busy', 'true');
    try {
        const resp = await fetch(`/api/accounts/${accountId}/folders`, {
            method: 'POST',
            headers: {'Content-Type': 'application/json'},
            body: JSON.stringify({folder_name: fullPath}),
        });
        const data = await resp.json();
        if (resp.ok) {
            // Transient success message, auto-hidden after 3 seconds.
            status.style.display = 'block';
            status.textContent = `Ordner "${fullPath}" erstellt`;
            setTimeout(() => status.style.display = 'none', 3000);
            await refreshFolderList(accountId);
        } else {
            status.style.display = 'block';
            status.textContent = 'Fehler: ' + (data.detail || JSON.stringify(data));
        }
    } catch(e) {
        status.style.display = 'block';
        status.textContent = 'Fehler: ' + e.message;
    }
    btn.removeAttribute('aria-busy');
}
// Auto-load if account is pre-selected
if (document.getElementById('account-select').value) loadFilters();
</script>

202
app/templates/logs.html Normal file
View File

@ -0,0 +1,202 @@
{% extends "base.html" %}
{% block title %}Verarbeitungslog — IMAP Mail Filter{% endblock %}
{% block content %}
<h1>Verarbeitungslog</h1>
<div style="display:flex; gap:1rem; align-items:end; flex-wrap:wrap; margin-bottom:1rem;">
<label style="margin-bottom:0;">
Konto
<select id="log-account" onchange="loadLogs()" style="margin-bottom:0;">
<option value="">Alle Konten</option>
{% for acc in accounts %}
<option value="{{ acc.id }}">{{ acc.name }}</option>
{% endfor %}
</select>
</label>
<label style="margin-bottom:0;">
Level
<select id="log-level" onchange="loadLogs()" style="margin-bottom:0;">
<option value="">Alle</option>
<option value="success">Erfolg</option>
<option value="info">Info</option>
<option value="warning">Warnung</option>
<option value="error">Fehler</option>
</select>
</label>
<div role="group" style="margin-bottom:0;">
<button class="outline small" onclick="loadLogs()">Aktualisieren</button>
<button class="outline small contrast" onclick="clearLogs()">Log leeren</button>
</div>
<label style="margin-bottom:0; margin-left:auto;">
<input type="checkbox" id="auto-refresh" onchange="toggleAutoRefresh()" style="margin-bottom:0;">
Auto-Refresh (5s)
</label>
</div>
<div id="log-stats" style="margin-bottom:1rem;"></div>
<div id="log-container">
<p aria-busy="true">Logs werden geladen...</p>
</div>
<div id="log-paging" style="display:flex; justify-content:center; gap:1rem; margin-top:1rem;"></div>
{% endblock %}
{% block scripts %}
<style>
.log-table {
width: 100%;
font-size: 0.9rem;
}
.log-table th, .log-table td {
padding: 0.4rem 0.6rem;
vertical-align: top;
}
.log-row { border-left: 3px solid transparent; }
.log-row[data-level="success"] { border-left-color: #28a745; }
.log-row[data-level="info"] { border-left-color: #17a2b8; }
.log-row[data-level="warning"] { border-left-color: #ffc107; }
.log-row[data-level="error"] { border-left-color: #dc3545; }
.log-level {
display: inline-block;
padding: 0.1rem 0.4rem;
border-radius: 3px;
font-size: 0.75rem;
font-weight: bold;
text-transform: uppercase;
}
.log-level-success { background: #d4edda; color: #155724; }
.log-level-info { background: #d1ecf1; color: #0c5460; }
.log-level-warning { background: #fff3cd; color: #856404; }
.log-level-error { background: #f8d7da; color: #721c24; }
.log-detail { font-size: 0.8rem; opacity: 0.7; }
.log-mail-info { font-size: 0.8rem; color: var(--pico-muted-color); }
</style>
<script>
let currentOffset = 0;
const PAGE_SIZE = 50;
let refreshTimer = null;
async function loadLogs(offset = 0) {
    // Fetch one page of log entries (honouring the account and level
    // filters) and render the stats line, the table and the paging bar.
    currentOffset = offset;
    const container = document.getElementById('log-container');
    const params = [`limit=${PAGE_SIZE}`, `offset=${offset}`];
    const accountId = document.getElementById('log-account').value;
    if (accountId) params.push(`account_id=${accountId}`);
    const level = document.getElementById('log-level').value;
    if (level) params.push(`level=${level}`);
    try {
        const resp = await fetch(`/api/logs/?${params.join('&')}`);
        const data = await resp.json();
        renderStats(data.total);
        renderLogs(data.logs);
        renderPaging(data.total, offset);
    } catch(e) {
        container.innerHTML = `<p>Fehler beim Laden: ${e.message}</p>`;
    }
}
function renderStats(total) {
    // Show the total number of log entries above the table.
    const statsBox = document.getElementById('log-stats');
    statsBox.innerHTML = `<small>${total} Einträge gesamt</small>`;
}
function renderLogs(logs) {
    // Render the fetched log entries as a table. All server-provided text
    // is passed through escapeHtml before being embedded in innerHTML.
    const container = document.getElementById('log-container');
    if (!logs.length) {
        container.innerHTML = '<article><p>Keine Log-Einträge vorhanden.</p></article>';
        return;
    }
    let html = `<table class="log-table">
    <thead><tr>
        <th style="width:140px;">Zeit</th>
        <th style="width:60px;">Level</th>
        <th style="width:120px;">Konto</th>
        <th>Nachricht</th>
        <th style="width:120px;">Regel</th>
    </tr></thead><tbody>`;
    for (const log of logs) {
        // created_at rendered in German locale; missing timestamps show empty.
        const time = log.created_at ? new Date(log.created_at).toLocaleString('de-DE', {
            day: '2-digit', month: '2-digit', year: '2-digit',
            hour: '2-digit', minute: '2-digit', second: '2-digit'
        }) : '';
        const levelClass = `log-level-${log.level}`;
        // Map level names to short German labels; unknown levels pass through.
        const levelLabel = {success: 'OK', info: 'Info', warning: 'Warn', error: 'Fehler'}[log.level] || log.level;
        let message = escapeHtml(log.message);
        // Optional sender/subject context rendered below the main message.
        if (log.mail_subject || log.mail_from) {
            message += `<div class="log-mail-info">`;
            if (log.mail_from) message += `Von: ${escapeHtml(log.mail_from)}<br>`;
            if (log.mail_subject) message += `Betreff: ${escapeHtml(log.mail_subject)}`;
            message += `</div>`;
        }
        if (log.details) {
            message += `<div class="log-detail">${escapeHtml(log.details)}</div>`;
        }
        html += `<tr class="log-row" data-level="${log.level}">
        <td><small>${time}</small></td>
        <td><span class="log-level ${levelClass}">${levelLabel}</span></td>
        <td><small>${escapeHtml(log.account_name || '')}</small></td>
        <td>${message}</td>
        <td><small>${escapeHtml(log.rule_name || '')}</small></td>
        </tr>`;
    }
    html += '</tbody></table>';
    container.innerHTML = html;
}
function renderPaging(total, offset) {
    // Render newer/older paging controls; hidden entirely when all
    // entries fit on a single page.
    const paging = document.getElementById('log-paging');
    if (total <= PAGE_SIZE) {
        paging.innerHTML = '';
        return;
    }
    const totalPages = Math.ceil(total / PAGE_SIZE);
    const page = Math.floor(offset / PAGE_SIZE) + 1;
    const parts = [];
    if (offset > 0) {
        parts.push(`<button class="outline small" onclick="loadLogs(${offset - PAGE_SIZE})">Neuere</button>`);
    }
    parts.push(`<small>Seite ${page} / ${totalPages}</small>`);
    if (offset + PAGE_SIZE < total) {
        parts.push(`<button class="outline small" onclick="loadLogs(${offset + PAGE_SIZE})">Ältere</button>`);
    }
    paging.innerHTML = parts.join('');
}
async function clearLogs() {
    // Ask for confirmation, then delete all logs — optionally scoped to
    // the currently selected account — and reload the view.
    if (!confirm('Log wirklich leeren?')) return;
    const accountId = document.getElementById('log-account').value;
    const url = accountId ? `/api/logs/?account_id=${accountId}` : '/api/logs/';
    await fetch(url, {method: 'DELETE'});
    loadLogs();
}
function toggleAutoRefresh() {
    // Start or stop the 5-second polling loop based on the checkbox.
    const enabled = document.getElementById('auto-refresh').checked;
    if (enabled) {
        // Keep re-fetching the page the user is currently looking at.
        refreshTimer = setInterval(() => loadLogs(currentOffset), 5000);
    } else {
        clearInterval(refreshTimer);
        refreshTimer = null;
    }
}
function escapeHtml(text) {
    // Escape &, < and > so server-provided text can be safely embedded in
    // innerHTML. Pure-string version: the old implementation created a
    // throwaway DOM <div> per call; this replaces it with three regex
    // passes (same characters a div.textContent round-trip escapes).
    // null/undefined are normalized to the empty string.
    return String(text ?? '')
        .replace(/&/g, '&amp;')
        .replace(/</g, '&lt;')
        .replace(/>/g, '&gt;');
}
// Initial load
loadLogs();
</script>
{% endblock %}

View File

@ -1,30 +1,77 @@
{% extends "base.html" %}
{% block title %}YAML — IMAP Mail Filter{% endblock %}
{% block title %}Import / Export — IMAP Mail Filter{% endblock %}
{% block content %}
<h1>YAML Import / Export</h1>
<h1>Import / Export</h1>
<h2>Komplett-Backup</h2>
<p>Enthält alles: Konten (inkl. Passwörter), Filterregeln und Verarbeitungsstatus. Der Encryption-Key muss beim Import identisch sein.</p>
<div class="grid">
<article>
<header><h3>Export</h3></header>
<p>Aktuelle Konfiguration als YAML-Datei herunterladen.</p>
<button onclick="exportYaml()">YAML exportieren</button>
<pre id="yaml-preview" style="max-height: 400px; overflow-y: auto; display: none;"></pre>
<header><h3>Backup erstellen</h3></header>
<p>Gesamte Konfiguration als JSON-Datei herunterladen.</p>
<button onclick="exportBackup()">Backup herunterladen</button>
</article>
<article>
<header><h3>Import</h3></header>
<p>YAML-Datei hochladen um Konten und Filterregeln zu importieren.</p>
<form id="import-form">
<header><h3>Backup wiederherstellen</h3></header>
<p>Backup-Datei hochladen. Bestehende Konten werden aktualisiert, neue angelegt.</p>
<form id="backup-import-form">
<input type="file" name="file" accept=".json" required>
<button type="submit">Wiederherstellen</button>
</form>
<div id="backup-result" style="display:none"></div>
</article>
</div>
<hr>
<h2>YAML Filterregeln</h2>
<p>Nur Konten und Filterregeln — ohne Passwörter (können als Umgebungsvariablen referenziert werden).</p>
<div class="grid">
<article>
<header><h3>YAML Export</h3></header>
<button onclick="exportYaml()">YAML exportieren</button>
<pre id="yaml-preview" style="max-height: 300px; overflow-y: auto; display: none; margin-top: 1rem;"></pre>
</article>
<article>
<header><h3>YAML Import</h3></header>
<form id="yaml-import-form">
<input type="file" name="file" accept=".yaml,.yml" required>
<button type="submit">Importieren</button>
</form>
<div id="import-result" style="display:none"></div>
<div id="yaml-result" style="display:none"></div>
</article>
</div>
{% endblock %}
{% block scripts %}
<script>
function exportBackup() {
    // Navigate to the backup endpoint; the browser downloads the JSON file.
    window.location.assign('/api/yaml/backup');
}
// Handle the full-backup restore form: upload the JSON file and show the
// restore summary (or the error) returned by the API.
document.getElementById('backup-import-form').addEventListener('submit', async (e) => {
    e.preventDefault();
    if (!confirm('Backup wiederherstellen?\n\nBestehende Konten mit gleichem Benutzernamen/Server werden überschrieben.')) return;
    const formData = new FormData(e.target);
    const resp = await fetch('/api/yaml/backup', {method: 'POST', body: formData});
    const result = await resp.json();
    const div = document.getElementById('backup-result');
    div.style.display = 'block';
    if (result.error) {
        div.innerHTML = `<article role="alert">Fehler: ${result.error}</article>`;
    } else {
        // Summary counters as reported by the backup-restore endpoint.
        div.innerHTML = `<article>
        Backup wiederhergestellt!<br>
        Konten erstellt: ${result.accounts_created}<br>
        Konten aktualisiert: ${result.accounts_updated}<br>
        Regeln erstellt: ${result.rules_created}<br>
        Verarbeitungsstatus: ${result.processed_restored} Einträge
        </article>`;
    }
});
async function exportYaml() {
const resp = await fetch('/api/yaml/export');
const text = await resp.text();
@ -32,7 +79,6 @@ async function exportYaml() {
pre.textContent = text;
pre.style.display = 'block';
// Also trigger download
const blob = new Blob([text], {type: 'text/yaml'});
const url = URL.createObjectURL(blob);
const a = document.createElement('a');
@ -42,12 +88,12 @@ async function exportYaml() {
URL.revokeObjectURL(url);
}
document.getElementById('import-form').addEventListener('submit', async (e) => {
document.getElementById('yaml-import-form').addEventListener('submit', async (e) => {
e.preventDefault();
const formData = new FormData(e.target);
const resp = await fetch('/api/yaml/import', {method: 'POST', body: formData});
const result = await resp.json();
const div = document.getElementById('import-result');
const div = document.getElementById('yaml-result');
div.style.display = 'block';
if (result.error) {
div.innerHTML = `<article role="alert">Fehler: ${result.error}</article>`;

View File

@ -2,7 +2,7 @@ services:
mailfilter:
build: .
ports:
- "8080:8000"
- "8192:8000"
volumes:
- ./data:/app/data
- ./config:/app/config

View File

@ -8,3 +8,4 @@ python-multipart>=0.0.6
apscheduler>=3.10
pyyaml>=6.0
cryptography>=42.0
alembic>=1.13

View File

@ -1,7 +1,14 @@
from datetime import datetime, timedelta
from unittest.mock import MagicMock
from app.models.db_models import ActionType, ConditionField, MatchType
from app.services.filter_engine import evaluate_conditions
from app.services.filter_engine import evaluate_conditions as _evaluate_conditions_raw
def evaluate_conditions(mail, conditions):
    """Test shim around the real engine: drop the details, keep the bool.

    The production ``evaluate_conditions`` returns ``(matched, details)``;
    the assertions in this module only inspect the match flag.
    """
    return _evaluate_conditions_raw(mail, conditions)[0]
from app.services.imap_client import MailMessage
@ -22,6 +29,7 @@ def _make_mail(**kwargs):
subject=kwargs.get("subject", "Test Subject"),
body=kwargs.get("body", "This is the body."),
has_attachment=kwargs.get("has_attachment", False),
date=kwargs.get("date", None),
)
@ -99,6 +107,123 @@ def test_has_attachment_negate():
assert evaluate_conditions(mail, [cond]) is True
# --- Date conditions ---
def test_date_on_date():
    """on_date matches the calendar day; the mail's time of day is ignored."""
    mail = _make_mail(date=datetime(2025, 3, 15, 10, 30))
    cond = _make_condition("date", "on_date", "2025-03-15")
    assert evaluate_conditions(mail, [cond]) is True
def test_date_on_date_no_match():
    """A mail from the previous day must not match on_date."""
    mail = _make_mail(date=datetime(2025, 3, 14, 10, 30))
    cond = _make_condition("date", "on_date", "2025-03-15")
    assert evaluate_conditions(mail, [cond]) is False
def test_date_before():
    """before: a mail dated earlier than the cutoff matches."""
    mail = _make_mail(date=datetime(2025, 1, 10))
    cond = _make_condition("date", "before", "2025-02-01")
    assert evaluate_conditions(mail, [cond]) is True
def test_date_after():
    """after: a mail dated later than the cutoff matches."""
    mail = _make_mail(date=datetime(2025, 6, 1))
    cond = _make_condition("date", "after", "2025-03-01")
    assert evaluate_conditions(mail, [cond]) is True
def test_date_range_inside():
    """date_range takes a single 'from,to' value; an inside date matches."""
    mail = _make_mail(date=datetime(2025, 6, 15))
    cond = _make_condition("date", "date_range", "2025-01-01,2025-12-31")
    assert evaluate_conditions(mail, [cond]) is True
def test_date_range_outside():
    """A mail dated before the range start must not match date_range."""
    mail = _make_mail(date=datetime(2024, 6, 15))
    cond = _make_condition("date", "date_range", "2025-01-01,2025-12-31")
    assert evaluate_conditions(mail, [cond]) is False
def test_date_year():
    """year matches on the calendar year alone."""
    mail = _make_mail(date=datetime(2025, 8, 20))
    cond = _make_condition("date", "year", "2025")
    assert evaluate_conditions(mail, [cond]) is True
def test_date_year_no_match():
    """A mail from a different year must not match year."""
    mail = _make_mail(date=datetime(2024, 8, 20))
    cond = _make_condition("date", "year", "2025")
    assert evaluate_conditions(mail, [cond]) is False
# NOTE(review): datetime.utcnow() is deprecated since Python 3.12 in favour
# of timezone-aware datetime.now(UTC). It is kept here because the filter
# engine presumably compares against naive UTC timestamps — confirm before
# migrating these tests.
def test_date_last_n_days():
    """A mail 5 days old falls inside a 10-day window."""
    mail = _make_mail(date=datetime.utcnow() - timedelta(days=5))
    cond = _make_condition("date", "last_n_days", "10")
    assert evaluate_conditions(mail, [cond]) is True
def test_date_last_n_days_too_old():
    """A mail 20 days old falls outside a 10-day window."""
    mail = _make_mail(date=datetime.utcnow() - timedelta(days=20))
    cond = _make_condition("date", "last_n_days", "10")
    assert evaluate_conditions(mail, [cond]) is False
def test_date_last_n_weeks():
    """A mail 1 week old falls inside a 2-week window."""
    mail = _make_mail(date=datetime.utcnow() - timedelta(weeks=1))
    cond = _make_condition("date", "last_n_weeks", "2")
    assert evaluate_conditions(mail, [cond]) is True
def test_date_last_n_months():
    """A mail 15 days old falls inside a 1-month window."""
    mail = _make_mail(date=datetime.utcnow() - timedelta(days=15))
    cond = _make_condition("date", "last_n_months", "1")
    assert evaluate_conditions(mail, [cond]) is True
def test_date_older_than_days():
    """older_than_days: a 100-day-old mail is older than 30 days."""
    mail = _make_mail(date=datetime.utcnow() - timedelta(days=100))
    cond = _make_condition("date", "older_than_days", "30")
    assert evaluate_conditions(mail, [cond]) is True
def test_date_older_than_days_too_recent():
    """older_than_days: a 5-day-old mail is not older than 30 days."""
    mail = _make_mail(date=datetime.utcnow() - timedelta(days=5))
    cond = _make_condition("date", "older_than_days", "30")
    assert evaluate_conditions(mail, [cond]) is False
def test_date_older_than_weeks():
    """older_than_weeks: 10 weeks old is older than 4 weeks."""
    mail = _make_mail(date=datetime.utcnow() - timedelta(weeks=10))
    cond = _make_condition("date", "older_than_weeks", "4")
    assert evaluate_conditions(mail, [cond]) is True
def test_date_older_than_months():
    """older_than_months: ~200 days old is older than 3 months."""
    mail = _make_mail(date=datetime.utcnow() - timedelta(days=200))
    cond = _make_condition("date", "older_than_months", "3")
    assert evaluate_conditions(mail, [cond]) is True
def test_date_no_date_in_mail():
    """A mail without a date can never satisfy a date condition."""
    mail = _make_mail(date=None)
    cond = _make_condition("date", "on_date", "2025-03-15")
    assert evaluate_conditions(mail, [cond]) is False
def test_date_negate():
    """negate inverts the result: year 2024 does not match a 2025 mail."""
    mail = _make_mail(date=datetime(2025, 3, 15))
    cond = _make_condition("date", "year", "2024", negate=True)
    assert evaluate_conditions(mail, [cond]) is True
def test_date_german_format():
    """The engine also accepts German DD.MM.YYYY date values."""
    mail = _make_mail(date=datetime(2025, 3, 15))
    cond = _make_condition("date", "on_date", "15.03.2025")
    assert evaluate_conditions(mail, [cond]) is True
def test_empty_conditions():
    """A rule with an empty condition list matches nothing."""
    mail = _make_mail()
    assert evaluate_conditions(mail, []) is False