adds folder backup, attachments, logging
This commit is contained in:
@@ -0,0 +1,217 @@
|
||||
import json
|
||||
import logging
|
||||
from datetime import datetime
|
||||
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.database import SessionLocal
|
||||
from app.models.db_models import Account, FilterAction, FilterCondition, FilterRule, ProcessedMail
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
BACKUP_VERSION = 1
|
||||
|
||||
|
||||
def export_backup(db: Session | None = None) -> str:
    """Serialize all accounts, their filter rules and processed-mail history to JSON.

    Passwords are exported in their stored (encrypted) form. If *db* is
    omitted, a session is created and closed internally.

    Returns:
        A pretty-printed JSON string with keys "version", "exported_at",
        "accounts".
    """
    owns_session = db is None
    if owns_session:
        db = SessionLocal()

    try:
        payload = {
            "version": BACKUP_VERSION,
            "exported_at": datetime.utcnow().isoformat(),
            "accounts": [],
        }

        for acc in db.query(Account).order_by(Account.id).all():
            # Rules are exported in priority order so a re-import preserves it.
            rules = [
                {
                    "name": rule.name,
                    "priority": rule.priority,
                    "enabled": rule.enabled,
                    "stop_processing": rule.stop_processing,
                    "source_folder": rule.source_folder,
                    "conditions": [
                        {
                            "field": cond.field.value,
                            "match_type": cond.match_type.value,
                            "value": cond.value,
                            "negate": cond.negate,
                        }
                        for cond in rule.conditions
                    ],
                    "actions": [
                        {
                            "action_type": action.action_type.value,
                            "parameter": action.parameter,
                        }
                        for action in rule.actions
                    ],
                }
                for rule in sorted(acc.filter_rules, key=lambda r: r.priority)
            ]

            processed = [
                {
                    "folder": pm.folder,
                    "mail_uid": pm.mail_uid,
                    "mail_subject": pm.mail_subject,
                    "mail_from": pm.mail_from,
                    "processed_at": pm.processed_at.isoformat() if pm.processed_at else None,
                }
                for pm in db.query(ProcessedMail)
                .filter(ProcessedMail.account_id == acc.id)
                .all()
            ]

            payload["accounts"].append({
                "name": acc.name,
                "imap_host": acc.imap_host,
                "imap_port": acc.imap_port,
                "use_ssl": acc.use_ssl,
                "username": acc.username,
                "password": acc.password,  # stored encrypted
                "smtp_host": acc.smtp_host,
                "smtp_port": acc.smtp_port,
                "smtp_username": acc.smtp_username,
                "smtp_password": acc.smtp_password,  # stored encrypted
                "poll_interval_seconds": acc.poll_interval_seconds,
                "enabled": acc.enabled,
                "filter_rules": rules,
                "processed_mails": processed,
            })

        return json.dumps(payload, ensure_ascii=False, indent=2)
    finally:
        if owns_session:
            db.close()
|
||||
|
||||
|
||||
def import_backup(json_content: str, db: Session | None = None) -> dict:
    """Restore accounts, filter rules and processed mails from a JSON backup.

    Accounts are matched via (username, imap_host): existing accounts are
    updated and their rules/history fully replaced; unknown ones are created.

    Returns:
        A statistics dict on success, or ``{"error": ...}`` if the import fails.
    """
    owns_session = db is None
    if owns_session:
        db = SessionLocal()

    try:
        data = json.loads(json_content)

        if "version" not in data or "accounts" not in data:
            return {"error": "Ungültiges Backup-Format"}

        stats = {
            "accounts_created": 0,
            "accounts_updated": 0,
            "rules_created": 0,
            "processed_restored": 0,
        }

        for acc_data in data["accounts"]:
            # Match on username + imap_host to decide update vs. create.
            account = (
                db.query(Account)
                .filter(
                    Account.username == acc_data["username"],
                    Account.imap_host == acc_data["imap_host"],
                )
                .first()
            )

            if account is not None:
                account.name = acc_data["name"]
                account.imap_port = acc_data["imap_port"]
                account.use_ssl = acc_data["use_ssl"]
                account.password = acc_data["password"]
                account.smtp_host = acc_data.get("smtp_host")
                account.smtp_port = acc_data.get("smtp_port")
                account.smtp_username = acc_data.get("smtp_username")
                account.smtp_password = acc_data.get("smtp_password")
                account.poll_interval_seconds = acc_data.get("poll_interval_seconds", 120)
                account.enabled = acc_data.get("enabled", True)
                stats["accounts_updated"] += 1

                # The backup fully replaces rules and processed-mail history.
                db.query(FilterRule).filter(FilterRule.account_id == account.id).delete()
                db.query(ProcessedMail).filter(ProcessedMail.account_id == account.id).delete()
                db.flush()
            else:
                account = Account(
                    name=acc_data["name"],
                    imap_host=acc_data["imap_host"],
                    imap_port=acc_data["imap_port"],
                    use_ssl=acc_data["use_ssl"],
                    username=acc_data["username"],
                    password=acc_data["password"],
                    smtp_host=acc_data.get("smtp_host"),
                    smtp_port=acc_data.get("smtp_port"),
                    smtp_username=acc_data.get("smtp_username"),
                    smtp_password=acc_data.get("smtp_password"),
                    poll_interval_seconds=acc_data.get("poll_interval_seconds", 120),
                    enabled=acc_data.get("enabled", True),
                )
                db.add(account)
                stats["accounts_created"] += 1

            db.flush()  # ensure account.id is populated for the FK rows below

            for rule_data in acc_data.get("filter_rules", []):
                rule = FilterRule(
                    account_id=account.id,
                    name=rule_data["name"],
                    priority=rule_data.get("priority", 100),
                    enabled=rule_data.get("enabled", True),
                    stop_processing=rule_data.get("stop_processing", False),
                    source_folder=rule_data.get("source_folder", "INBOX"),
                )
                db.add(rule)
                db.flush()  # rule.id is needed for its conditions/actions

                for cond_data in rule_data.get("conditions", []):
                    db.add(FilterCondition(
                        rule_id=rule.id,
                        field=cond_data["field"],
                        match_type=cond_data["match_type"],
                        value=cond_data["value"],
                        negate=cond_data.get("negate", False),
                    ))

                for action_data in rule_data.get("actions", []):
                    db.add(FilterAction(
                        rule_id=rule.id,
                        action_type=action_data["action_type"],
                        parameter=action_data.get("parameter"),
                    ))

                stats["rules_created"] += 1

            for pm_data in acc_data.get("processed_mails", []):
                db.add(ProcessedMail(
                    account_id=account.id,
                    folder=pm_data["folder"],
                    mail_uid=pm_data["mail_uid"],
                    mail_subject=pm_data.get("mail_subject"),
                    mail_from=pm_data.get("mail_from"),
                ))
                stats["processed_restored"] += 1

        db.commit()
        logger.info("Backup-Import abgeschlossen: %s", stats)
        return stats
    except json.JSONDecodeError:
        return {"error": "Ungültiges JSON"}
    except Exception as e:
        db.rollback()
        logger.error("Backup-Import fehlgeschlagen: %s", e)
        return {"error": str(e)}
    finally:
        if owns_session:
            db.close()
|
||||
@@ -1,5 +1,6 @@
|
||||
import logging
|
||||
import re
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from app.models.db_models import ActionType, ConditionField, FilterAction, FilterCondition, FilterRule, MatchType
|
||||
from app.services.imap_client import IMAPClient, MailMessage
|
||||
@@ -19,10 +20,12 @@ def _get_field_value(mail: MailMessage, field: ConditionField) -> str:
|
||||
return mail.body
|
||||
case ConditionField.HAS_ATTACHMENT:
|
||||
return str(mail.has_attachment).lower()
|
||||
case ConditionField.DATE:
|
||||
return "" # Date is handled separately
|
||||
return ""
|
||||
|
||||
|
||||
def _match(value: str, pattern: str, match_type: MatchType) -> bool:
|
||||
def _match_text(value: str, pattern: str, match_type: MatchType) -> bool:
|
||||
match match_type:
|
||||
case MatchType.CONTAINS:
|
||||
return pattern.lower() in value.lower()
|
||||
@@ -37,17 +40,137 @@ def _match(value: str, pattern: str, match_type: MatchType) -> bool:
|
||||
return False
|
||||
|
||||
|
||||
def evaluate_conditions(mail: MailMessage, conditions: list[FilterCondition]) -> bool:
|
||||
if not conditions:
|
||||
def _parse_date_value(value: str) -> datetime | None:
|
||||
for fmt in ("%Y-%m-%d", "%d.%m.%Y", "%Y/%m/%d"):
|
||||
try:
|
||||
return datetime.strptime(value.strip(), fmt)
|
||||
except ValueError:
|
||||
continue
|
||||
return None
|
||||
|
||||
|
||||
def _match_date(mail_date: datetime | None, pattern: str, match_type: MatchType) -> bool:
|
||||
if mail_date is None:
|
||||
return False
|
||||
|
||||
now = datetime.utcnow()
|
||||
mail_day = mail_date.replace(hour=0, minute=0, second=0, microsecond=0)
|
||||
|
||||
match match_type:
|
||||
case MatchType.ON_DATE:
|
||||
target = _parse_date_value(pattern)
|
||||
if not target:
|
||||
return False
|
||||
return mail_day == target
|
||||
|
||||
case MatchType.BEFORE:
|
||||
target = _parse_date_value(pattern)
|
||||
if not target:
|
||||
return False
|
||||
return mail_day < target
|
||||
|
||||
case MatchType.AFTER:
|
||||
target = _parse_date_value(pattern)
|
||||
if not target:
|
||||
return False
|
||||
return mail_day >= target
|
||||
|
||||
case MatchType.DATE_RANGE:
|
||||
# Format: "2024-01-01,2024-12-31"
|
||||
parts = pattern.split(",")
|
||||
if len(parts) != 2:
|
||||
logger.warning("Ungültiges Datumsbereich-Format: %s (erwartet: start,ende)", pattern)
|
||||
return False
|
||||
start = _parse_date_value(parts[0])
|
||||
end = _parse_date_value(parts[1])
|
||||
if not start or not end:
|
||||
return False
|
||||
return start <= mail_day <= end
|
||||
|
||||
case MatchType.YEAR:
|
||||
try:
|
||||
return mail_date.year == int(pattern.strip())
|
||||
except ValueError:
|
||||
return False
|
||||
|
||||
case MatchType.LAST_N_DAYS:
|
||||
try:
|
||||
n = int(pattern.strip())
|
||||
cutoff = now - timedelta(days=n)
|
||||
return mail_date >= cutoff
|
||||
except ValueError:
|
||||
return False
|
||||
|
||||
case MatchType.LAST_N_WEEKS:
|
||||
try:
|
||||
n = int(pattern.strip())
|
||||
cutoff = now - timedelta(weeks=n)
|
||||
return mail_date >= cutoff
|
||||
except ValueError:
|
||||
return False
|
||||
|
||||
case MatchType.LAST_N_MONTHS:
|
||||
try:
|
||||
n = int(pattern.strip())
|
||||
# Approximate: 30 days per month
|
||||
cutoff = now - timedelta(days=n * 30)
|
||||
return mail_date >= cutoff
|
||||
except ValueError:
|
||||
return False
|
||||
|
||||
case MatchType.OLDER_THAN_DAYS:
|
||||
try:
|
||||
n = int(pattern.strip())
|
||||
cutoff = now - timedelta(days=n)
|
||||
return mail_date < cutoff
|
||||
except ValueError:
|
||||
return False
|
||||
|
||||
case MatchType.OLDER_THAN_WEEKS:
|
||||
try:
|
||||
n = int(pattern.strip())
|
||||
cutoff = now - timedelta(weeks=n)
|
||||
return mail_date < cutoff
|
||||
except ValueError:
|
||||
return False
|
||||
|
||||
case MatchType.OLDER_THAN_MONTHS:
|
||||
try:
|
||||
n = int(pattern.strip())
|
||||
cutoff = now - timedelta(days=n * 30)
|
||||
return mail_date < cutoff
|
||||
except ValueError:
|
||||
return False
|
||||
|
||||
return False
|
||||
|
||||
|
||||
def evaluate_conditions(mail: MailMessage, conditions: list[FilterCondition]) -> tuple[bool, list[str]]:
    """Returns (matched, details) where details explains each condition check.

    All conditions are AND-ed; evaluation stops at the first failing one.
    A mail with no conditions never matches.
    """
    details: list[str] = []
    if not conditions:
        return False, ["Keine Bedingungen definiert"]

    for cond in conditions:
        if cond.field == ConditionField.DATE:
            # Date conditions are evaluated against the parsed Date header,
            # not a text field.
            mail_date_str = mail.date.strftime("%Y-%m-%d %H:%M") if mail.date else "KEIN DATUM"
            result = _match_date(mail.date, cond.value, cond.match_type)
            neg = "NOT " if cond.negate else ""
            details.append(
                f"{neg}Datum({mail_date_str}) {cond.match_type.value} '{cond.value}' → {'JA' if result else 'NEIN'}"
            )
        else:
            field_value = _get_field_value(mail, cond.field)
            result = _match_text(field_value, cond.value, cond.match_type)
            neg = "NOT " if cond.negate else ""
            # Truncate long field values so the detail log stays readable.
            short_val = field_value[:80] + "…" if len(field_value) > 80 else field_value
            details.append(
                f"{neg}{cond.field.value}('{short_val}') {cond.match_type.value} '{cond.value}' → {'JA' if result else 'NEIN'}"
            )

        if cond.negate:
            result = not result
        if not result:
            return False, details

    return True, details
|
||||
|
||||
|
||||
def execute_action(
|
||||
@@ -89,12 +212,18 @@ def apply_rules(
|
||||
smtp_config: dict | None = None,
|
||||
) -> list[dict]:
|
||||
results = []
|
||||
eval_details = []
|
||||
sorted_rules = sorted(rules, key=lambda r: r.priority)
|
||||
|
||||
for rule in sorted_rules:
|
||||
if not rule.enabled:
|
||||
continue
|
||||
if not evaluate_conditions(mail, rule.conditions):
|
||||
|
||||
matched, details = evaluate_conditions(mail, rule.conditions)
|
||||
eval_details.append({"rule": rule.name, "matched": matched, "details": details})
|
||||
|
||||
if not matched:
|
||||
logger.debug("Regel '%s' trifft NICHT zu auf Mail %s: %s", rule.name, mail.uid, details)
|
||||
continue
|
||||
|
||||
logger.info("Regel '%s' trifft auf Mail %s zu (Betreff: %s)", rule.name, mail.uid, mail.subject)
|
||||
@@ -113,4 +242,4 @@ def apply_rules(
|
||||
logger.info("stop_processing aktiv — keine weiteren Regeln für Mail %s", mail.uid)
|
||||
break
|
||||
|
||||
return results
|
||||
return results, eval_details
|
||||
|
||||
+72
-23
@@ -1,9 +1,11 @@
|
||||
import asyncio
|
||||
import email
|
||||
import email.utils
|
||||
import imaplib
|
||||
import logging
|
||||
import smtplib
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import datetime
|
||||
from email.header import decode_header
|
||||
from email.message import Message
|
||||
from email.mime.text import MIMEText
|
||||
@@ -19,6 +21,7 @@ class MailMessage:
|
||||
subject: str = ""
|
||||
body: str = ""
|
||||
has_attachment: bool = False
|
||||
date: datetime | None = None
|
||||
raw: Message | None = field(default=None, repr=False)
|
||||
|
||||
|
||||
@@ -35,6 +38,21 @@ def _decode_header_value(value: str | None) -> str:
|
||||
return " ".join(decoded)
|
||||
|
||||
|
||||
def _parse_date(msg: Message) -> datetime | None:
|
||||
date_str = msg.get("Date")
|
||||
if not date_str:
|
||||
return None
|
||||
try:
|
||||
parsed = email.utils.parsedate_to_datetime(date_str)
|
||||
# Convert to naive UTC datetime for consistent comparison
|
||||
if parsed.tzinfo is not None:
|
||||
from datetime import timezone
|
||||
parsed = parsed.astimezone(timezone.utc).replace(tzinfo=None)
|
||||
return parsed
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
|
||||
def _has_attachment(msg: Message) -> bool:
|
||||
if not msg.is_multipart():
|
||||
return False
|
||||
@@ -120,44 +138,75 @@ class IMAPClient:
|
||||
return False
|
||||
|
||||
def list_folders(self) -> list[str]:
    """Return all folder names on the server, sorted alphabetically.

    Returns an empty list when the LIST command fails; unparseable
    response lines are skipped with a debug log.
    """
    import re
    status, data = self.conn.list()
    if status != "OK":
        return []
    # IMAP LIST response line: (\Flags) "delimiter" "folder name".
    # NOTE(review): the delimiter group "(.?)" assumes a quoted one-char
    # delimiter; servers that answer NIL would not match — confirm against
    # the servers this is used with.
    pattern = re.compile(r'\(.*?\)\s+"(.?)"\s+(.*)')
    folders = []
    for item in data:
        if isinstance(item, bytes):
            line = item.decode("utf-8", errors="replace")
            match = pattern.match(line)
            if match:
                folders.append(match.group(2).strip().strip('"'))
            else:
                logger.debug("Konnte IMAP LIST Zeile nicht parsen: %s", line)
    logger.info("IMAP Ordner geladen: %d Ordner gefunden", len(folders))
    return sorted(folders)
|
||||
|
||||
def create_folder(self, folder_name: str) -> bool:
    """Create an IMAP folder on the server; returns True on success."""
    try:
        status, _ = self.conn.create(folder_name)
        if status != "OK":
            logger.error("Ordner erstellen fehlgeschlagen: %s", folder_name)
            return False
        logger.info("Ordner erstellt: %s", folder_name)
        return True
    except Exception as e:
        logger.error("Fehler beim Erstellen von Ordner '%s': %s", folder_name, e)
        return False
|
||||
|
||||
def fetch_unseen(self, folder: str = "INBOX") -> list[MailMessage]:
    """Legacy: Fetch unseen mails. Use fetch_all_uids + fetch_mail for processed-tracking."""
    unseen_uids = self.get_all_uids(folder, search="UNSEEN")
    return self.fetch_mails_by_uids(folder, unseen_uids)
|
||||
|
||||
def get_all_uids(self, folder: str = "INBOX", search: str = "ALL") -> list[str]:
    """Return the mail UIDs in *folder* matching the IMAP *search* criterion.

    Args:
        folder: Mailbox to select before searching.
        search: IMAP SEARCH criterion (e.g. "ALL", "UNSEEN").

    Returns:
        UIDs as decoded strings; an unsuccessful or empty SEARCH yields [].
    """
    self.conn.select(folder)
    status, data = self.conn.uid("SEARCH", None, search)
    if status != "OK" or not data[0]:
        return []
    return [uid.decode() if isinstance(uid, bytes) else str(uid) for uid in data[0].split()]
|
||||
def fetch_mail(self, uid: str) -> MailMessage | None:
    """Fetch one mail by UID from the currently selected folder.

    Returns None when the FETCH fails or returns no data.
    """
    status, msg_data = self.conn.uid("FETCH", uid, "(RFC822)")
    if status != "OK" or not msg_data[0]:
        return None
    parsed = email.message_from_bytes(msg_data[0][1])
    return MailMessage(
        uid=uid,
        from_addr=_decode_header_value(parsed.get("From")),
        to_addr=_decode_header_value(parsed.get("To")),
        subject=_decode_header_value(parsed.get("Subject")),
        body=_extract_body(parsed),
        has_attachment=_has_attachment(parsed),
        date=_parse_date(parsed),
        raw=parsed,
    )
|
||||
|
||||
def fetch_mails_by_uids(self, folder: str, uids: list[str]) -> list[MailMessage]:
    """Fetch the given UIDs from *folder*, delegating to fetch_mail.

    UIDs that cannot be fetched are silently skipped so one bad mail does
    not abort the whole batch.
    """
    if not uids:
        return []
    self.conn.select(folder)
    messages = []
    for uid in uids:
        # Accept both bytes and str UIDs from callers.
        uid_str = uid.decode() if isinstance(uid, bytes) else str(uid)
        mail = self.fetch_mail(uid_str)
        if mail:
            messages.append(mail)
    return messages
|
||||
|
||||
def move_mail(self, uid: str, target_folder: str) -> bool:
|
||||
|
||||
@@ -0,0 +1,66 @@
|
||||
import logging
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.database import SessionLocal
|
||||
from app.models.db_models import FilterLog, LogLevel
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def write_log(
    message: str,
    level: LogLevel = LogLevel.INFO,
    account_id: int | None = None,
    account_name: str = "",
    rule_name: str | None = None,
    action_type: str | None = None,
    mail_uid: str | None = None,
    mail_subject: str | None = None,
    mail_from: str | None = None,
    folder: str | None = None,
    details: str | None = None,
    db: Session | None = None,
) -> None:
    """Persist one FilterLog entry.

    Never raises: a failed insert is logged and rolled back. Long fields are
    truncated to their column limits. If *db* is omitted, a session is
    created and closed internally.
    """
    owns_session = db is None
    if owns_session:
        db = SessionLocal()
    try:
        db.add(FilterLog(
            account_id=account_id,
            account_name=account_name,
            level=level,
            message=message,
            rule_name=rule_name,
            action_type=action_type,
            mail_uid=mail_uid,
            # Truncate to column sizes so oversized values cannot fail the insert.
            mail_subject=mail_subject[:500] if mail_subject else None,
            mail_from=mail_from[:255] if mail_from else None,
            folder=folder,
            details=details[:2000] if details else None,
        ))
        db.commit()
    except Exception as e:
        logger.error("Fehler beim Schreiben des Logs: %s", e)
        db.rollback()
    finally:
        if owns_session:
            db.close()
|
||||
|
||||
|
||||
def cleanup_old_logs(days: int = 30, db: Session | None = None) -> int:
    """Delete FilterLog rows older than *days* days.

    Returns:
        The number of rows removed.
    """
    owns_session = db is None
    if owns_session:
        db = SessionLocal()
    try:
        threshold = datetime.utcnow() - timedelta(days=days)
        removed = db.query(FilterLog).filter(FilterLog.created_at < threshold).delete()
        db.commit()
        return removed
    finally:
        if owns_session:
            db.close()
|
||||
+193
-19
@@ -6,10 +6,11 @@ from apscheduler.schedulers.asyncio import AsyncIOScheduler
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.database import SessionLocal
|
||||
from app.models.db_models import Account, FilterRule
|
||||
from app.models.db_models import Account, FilterRule, LogLevel, ProcessedMail
|
||||
from app.services.encryption import decrypt
|
||||
from app.services.filter_engine import apply_rules
|
||||
from app.services.imap_client import IMAPClient
|
||||
from app.services.log_service import cleanup_old_logs, write_log
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -40,11 +41,21 @@ def _poll_account_sync(account_id: int) -> None:
|
||||
.order_by(FilterRule.priority)
|
||||
.all()
|
||||
)
|
||||
|
||||
write_log(
|
||||
message=f"Poll gestartet ({len(rules)} aktive Regel(n))",
|
||||
level=LogLevel.INFO,
|
||||
account_id=account.id,
|
||||
account_name=account.name,
|
||||
details=", ".join(r.name for r in rules) if rules else "Keine Regeln konfiguriert",
|
||||
db=db,
|
||||
)
|
||||
|
||||
if not rules:
|
||||
logger.debug("Keine aktiven Regeln für Konto '%s'", account.name)
|
||||
account.last_poll_at = datetime.utcnow()
|
||||
db.commit()
|
||||
return
|
||||
|
||||
# Collect unique source folders
|
||||
source_folders = list({r.source_folder for r in rules})
|
||||
smtp_config = _build_smtp_config(account)
|
||||
|
||||
@@ -56,36 +67,189 @@ def _poll_account_sync(account_id: int) -> None:
|
||||
use_ssl=account.use_ssl,
|
||||
)
|
||||
|
||||
total_mails = 0
|
||||
total_new = 0
|
||||
total_matched = 0
|
||||
total_actions = 0
|
||||
total_errors = 0
|
||||
|
||||
with client:
|
||||
for folder in source_folders:
|
||||
folder_rules = [r for r in rules if r.source_folder == folder]
|
||||
|
||||
# Alle UIDs im Ordner holen
|
||||
try:
|
||||
messages = client.fetch_unseen(folder)
|
||||
all_uids = client.get_all_uids(folder, search="ALL")
|
||||
except Exception as e:
|
||||
logger.error("Fehler beim Abrufen von %s/%s: %s", account.name, folder, e)
|
||||
write_log(
|
||||
message=f"Fehler beim Abrufen von Ordner '{folder}'",
|
||||
level=LogLevel.ERROR,
|
||||
account_id=account.id,
|
||||
account_name=account.name,
|
||||
folder=folder,
|
||||
details=str(e),
|
||||
db=db,
|
||||
)
|
||||
continue
|
||||
|
||||
if messages:
|
||||
logger.info(
|
||||
"Konto '%s', Ordner '%s': %d ungelesene Mails",
|
||||
account.name, folder, len(messages),
|
||||
)
|
||||
total_mails += len(all_uids)
|
||||
|
||||
for mail in messages:
|
||||
results = apply_rules(client, mail, folder_rules, smtp_config)
|
||||
for r in results:
|
||||
level = logging.INFO if r["success"] else logging.ERROR
|
||||
logger.log(
|
||||
level,
|
||||
"Konto '%s': %s %s -> %s (%s)",
|
||||
account.name, r["action"], r.get("parameter", ""),
|
||||
"OK" if r["success"] else "FEHLER", r["rule"],
|
||||
# Bereits verarbeitete UIDs aus DB laden
|
||||
processed_uids = set(
|
||||
row[0] for row in db.query(ProcessedMail.mail_uid)
|
||||
.filter(
|
||||
ProcessedMail.account_id == account.id,
|
||||
ProcessedMail.folder == folder,
|
||||
)
|
||||
.all()
|
||||
)
|
||||
|
||||
# Neue (unverarbeitete) UIDs ermitteln
|
||||
new_uids = [uid for uid in all_uids if uid not in processed_uids]
|
||||
total_new += len(new_uids)
|
||||
|
||||
if not new_uids:
|
||||
write_log(
|
||||
message=f"Keine neuen Mails in '{folder}' ({len(all_uids)} gesamt, alle bereits verarbeitet)",
|
||||
level=LogLevel.INFO,
|
||||
account_id=account.id,
|
||||
account_name=account.name,
|
||||
folder=folder,
|
||||
db=db,
|
||||
)
|
||||
continue
|
||||
|
||||
write_log(
|
||||
message=f"{len(new_uids)} neue Mail(s) in '{folder}' ({len(all_uids)} gesamt, {len(processed_uids)} bereits verarbeitet)",
|
||||
level=LogLevel.INFO,
|
||||
account_id=account.id,
|
||||
account_name=account.name,
|
||||
folder=folder,
|
||||
db=db,
|
||||
)
|
||||
|
||||
# Neue Mails abrufen und verarbeiten
|
||||
for uid in new_uids:
|
||||
try:
|
||||
mail = client.fetch_mail(uid)
|
||||
except Exception as e:
|
||||
write_log(
|
||||
message=f"Fehler beim Abrufen von Mail {uid}",
|
||||
level=LogLevel.ERROR,
|
||||
account_id=account.id,
|
||||
account_name=account.name,
|
||||
mail_uid=uid,
|
||||
folder=folder,
|
||||
details=str(e),
|
||||
db=db,
|
||||
)
|
||||
continue
|
||||
|
||||
if not mail:
|
||||
continue
|
||||
|
||||
results, eval_details = apply_rules(client, mail, folder_rules, smtp_config)
|
||||
|
||||
# Eval-Details für Log aufbereiten
|
||||
eval_summary = []
|
||||
for ev in eval_details:
|
||||
status = "TREFFER" if ev["matched"] else "kein Treffer"
|
||||
checks = " | ".join(ev["details"])
|
||||
eval_summary.append(f"Regel '{ev['rule']}': {status} [{checks}]")
|
||||
|
||||
if not results:
|
||||
write_log(
|
||||
message=f"Keine Regel trifft zu",
|
||||
level=LogLevel.INFO,
|
||||
account_id=account.id,
|
||||
account_name=account.name,
|
||||
mail_uid=mail.uid,
|
||||
mail_subject=mail.subject,
|
||||
mail_from=mail.from_addr,
|
||||
folder=folder,
|
||||
details="\n".join(eval_summary),
|
||||
db=db,
|
||||
)
|
||||
else:
|
||||
total_matched += 1
|
||||
for r in results:
|
||||
action_label = r["action"]
|
||||
param = r.get("parameter", "")
|
||||
if param:
|
||||
action_label += f" → {param}"
|
||||
|
||||
if r["success"]:
|
||||
total_actions += 1
|
||||
write_log(
|
||||
message=f"Aktion ausgeführt: {action_label}",
|
||||
level=LogLevel.SUCCESS,
|
||||
account_id=account.id,
|
||||
account_name=account.name,
|
||||
rule_name=r["rule"],
|
||||
action_type=r["action"],
|
||||
mail_uid=r["mail_uid"],
|
||||
mail_subject=mail.subject,
|
||||
mail_from=mail.from_addr,
|
||||
folder=folder,
|
||||
details=param,
|
||||
db=db,
|
||||
)
|
||||
else:
|
||||
total_errors += 1
|
||||
write_log(
|
||||
message=f"Aktion fehlgeschlagen: {action_label}",
|
||||
level=LogLevel.ERROR,
|
||||
account_id=account.id,
|
||||
account_name=account.name,
|
||||
rule_name=r["rule"],
|
||||
action_type=r["action"],
|
||||
mail_uid=r["mail_uid"],
|
||||
mail_subject=mail.subject,
|
||||
mail_from=mail.from_addr,
|
||||
folder=folder,
|
||||
details=param,
|
||||
db=db,
|
||||
)
|
||||
|
||||
# Mail als verarbeitet markieren
|
||||
db.add(ProcessedMail(
|
||||
account_id=account.id,
|
||||
folder=folder,
|
||||
mail_uid=mail.uid,
|
||||
mail_subject=mail.subject[:500] if mail.subject else None,
|
||||
mail_from=mail.from_addr[:255] if mail.from_addr else None,
|
||||
))
|
||||
db.flush()
|
||||
|
||||
# Poll-Zusammenfassung
|
||||
summary_parts = [
|
||||
f"{total_mails} Mail(s) im Ordner",
|
||||
f"{total_new} neu",
|
||||
f"{total_matched} Treffer",
|
||||
f"{total_actions} Aktion(en)",
|
||||
]
|
||||
if total_errors > 0:
|
||||
summary_parts.append(f"{total_errors} Fehler")
|
||||
|
||||
write_log(
|
||||
message=f"Poll abgeschlossen: {', '.join(summary_parts)}",
|
||||
level=LogLevel.ERROR if total_errors > 0 else LogLevel.SUCCESS if total_actions > 0 else LogLevel.INFO,
|
||||
account_id=account.id,
|
||||
account_name=account.name,
|
||||
db=db,
|
||||
)
|
||||
|
||||
account.last_poll_at = datetime.utcnow()
|
||||
db.commit()
|
||||
except Exception as e:
|
||||
logger.error("Fehler beim Polling von Konto %s: %s", account_id, e)
|
||||
db.rollback()
|
||||
write_log(
|
||||
message=f"Polling fehlgeschlagen",
|
||||
level=LogLevel.ERROR,
|
||||
account_id=account_id,
|
||||
details=str(e),
|
||||
)
|
||||
finally:
|
||||
db.close()
|
||||
|
||||
@@ -128,6 +292,16 @@ def start_scheduler() -> None:
|
||||
add_account_job(account)
|
||||
finally:
|
||||
db.close()
|
||||
|
||||
# Täglicher Cleanup alter Logs
|
||||
scheduler.add_job(
|
||||
lambda: asyncio.get_event_loop().run_in_executor(None, cleanup_old_logs, 30),
|
||||
"interval",
|
||||
hours=24,
|
||||
id="cleanup_logs",
|
||||
replace_existing=True,
|
||||
)
|
||||
|
||||
scheduler.start()
|
||||
logger.info("Scheduler gestartet mit %d Jobs", len(scheduler.get_jobs()))
|
||||
|
||||
|
||||
Reference in New Issue
Block a user