Auto-commit local changes before build (2026-01-01 16:09:56)

This commit is contained in:
Ivo Oskamp 2026-01-01 16:09:56 +01:00
parent 673db45f16
commit 951382e580
89 changed files with 21514 additions and 0 deletions

0
.gitignore vendored Normal file
View File

0
LICENSE Normal file
View File

0
README.md Normal file
View File

281
build-and-push.sh Normal file
View File

@ -0,0 +1,281 @@
#!/usr/bin/env bash
set -euo pipefail
# ============================================================================
# build-and-push.sh
# Location: repo root (e.g. /docker/develop/backup-monitoring)
#
# Purpose:
#   - Automatic version bump:
#       1 = patch, 2 = minor, 3 = major, t = test
#   - Test builds: only update :dev (no commit/tag)
#   - Release builds: update version.txt, commit, tag, push (to the current branch)
#   - Build & push Docker images for each service under ./containers/*
#     (see COMPOSE_DIR below; the directory name is configurable)
#   - Preflight checks: Docker daemon up, logged in to registry, valid names/tags
#   - Summary: show all images + tags built and pushed
#   - Branch visibility:
#       - Shows currently checked out branch (authoritative)
#       - Reads .last-branch for info (if present) when BRANCH is not set
#       - Writes the current branch back to .last-branch at the end
#
# Usage:
#   BRANCH=<branch> ./build-and-push.sh [bump]   # BRANCH is optional; informative only
#   ./build-and-push.sh [bump]
#   If [bump] is omitted, you will be prompted (default = t).
#
# Tagging rules:
#   - Release build (1/2/3): push :<version>, :dev, :latest
#   - Test build (t): push only :dev (no :latest, no version tag)
# ============================================================================

# Registry / repo configuration.
DOCKER_REGISTRY="gitea.oskamp.info"
DOCKER_NAMESPACE="ivooskamp"
VERSION_FILE="version.txt"
START_VERSION="v0.1.0"
# Directory holding one sub-directory (with Dockerfile) per service.
COMPOSE_DIR="containers"
LAST_BRANCH_FILE=".last-branch" # stored in repo root

# --- Input: prompt if missing ------------------------------------------------
BUMP="${1:-}"
if [[ -z "${BUMP}" ]]; then
  echo "Select bump type: [1] patch, [2] minor, [3] major, [t] test (default: t)"
  read -r BUMP
  BUMP="${BUMP:-t}"
fi
if [[ "$BUMP" != "1" && "$BUMP" != "2" && "$BUMP" != "3" && "$BUMP" != "t" ]]; then
  echo "[ERROR] Unknown bump type '$BUMP' (use 1, 2, 3, or t)."
  exit 1
fi
# --- Helpers -----------------------------------------------------------------
# Print the current version: the version file's contents stripped of all
# whitespace, or START_VERSION when no version file exists yet.
read_version() {
  if [[ ! -f "$VERSION_FILE" ]]; then
    echo "$START_VERSION"
    return 0
  fi
  tr -d ' \t\n\r' < "$VERSION_FILE"
}
# Overwrite $VERSION_FILE with the given version string (newline-terminated).
write_version() {
  printf '%s\n' "$1" > "$VERSION_FILE"
}
# Bump a semver-like version string.
#   $1 - current version (vMAJOR.MINOR.PATCH, e.g. v1.2.3)
#   $2 - bump kind: 1 = patch, 2 = minor, 3 = major
# Prints the new version; exits 1 on malformed input or unknown kind.
bump_version() {
  local cur="$1"
  local kind="$2"
  local core="${cur#v}"
  local MA MI PA
  IFS='.' read -r MA MI PA <<< "$core"
  # Guard against a malformed version file: bash arithmetic treats an empty
  # field as 0, so e.g. "v1.2" would silently become "v1.2.1". Fail loudly.
  if [[ ! "$MA" =~ ^[0-9]+$ || ! "$MI" =~ ^[0-9]+$ || ! "$PA" =~ ^[0-9]+$ ]]; then
    echo "[ERROR] Malformed version '$cur' (expected vMAJOR.MINOR.PATCH)." >&2
    exit 1
  fi
  case "$kind" in
    1) PA=$((PA + 1));;
    2) MI=$((MI + 1)); PA=0;;
    3) MA=$((MA + 1)); MI=0; PA=0;;
    *) echo "[ERROR] Unknown bump kind"; exit 1;;
  esac
  echo "v${MA}.${MI}.${PA}"
}
# Abort unless the Docker daemon answers. 'docker info' fails both when the
# daemon is down and when the user lacks permission on the Docker socket.
check_docker_ready() {
  if docker info >/dev/null 2>&1; then
    return 0
  fi
  echo "[ERROR] Docker daemon not reachable. Is Docker running and do you have permission to use it?"
  exit 1
}
# Verify ~/.docker/config.json exists and mentions our registry host;
# otherwise ask the user to 'docker login' first.
ensure_registry_login() {
  local cfg="${HOME}/.docker/config.json"
  if [[ ! -f "$cfg" ]]; then
    echo "[ERROR] Docker config not found at $cfg. Please login: docker login ${DOCKER_REGISTRY}"
    exit 1
  fi
  if grep -q "\"${DOCKER_REGISTRY}\"" "$cfg"; then
    return 0
  fi
  echo "[ERROR] No registry auth found for ${DOCKER_REGISTRY}. Please run: docker login ${DOCKER_REGISTRY}"
  exit 1
}
# Check that a repository path component is a valid Docker image name part:
# lowercase alphanumerics with single '.', '_' or '-' separators.
validate_repo_component() {
  local comp="$1"
  if [[ "$comp" =~ ^[a-z0-9]+([._-][a-z0-9]+)*$ ]]; then
    return 0
  fi
  echo "[ERROR] Invalid repository component '$comp'."
  echo " Must match: ^[a-z0-9]+([._-][a-z0-9]+)*$ (lowercase, digits, ., _, - as separators)."
  return 1
}
# Check Docker tag syntax: 1-128 chars from [A-Za-z0-9_.-], first char must
# be alphanumeric or underscore (no leading '.' or '-').
validate_tag() {
  local tag="$1"
  local len="${#tag}"
  if (( len < 1 || len > 128 )); then
    echo "[ERROR] Invalid tag length ($len). Must be between 1 and 128 characters."
    return 1
  fi
  if [[ "$tag" =~ ^[A-Za-z0-9_][A-Za-z0-9_.-]*$ ]]; then
    return 0
  fi
  echo "[ERROR] Invalid tag '$tag'. Allowed: [A-Za-z0-9_.-], must start with alphanumeric or underscore."
  return 1
}
# --- Preflight ---------------------------------------------------------------
if [[ ! -d ".git" ]]; then
  echo "[ERROR] Not a git repository (.git missing)."
  exit 1
fi
if [[ ! -d "$COMPOSE_DIR" ]]; then
  # Keep this message in sync with COMPOSE_DIR: it used to hardcode
  # './compose' while the configured directory is 'containers'.
  echo "[ERROR] '$COMPOSE_DIR' directory missing. Expected ./$COMPOSE_DIR/<service>/ with a Dockerfile."
  exit 1
fi
check_docker_ready
ensure_registry_login
validate_repo_component "$DOCKER_NAMESPACE"

# Detect currently checked out branch (authoritative for this script)
DETECTED_BRANCH="$(git branch --show-current 2>/dev/null || true)"
if [[ -z "$DETECTED_BRANCH" ]]; then
  DETECTED_BRANCH="$(git symbolic-ref --quiet --short HEAD 2>/dev/null || true)"
fi
if [[ -z "$DETECTED_BRANCH" ]]; then
  # Detached HEAD: try to derive the branch name from the upstream ref.
  UPSTREAM_REF_DERIVED="$(git rev-parse --abbrev-ref --symbolic-full-name @{u} 2>/dev/null || true)"
  if [[ -n "$UPSTREAM_REF_DERIVED" ]]; then
    DETECTED_BRANCH="${UPSTREAM_REF_DERIVED#origin/}"
  fi
fi
if [[ -z "$DETECTED_BRANCH" ]]; then
  DETECTED_BRANCH="main"
fi

# Optional signals: BRANCH env and .last-branch (informational only)
ENV_BRANCH="${BRANCH:-}"
LAST_BRANCH_FILE_PATH="$(pwd)/$LAST_BRANCH_FILE"
LAST_BRANCH_VALUE=""
if [[ -z "$ENV_BRANCH" && -f "$LAST_BRANCH_FILE_PATH" ]]; then
  LAST_BRANCH_VALUE="$(tr -d ' \t\n\r' < "$LAST_BRANCH_FILE_PATH")"
fi
UPSTREAM_REF="$(git rev-parse --abbrev-ref --symbolic-full-name @{u} 2>/dev/null || echo "origin/$DETECTED_BRANCH")"
HEAD_SHA="$(git rev-parse --short HEAD 2>/dev/null || echo "unknown")"
echo "[INFO] Repo: $(pwd)"
echo "[INFO] Current branch: $DETECTED_BRANCH"
echo "[INFO] Upstream: $UPSTREAM_REF"
echo "[INFO] HEAD (sha): $HEAD_SHA"
if [[ -n "$ENV_BRANCH" && "$ENV_BRANCH" != "$DETECTED_BRANCH" ]]; then
  echo "[WARNING] BRANCH='$ENV_BRANCH' differs from checked out branch '$DETECTED_BRANCH'."
  echo "[WARNING] This script does not switch branches; continuing on '$DETECTED_BRANCH'."
fi
if [[ -n "$LAST_BRANCH_VALUE" && "$LAST_BRANCH_VALUE" != "$DETECTED_BRANCH" && -z "$ENV_BRANCH" ]]; then
  echo "[INFO] .last-branch suggests '$LAST_BRANCH_VALUE', but current checkout is '$DETECTED_BRANCH'."
  echo "[INFO] If you intended to build '$LAST_BRANCH_VALUE', switch branches first (use update-and-build.sh)."
fi
# --- Versioning --------------------------------------------------------------
CURRENT_VERSION="$(read_version)"
NEW_VERSION="$CURRENT_VERSION"
DO_TAG_AND_BUMP=true
if [[ "$BUMP" == "t" ]]; then
  echo "[INFO] Test build: keeping version $CURRENT_VERSION; will only update :dev."
  DO_TAG_AND_BUMP=false
else
  NEW_VERSION="$(bump_version "$CURRENT_VERSION" "$BUMP")"
  echo "[INFO] New version: $NEW_VERSION"
fi
# Validate every tag we are about to push before doing any work
# (validate_tag returns non-zero -> set -e aborts the script).
if $DO_TAG_AND_BUMP; then
  validate_tag "$NEW_VERSION"
  validate_tag "latest"
fi
validate_tag "dev"

# --- Version update + VCS ops (release builds only) --------------------------
if $DO_TAG_AND_BUMP; then
  echo "[INFO] Writing $NEW_VERSION to $VERSION_FILE"
  write_version "$NEW_VERSION"
  echo "[INFO] Git add + commit (branch: $DETECTED_BRANCH)"
  git add "$VERSION_FILE"
  git commit -m "Release $NEW_VERSION on branch $DETECTED_BRANCH (bump type $BUMP)"
  echo "[INFO] Git tag $NEW_VERSION"
  git tag -a "$NEW_VERSION" -m "Release $NEW_VERSION"
  echo "[INFO] Git push + tags"
  git push origin "$DETECTED_BRANCH"
  git push --tags
else
  echo "[INFO] Skipping commit/tagging (test build)."
fi

# --- Build & push per service ------------------------------------------------
# nullglob: an empty services directory yields an empty array instead of the
# literal '<dir>/*' pattern.
shopt -s nullglob
services=( "$COMPOSE_DIR"/* )
if [[ ${#services[@]} -eq 0 ]]; then
  echo "[ERROR] No services found under $COMPOSE_DIR"
  exit 1
fi
BUILT_IMAGES=()
for svc_path in "${services[@]}"; do
  [[ -d "$svc_path" ]] || continue  # ignore stray files at the top level
  svc="$(basename "$svc_path")"
  dockerfile="$svc_path/Dockerfile"
  # Non-zero return aborts the whole script under set -e.
  validate_repo_component "$svc"
  if [[ ! -f "$dockerfile" ]]; then
    echo "[WARNING] Skipping '${svc}': Dockerfile not found in ${svc_path}"
    continue
  fi
  IMAGE_BASE="${DOCKER_REGISTRY}/${DOCKER_NAMESPACE}/${svc}"
  if $DO_TAG_AND_BUMP; then
    # Release: build once with three tags, then push all of them.
    echo "============================================================"
    echo "[INFO] Building ${svc} -> tags: ${NEW_VERSION}, dev, latest"
    echo "============================================================"
    docker build \
      -t "${IMAGE_BASE}:${NEW_VERSION}" \
      -t "${IMAGE_BASE}:dev" \
      -t "${IMAGE_BASE}:latest" \
      "$svc_path"
    docker push "${IMAGE_BASE}:${NEW_VERSION}"
    docker push "${IMAGE_BASE}:dev"
    docker push "${IMAGE_BASE}:latest"
    BUILT_IMAGES+=("${IMAGE_BASE}:${NEW_VERSION}" "${IMAGE_BASE}:dev" "${IMAGE_BASE}:latest")
  else
    # Test build: only the :dev tag moves.
    echo "============================================================"
    echo "[INFO] Test build ${svc} -> tag: dev"
    echo "============================================================"
    docker build -t "${IMAGE_BASE}:dev" "$svc_path"
    docker push "${IMAGE_BASE}:dev"
    BUILT_IMAGES+=("${IMAGE_BASE}:dev")
  fi
done
# --- Persist current branch to .last-branch ----------------------------------
# (This helps script 1 to preselect next time, and is informative if you run script 2 standalone)
echo "$DETECTED_BRANCH" > "$LAST_BRANCH_FILE_PATH"

# --- Summary -----------------------------------------------------------------
echo ""
echo "============================================================"
echo "[SUMMARY] Build & push complete (branch: $DETECTED_BRANCH)"
if $DO_TAG_AND_BUMP; then
  echo "[INFO] Release version: $NEW_VERSION"
else
  echo "[INFO] Test build (no version bump)"
fi
echo "[INFO] Images pushed:"
# Guard the expansion: with 'set -u', "${BUILT_IMAGES[@]}" on an EMPTY array
# is an 'unbound variable' error on bash < 4.4 (e.g. macOS /bin/bash 3.2),
# which would abort the summary when every service was skipped.
if (( ${#BUILT_IMAGES[@]} > 0 )); then
  for img in "${BUILT_IMAGES[@]}"; do
    echo "  - $img"
  done
else
  echo "  (none)"
fi
echo "============================================================"

View File

@ -0,0 +1,20 @@
# Runtime image for the Flask backend, served by gunicorn.
FROM python:3.12-slim

# Do not write .pyc files; do not buffer stdout/stderr (container logs).
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1

WORKDIR /app

# Install dependencies first so the layer is cached across source changes.
COPY requirements.txt ./requirements.txt
RUN pip install --no-cache-dir -r requirements.txt

COPY src ./src
ENV PYTHONPATH=/app/src
ENV APP_PORT=8080
EXPOSE 8080

# Use the application factory from backend.app
CMD ["gunicorn", "-b", "0.0.0.0:8080", "backend.app:create_app()"]

View File

@ -0,0 +1,8 @@
Flask==3.0.3
Flask-SQLAlchemy==3.1.1
Flask-Migrate==4.0.7
Flask-Login==0.6.3
psycopg2-binary==2.9.9
python-dateutil==2.9.0.post0
gunicorn==23.0.0
requests==2.32.3

View File

@ -0,0 +1 @@
# backend package init

View File

@ -0,0 +1,123 @@
import os
from flask import Flask, redirect, request, session, url_for
from flask_migrate import Migrate
from flask_login import current_user
from .config import Config
from .database import db
from .models import User # noqa: F401
from .auth import login_manager
from .auth.routes import auth_bp
from .main.routes import main_bp
from .migrations import run_migrations
from .auto_importer_service import start_auto_importer
def _get_today_ui_date() -> str:
"""Return today's date (YYYY-MM-DD) in the configured UI timezone.
Falls back to Europe/Amsterdam if no setting is available.
"""
from datetime import datetime
try:
from zoneinfo import ZoneInfo
except Exception:
ZoneInfo = None # type: ignore
tz_name = "Europe/Amsterdam"
try:
from .models import SystemSettings
settings = SystemSettings.query.first()
if settings and getattr(settings, "ui_timezone", None):
tz_name = settings.ui_timezone
except Exception:
tz_name = "Europe/Amsterdam"
if ZoneInfo:
try:
tz = ZoneInfo(tz_name)
return datetime.now(tz).date().isoformat()
except Exception:
pass
return datetime.utcnow().date().isoformat()
def create_app():
    """Application factory: build and fully initialize the Flask app.

    Wires config, SQLAlchemy, Flask-Migrate, Flask-Login and the two
    blueprints, registers the daily-dashboard redirect hook and /health,
    runs create_all + custom migrations, and starts the background mail
    importer thread.
    """
    # templates/ and static/ live two levels above this package
    # (repo layout: src/backend/app.py -> ../../templates, ../../static).
    base_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", ".."))
    templates_dir = os.path.join(base_dir, "templates")
    static_dir = os.path.join(base_dir, "static")
    app = Flask(__name__, template_folder=templates_dir, static_folder=static_dir)
    config = Config()
    app.config.from_object(config)
    db.init_app(app)
    Migrate(app, db)
    login_manager.init_app(app)
    app.register_blueprint(auth_bp)
    app.register_blueprint(main_bp)

    @app.before_request
    def _redirect_to_dashboard_on_first_open_each_day():
        """Redirect the first authenticated page view of the day to the dashboard.

        This ensures that when a user opens the site for the first time each day,
        they land on the dashboard regardless of the bookmarked/deeplinked URL.
        """
        # Only for normal page loads.
        if request.method != "GET":
            return None
        # Do not interfere with static assets.
        if request.path.startswith("/static"):
            return None
        # Do not interfere with API calls.
        if request.path.startswith("/api/"):
            return None
        # Only for authenticated users.
        try:
            if not current_user or not current_user.is_authenticated:
                return None
        except Exception:
            return None
        # Exempt auth blueprint routes and the dashboard itself.
        endpoint = request.endpoint or ""
        if endpoint.startswith("auth."):
            return None
        if endpoint == "main.dashboard":
            # Mark dashboard as seen for today.
            session["daily_dashboard_seen"] = _get_today_ui_date()
            return None
        # First GET of the (UI-timezone) day: remember it, then redirect.
        today = _get_today_ui_date()
        seen = (session.get("daily_dashboard_seen") or "").strip()
        if seen != today:
            session["daily_dashboard_seen"] = today
            return redirect(url_for("main.dashboard"))
        return None

    @app.get("/health")
    def health():
        # Liveness probe endpoint.
        return {"status": "ok"}

    with app.app_context():
        # NOTE(review): create_all + migrations run on every factory call;
        # presumably safe for a single-process deployment — confirm for
        # multi-worker gunicorn setups.
        print("[app] Initializing database and running migrations...")
        db.create_all()
        run_migrations()
        print("[app] Database initialization and migrations finished.")

    # Start automatic mail importer background thread
    start_auto_importer(app)
    return app

View File

@ -0,0 +1,56 @@
from __future__ import annotations
from datetime import datetime, timedelta
from typing import Optional
from flask_login import current_user
from .database import db
from .models import AdminLog
def log_admin_event(
    event_type: str,
    message: str,
    details: Optional[str] = None,
    *,
    username: Optional[str] = None,
    commit: bool = True,
) -> None:
    """Write an entry to the in-app AdminLog table.

    - This is the source for the /logging page in the website (not container logs).
    - Retention: keep only the last 7 days.
    - If commit=False, the caller is responsible for committing/rolling back.

    Args:
        event_type: short event tag; truncated to 64 chars ("event" if empty).
        message: human-readable summary; truncated to 2000 chars.
        details: optional long-form payload, stored as-is.
        username: explicit actor name; defaults to the logged-in user, if any.
        commit: when True, commit here and roll back silently on failure.
    """
    # Resolve username (prefer explicit). current_user may be unusable
    # outside a request context, hence the broad except.
    if username is None:
        try:
            username = current_user.username if getattr(current_user, "is_authenticated", False) else None
        except Exception:
            username = None
    entry = AdminLog(
        user=username,
        event_type=(event_type or "event")[:64],
        message=(message or "")[:2000],
        details=details,
    )
    db.session.add(entry)
    # Enforce retention: keep only the last 7 days
    try:
        cutoff = datetime.utcnow() - timedelta(days=7)
        AdminLog.query.filter(AdminLog.created_at < cutoff).delete(synchronize_session=False)
    except Exception:
        # Never block the main action because of retention cleanup.
        pass
    if not commit:
        return
    try:
        db.session.commit()
    except Exception:
        # If logging fails, do not raise and do not print to container logs.
        db.session.rollback()

View File

@ -0,0 +1,12 @@
from flask_login import LoginManager
from ..models import User
login_manager = LoginManager()
login_manager.login_view = "auth.login"


@login_manager.user_loader
def load_user(user_id: str):
    """Flask-Login user loader: map the session-stored id to a User row.

    Returns None for missing, non-numeric, or unknown ids so that a tampered
    or stale session cookie degrades to "anonymous" instead of raising an
    unhandled ValueError (HTTP 500) from int().
    """
    if not user_id:
        return None
    try:
        numeric_id = int(user_id)
    except (TypeError, ValueError):
        # The cookie payload is client-supplied; never trust its format.
        return None
    return User.query.get(numeric_id)

View File

@ -0,0 +1,156 @@
import random
from functools import wraps
from flask import (
Blueprint,
render_template,
redirect,
url_for,
flash,
request,
session,
)
from flask_login import login_user, logout_user, login_required, current_user
from ..database import db
from ..models import User
auth_bp = Blueprint("auth", __name__, url_prefix="/auth")
def admin_exists() -> bool:
    """True when at least one user row has role == 'admin' (id-only query)."""
    return db.session.query(User.id).filter_by(role="admin").first() is not None
def generate_captcha():
    """Build a simple arithmetic captcha.

    Returns (question, answer): question like "3 + 7", answer as a string
    (string form is convenient for session storage and form comparison).
    """
    left, right = random.randint(1, 9), random.randint(1, 9)
    return f"{left} + {right}", str(left + right)
def captcha_required(func):
    """Decorator: on POST, verify the arithmetic captcha before calling the view.

    On mismatch it flashes an error and re-renders the login form with a
    fresh captcha, preserving the submitted username. GET requests pass
    straight through.
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        if request.method == "POST":
            expected = session.get("captcha_answer")
            provided = (request.form.get("captcha") or "").strip()
            if not expected or provided != expected:
                flash("Invalid captcha answer. Please try again.", "danger")
                # regenerate captcha for re-render
                question, answer = generate_captcha()
                session["captcha_answer"] = answer
                return render_template(
                    "auth/login.html",
                    captcha_question=question,
                    username=request.form.get("username", ""),
                )
        return func(*args, **kwargs)
    return wrapper
@auth_bp.route("/login", methods=["GET", "POST"])
@captcha_required
def login():
    """Render the login form (GET) or authenticate credentials (POST)."""
    if request.method == "GET":
        # First-run bootstrap: force admin creation before anyone can log in.
        if not admin_exists():
            return redirect(url_for("auth.initial_setup"))
        question, answer = generate_captcha()
        session["captcha_answer"] = answer
        return render_template("auth/login.html", captcha_question=question)
    # POST (the captcha was already validated by @captcha_required)
    username = (request.form.get("username") or "").strip()
    password = request.form.get("password") or ""
    user = User.query.filter_by(username=username).first()
    if not user or not user.check_password(password):
        flash("Invalid username or password.", "danger")
        question, answer = generate_captcha()
        session["captcha_answer"] = answer
        return render_template(
            "auth/login.html", captcha_question=question, username=username
        )
    login_user(user)
    try:
        # NOTE(review): assumes User.roles is list-like; if it is a
        # comma-separated string, [0] yields a single character — verify
        # against the User model.
        session["active_role"] = user.roles[0]
    except Exception:
        # Fallback: first entry of the comma-separated 'role' column.
        session["active_role"] = (getattr(user, "role", "viewer") or "viewer").split(",")[0].strip() or "viewer"
    flash("You are now logged in.", "success")
    return redirect(url_for("main.dashboard"))
@auth_bp.route("/logout")
@login_required
def logout():
    """End the Flask-Login session and return to the login page."""
    logout_user()
    try:
        # Clear the per-session role selection as well.
        session.pop("active_role", None)
    except Exception:
        pass
    flash("You have been logged out.", "info")
    return redirect(url_for("auth.login"))
@auth_bp.route("/initial-setup", methods=["GET", "POST"])
def initial_setup():
    """One-time bootstrap: create the first admin account.

    Disabled as soon as any admin exists. Validates non-empty credentials,
    matching confirmation, and username uniqueness before creating the user.
    """
    if admin_exists():
        flash("An admin user already exists. Please log in.", "info")
        return redirect(url_for("auth.login"))
    if request.method == "POST":
        username = (request.form.get("username") or "").strip()
        password = request.form.get("password") or ""
        confirm = request.form.get("confirm_password") or ""
        if not username or not password:
            flash("Username and password are required.", "danger")
            return render_template("auth/initial_setup.html", username=username)
        if password != confirm:
            flash("Passwords do not match.", "danger")
            return render_template("auth/initial_setup.html", username=username)
        existing = User.query.filter_by(username=username).first()
        if existing:
            flash("A user with this username already exists.", "danger")
            return render_template("auth/initial_setup.html", username=username)
        user = User(username=username, role="admin")
        user.set_password(password)
        db.session.add(user)
        db.session.commit()
        flash("Admin user created. You can now log in.", "success")
        return redirect(url_for("auth.login"))
    return render_template("auth/initial_setup.html")
@auth_bp.route("/password-reset", methods=["GET", "POST"])
def password_reset_request():
    """Placeholder password-reset flow, protected by the arithmetic captcha.

    Bug fix: the previous version regenerated the captcha (overwriting
    session["captcha_answer"]) *before* validating a POST, so the submitted
    answer was compared against a freshly generated random answer and could
    only match by coincidence. Validate first, then issue a new captcha.
    """
    if request.method == "POST":
        expected = session.get("captcha_answer")
        provided = (request.form.get("captcha") or "").strip()
        if not expected or provided != expected:
            flash("Invalid captcha answer. Please try again.", "danger")
            question, answer = generate_captcha()
            session["captcha_answer"] = answer
            return render_template(
                "auth/password_reset_request.html", captcha_question=question
            )
        flash("Password reset functionality is not yet implemented.", "info")
        return redirect(url_for("auth.login"))
    # GET: issue a fresh captcha for the form.
    question, answer = generate_captcha()
    session["captcha_answer"] = answer
    return render_template(
        "auth/password_reset_request.html", captcha_question=question
    )

View File

@ -0,0 +1,120 @@
from __future__ import annotations
import threading
import time
from datetime import datetime
from .admin_logging import log_admin_event
from .mail_importer import MailImportError, run_auto_import
from .models import SystemSettings
from .object_persistence import persist_objects_for_approved_run
# Thread name doubles as a singleton guard (see start_auto_importer).
_AUTO_IMPORTER_THREAD_NAME = "auto_importer"


def start_auto_importer(app) -> None:
    """Start the automatic importer background thread.

    The thread is lightweight and checks settings on every loop.
    It only runs imports when enabled and the interval has elapsed.
    """
    # Avoid starting multiple threads if create_app() is called more than once.
    if any(t.name == _AUTO_IMPORTER_THREAD_NAME for t in threading.enumerate()):
        return

    def _worker() -> None:
        # Timestamp of the last *attempted* run. Errors also advance it, so a
        # failing mailbox is retried on the normal interval, not in a hot loop.
        last_run_at: datetime | None = None
        while True:
            try:
                with app.app_context():
                    settings = SystemSettings.query.first()
                    if settings is None:
                        # No settings row yet (fresh install): poll slowly.
                        time.sleep(10)
                        continue
                    enabled = bool(getattr(settings, "auto_import_enabled", False))
                    try:
                        interval_minutes = int(getattr(settings, "auto_import_interval_minutes", 15) or 15)
                    except (TypeError, ValueError):
                        interval_minutes = 15
                    if interval_minutes < 1:
                        interval_minutes = 1
                    now = datetime.utcnow()
                    # Due when enabled and either never run or interval elapsed.
                    due = False
                    if enabled:
                        if last_run_at is None:
                            due = True
                        else:
                            due = (now - last_run_at).total_seconds() >= (interval_minutes * 60)
                    if not due:
                        time.sleep(5)
                        continue
                    # Always enforce fixed batch size for automatic import.
                    try:
                        total_fetched, new_messages, auto_approved, auto_approved_runs, errors = run_auto_import(settings)
                    except MailImportError as exc:
                        log_admin_event("mail_import_auto_error", f"Automatic mail import failed: {exc}")
                        last_run_at = now
                        time.sleep(5)
                        continue
                    except Exception as exc:
                        log_admin_event("mail_import_auto_error", f"Unexpected error during automatic mail import: {exc}")
                        last_run_at = now
                        time.sleep(5)
                        continue
                    log_admin_event(
                        "mail_import_auto",
                        f"Automatic mail import finished. fetched={total_fetched}, new={new_messages}, auto_approved={auto_approved}, errors={len(errors)}",
                    )
                    # Persist objects for auto-approved runs (must not block the thread)
                    if auto_approved_runs:
                        persisted_objects = 0
                        persisted_errors = 0
                        for (customer_id, job_id, run_id, mail_message_id) in auto_approved_runs:
                            try:
                                persisted_objects += persist_objects_for_approved_run(
                                    int(customer_id), int(job_id), int(run_id), int(mail_message_id)
                                )
                            except Exception as exc:
                                persisted_errors += 1
                                log_admin_event(
                                    "object_persist_error",
                                    f"Object persistence failed for auto-approved message {mail_message_id} (job {job_id}, run {run_id}): {exc}",
                                )
                        log_admin_event(
                            "object_persist_auto_approve",
                            f"Persisted objects for auto-approved runs (auto import). runs={len(auto_approved_runs)}, objects={persisted_objects}, errors={persisted_errors}",
                        )
                    # Store only a short summary of errors (the rest is already visible in the UI)
                    if errors:
                        log_admin_event(
                            "mail_import_auto_errors",
                            f"Automatic mail import finished with errors. count={len(errors)}",
                            details="\n".join(errors[:10]),
                        )
                    last_run_at = now
            except Exception:
                # Never let the thread die.
                try:
                    with app.app_context():
                        log_admin_event("mail_import_auto_error", "Automatic importer thread recovered from an unexpected exception.")
                except Exception:
                    pass
            # Pace the loop between iterations.
            time.sleep(5)

    # Daemon thread: must not keep the process alive at shutdown.
    t = threading.Thread(target=_worker, name=_AUTO_IMPORTER_THREAD_NAME, daemon=True)
    t.start()

View File

@ -0,0 +1,18 @@
import os
class Config:
    """Application configuration assembled from environment variables."""

    def __init__(self) -> None:
        # Secret used to sign session cookies; must be overridden in production.
        self.SECRET_KEY = os.environ.get("APP_SECRET_KEY", "dev-secret-key")
        self.SQLALCHEMY_DATABASE_URI = self._build_database_uri()
        self.SQLALCHEMY_TRACK_MODIFICATIONS = False
        self.APP_ENV = os.environ.get("APP_ENV", "development")
        self.APP_PORT = int(os.environ.get("APP_PORT", "8080"))
        self.TIMEZONE = "Europe/Amsterdam"

    def _build_database_uri(self) -> str:
        """Build the SQLAlchemy URI for PostgreSQL from POSTGRES_*/DB_* env vars.

        Bug fix: credentials are now percent-encoded — previously a password
        (or user) containing '@', ':' or '/' produced an unparseable URI.
        """
        from urllib.parse import quote_plus

        db_name = os.environ.get("POSTGRES_DB", "backup")
        db_user = quote_plus(os.environ.get("POSTGRES_USER", "backup"))
        db_password = quote_plus(os.environ.get("POSTGRES_PASSWORD", ""))
        db_host = os.environ.get("DB_HOST", "localhost")
        db_port = int(os.environ.get("DB_PORT", "5432"))
        return f"postgresql+psycopg2://{db_user}:{db_password}@{db_host}:{db_port}/{db_name}"

View File

@ -0,0 +1,3 @@
from flask_sqlalchemy import SQLAlchemy

# Single shared SQLAlchemy handle; bound to the app in create_app() via
# db.init_app(app).
db = SQLAlchemy()

View File

@ -0,0 +1,127 @@
from __future__ import annotations
from email import policy
from email.parser import BytesParser
from email.utils import parseaddr
from typing import List, Optional, Tuple
def normalize_from_address(value: str | None) -> str | None:
"""Normalize sender address for matching.
- Extracts the email part from potential display-name formats (e.g. "Name <user@domain>").
- Strips whitespace and lowercases.
- Returns None for empty / missing values.
"""
if not value:
return None
_name, addr = parseaddr(value)
addr = (addr or value).strip()
if not addr:
return None
return addr.lower()
def _decode_bytes_best_effort(data: bytes) -> str:
"""Decode raw bytes into text using common encodings.
Many HTML report attachments are UTF-16LE (often visible as null-bytes),
but UTF-8 is also common. We try a small set of encodings and fall back
to a safe replacement strategy.
"""
if not data:
return ""
# Heuristic: UTF-16LE often contains many zero bytes.
if b"\x00" in data[:200]:
for enc in ("utf-16", "utf-16le", "utf-16be"):
try:
return data.decode(enc)
except Exception:
pass
for enc in ("utf-8", "utf-8-sig", "windows-1252", "latin-1"):
try:
return data.decode(enc)
except Exception:
pass
return data.decode("utf-8", errors="replace")
def extract_html_attachments_from_eml(
eml_bytes: bytes | None,
*,
max_attachments: int = 5,
max_bytes_per_attachment: int = 2_000_000,
) -> List[Tuple[Optional[str], str]]:
"""Extract HTML attachment(s) from a raw RFC822 (.eml) message.
Returns a list of (filename, html_text) tuples.
The HTML is returned as plain text (no scripts executed).
"""
if not eml_bytes:
return []
try:
msg = BytesParser(policy=policy.default).parsebytes(eml_bytes)
except Exception:
return []
results: List[Tuple[Optional[str], str]] = []
for part in msg.walk():
if len(results) >= max_attachments:
break
# Skip multipart containers
if part.is_multipart():
continue
disposition = (part.get_content_disposition() or "").lower()
filename = part.get_filename()
content_type = (part.get_content_type() or "").lower()
# Only inspect attachments (or parts that clearly look like report files)
looks_like_attachment = disposition == "attachment" or bool(filename)
if not looks_like_attachment:
continue
is_html_type = content_type == "text/html"
is_html_name = isinstance(filename, str) and filename.lower().endswith(".html")
if not (is_html_type or is_html_name):
continue
try:
payload = part.get_payload(decode=True) or b""
except Exception:
continue
if max_bytes_per_attachment and len(payload) > max_bytes_per_attachment:
# Safety: skip very large files
continue
html_text = _decode_bytes_best_effort(payload).strip()
if not html_text:
continue
results.append((filename, html_text))
return results
def extract_best_html_from_eml(
    eml_bytes: bytes | None,
    *,
    max_bytes_per_attachment: int = 2_000_000,
) -> Optional[str]:
    """Convenience wrapper: content of the first HTML attachment, or None."""
    matches = extract_html_attachments_from_eml(
        eml_bytes,
        max_attachments=1,
        max_bytes_per_attachment=max_bytes_per_attachment,
    )
    if not matches:
        return None
    _name, html_text = matches[0]
    return html_text or None

View File

@ -0,0 +1,71 @@
from __future__ import annotations
from typing import Optional, Tuple
from .email_utils import normalize_from_address
from .models import Job, MailMessage
def build_job_match_key(msg: MailMessage) -> Tuple[Optional[str], Optional[str], Optional[str], Optional[str]]:
    """Build the canonical matching key for a message/job.

    Matching key (unique together): normalized from_address, plus trimmed
    backup_software, backup_type and job_name. Blank or missing values
    collapse to None.
    """

    def _trimmed(attr: str) -> Optional[str]:
        # Empty / whitespace-only attribute values become None for matching.
        return (getattr(msg, attr, None) or "").strip() or None

    return (
        normalize_from_address(getattr(msg, "from_address", None)),
        _trimmed("backup_software"),
        _trimmed("backup_type"),
        _trimmed("job_name"),
    )
def find_matching_job(msg: MailMessage) -> Optional[Job]:
    """Find the single matching Job for a message using (From, Backup, Type, Job name).

    If multiple jobs match:
      - If all belong to the same customer, pick the most recently updated.
      - Otherwise, return None (ambiguous / unsafe).

    Bug fix: the previous implementation inspected only the first two
    matches (limit(2)), so three or more matches spanning several customers
    could be mis-detected as "same customer" and a job would be returned for
    an ambiguous key. Ambiguity is now decided with a distinct-customer
    count over *all* matches.
    """
    norm_from, backup, btype, job_name = build_job_match_key(msg)
    q = Job.query
    # NULL-safe equality per key field (SQL '=' never matches NULL).
    if norm_from is None:
        q = q.filter(Job.from_address.is_(None))
    else:
        q = q.filter(Job.from_address == norm_from)
    if backup is None:
        q = q.filter(Job.backup_software.is_(None))
    else:
        q = q.filter(Job.backup_software == backup)
    if btype is None:
        q = q.filter(Job.backup_type.is_(None))
    else:
        q = q.filter(Job.backup_type == btype)
    if job_name is None:
        q = q.filter(Job.job_name.is_(None))
    else:
        q = q.filter(Job.job_name == job_name)
    # Only the newest candidate is ever returned; fetch just that row.
    best = q.order_by(Job.updated_at.desc(), Job.id.desc()).first()
    if best is None:
        return None
    # Count distinct owning customers (capped at 2 — we only need to know
    # whether there is more than one) without loading all rows.
    distinct_customers = q.with_entities(Job.customer_id).distinct().limit(2).count()
    if distinct_customers > 1:
        return None
    return best

View File

@ -0,0 +1,673 @@
from __future__ import annotations
from datetime import datetime, timezone, timedelta
from typing import List
import socket
from concurrent.futures import ThreadPoolExecutor, TimeoutError as FuturesTimeoutError
from urllib.parse import urlparse
import requests
from sqlalchemy import func
from . import db
from .models import MailMessage, SystemSettings, Job, JobRun
from .parsers import parse_mail_message
from .email_utils import normalize_from_address, extract_best_html_from_eml
from .job_matching import find_matching_job
# Microsoft Graph OAuth2 token endpoint (client-credentials flow).
GRAPH_TOKEN_URL_TEMPLATE = "https://login.microsoftonline.com/{tenant_id}/oauth2/v2.0/token"
# Base URL for Graph v1.0 REST calls.
GRAPH_BASE_URL = "https://graph.microsoft.com/v1.0"


class MailImportError(Exception):
    """Raised when talking to Microsoft Graph or importing mail fails."""
    pass
def _get_access_token(settings: SystemSettings) -> str:
    """Acquire a Graph API bearer token via the OAuth2 client-credentials flow.

    Raises MailImportError when credentials are missing, the token endpoint
    returns a non-200 status, or no access_token is present in the response.
    """
    if not settings.graph_tenant_id or not settings.graph_client_id or not settings.graph_client_secret:
        raise MailImportError("Graph credentials are not fully configured.")
    token_url = GRAPH_TOKEN_URL_TEMPLATE.format(tenant_id=settings.graph_tenant_id)
    data = {
        "client_id": settings.graph_client_id,
        "client_secret": settings.graph_client_secret,
        "grant_type": "client_credentials",
        "scope": "https://graph.microsoft.com/.default",
    }
    resp = requests.post(token_url, data=data, timeout=15)
    if resp.status_code != 200:
        raise MailImportError(f"Failed to obtain access token from Microsoft Graph (status {resp.status_code}).")
    payload = resp.json()
    access_token = payload.get("access_token")
    if not access_token:
        raise MailImportError("Access token not present in Graph response.")
    return access_token
def _build_auth_headers(access_token: str) -> dict:
return {
"Authorization": f"Bearer {access_token}",
"Accept": "application/json",
}
def _can_resolve_hostname(hostname: str, timeout_seconds: int = 2) -> bool:
"""Best-effort DNS preflight.
requests' connect timeout does not cover DNS resolution time.
When DNS is slow/unavailable, a sync gunicorn worker can hit WORKER TIMEOUT.
We therefore preflight resolution and skip move operations if it hangs.
"""
if not hostname:
return False
try:
# signal.* cannot be used outside the main thread (gunicorn worker threads / schedulers).
# Use a small worker thread and a hard timeout instead.
with ThreadPoolExecutor(max_workers=1) as ex:
fut = ex.submit(socket.getaddrinfo, hostname, 443)
fut.result(timeout=float(timeout_seconds))
return True
except FuturesTimeoutError:
return False
except Exception:
return False
def _fetch_eml_bytes(mailbox: str, msg_id: str, access_token: str) -> bytes | None:
    """Fetch raw RFC822 (.eml) content for a message id via Microsoft Graph.

    Best-effort: returns None on missing arguments, any transport error, a
    non-200 response, or an empty body — callers treat the .eml as optional.
    """
    if not mailbox or not msg_id:
        return None
    url = f"{GRAPH_BASE_URL}/users/{mailbox}/messages/{msg_id}/$value"
    headers = {
        "Authorization": f"Bearer {access_token}",
        "Accept": "application/octet-stream",
    }
    try:
        resp = requests.get(url, headers=headers, timeout=30)
    except Exception:
        return None
    if resp.status_code != 200:
        return None
    return resp.content or None
def _resolve_folder_id(settings: SystemSettings, access_token: str, folder_path: str) -> str:
    """Resolve a displayName path like 'Inbox/Backup Database' to a folder id.

    Walks the mailFolders tree segment by segment (case-insensitive match on
    displayName). Raises MailImportError on any lookup failure.

    Cleanup: a retention_days value was previously computed here from
    settings.ingest_eml_retention_days but never used — removed as dead code.
    """
    if not settings.graph_mailbox:
        raise MailImportError("Mailbox address is not configured.")
    folder_path = (folder_path or "").strip()
    if not folder_path:
        raise MailImportError("Folder path is empty.")
    segments = [seg.strip() for seg in folder_path.split("/") if seg.strip()]
    if not segments:
        raise MailImportError("Folder path is empty.")
    headers = _build_auth_headers(access_token)
    mailbox = settings.graph_mailbox
    # Fetch top-level mailFolders (Inbox, Archive, etc.)
    url = f"{GRAPH_BASE_URL}/users/{mailbox}/mailFolders?$top=100"
    resp = requests.get(url, headers=headers, timeout=20)
    if resp.status_code != 200:
        raise MailImportError(f"Failed to list top-level mail folders (status {resp.status_code}).")
    data = resp.json()
    folders = data.get("value", [])

    def _find_by_name(items, name):
        # Case-insensitive displayName lookup within one folder listing.
        name_lower = name.lower()
        for item in items:
            if str(item.get("displayName", "")).lower() == name_lower:
                return item
        return None

    current_folder = _find_by_name(folders, segments[0])
    if not current_folder:
        raise MailImportError(f"Folder '{segments[0]}' not found in mailbox.")
    # Walk down childFolders if there are more segments
    for segment in segments[1:]:
        parent_id = current_folder.get("id")
        url = f"{GRAPH_BASE_URL}/users/{mailbox}/mailFolders/{parent_id}/childFolders?$top=100"
        resp = requests.get(url, headers=headers, timeout=20)
        if resp.status_code != 200:
            raise MailImportError(
                f"Failed to list child folders for '{current_folder.get('displayName')}' (status {resp.status_code})."
            )
        children = resp.json().get("value", [])
        next_folder = _find_by_name(children, segment)
        if not next_folder:
            raise MailImportError(
                f"Folder '{segment}' not found under '{current_folder.get('displayName')}'."
            )
        current_folder = next_folder
    folder_id = current_folder.get("id")
    if not folder_id:
        raise MailImportError("Resolved folder does not have an id.")
    return folder_id
def _parse_graph_datetime(value: str | None):
if not value:
return None
try:
dt = datetime.fromisoformat(value.replace("Z", "+00:00"))
return dt.astimezone(timezone.utc).replace(tzinfo=None)
except Exception:
return None
def _store_messages(settings: SystemSettings, messages):
    """Persist newly fetched Graph messages and auto-approve known jobs.

    For each message not yet in the database this creates a MailMessage row,
    runs the parsers immediately, and — when the message unambiguously matches
    an active, auto-approve-enabled job — creates a JobRun and moves the mail
    to history. Rows are only flushed; the caller owns the final commit.

    Args:
        settings: System settings (kept for signature parity with callers).
        messages: Iterable of Graph message dicts, optionally carrying raw
            RFC822 bytes under the "_eml_bytes" key.

    Returns:
        (total, new_count, auto_approved, auto_approved_runs) where
        auto_approved_runs is a list of (customer_id, job_id, run_id, mail_id).

    Raises:
        MailImportError: If persisting fails; the session is rolled back first.
    """
    total = 0
    new_count = 0
    auto_approved = 0
    auto_approved_runs = []
    for msg in messages:
        total += 1
        graph_id = msg.get("id")
        if not graph_id:
            continue
        # Skip messages already imported earlier (Graph id is the dedupe key).
        existing = MailMessage.query.filter_by(message_id=graph_id).first()
        if existing:
            continue
        from_info = msg.get("from") or {}
        email_info = from_info.get("emailAddress") or {}
        from_addr = normalize_from_address(email_info.get("address"))
        subject = msg.get("subject")
        received_raw = msg.get("receivedDateTime")
        received_at = _parse_graph_datetime(received_raw)
        # Decide which body field to populate based on Graph response
        body = msg.get("body") or {}
        body_content = body.get("content")
        body_type = (body.get("contentType") or "").lower()
        html_body = None
        text_body = None
        if isinstance(body_content, str):
            if body_type == "html":
                html_body = body_content
            else:
                text_body = body_content
        mail = MailMessage(
            message_id=graph_id,
            from_address=from_addr,
            subject=subject,
            received_at=received_at,
            html_body=html_body,
            text_body=text_body,
            location="inbox",
            eml_blob=msg.get("_eml_bytes"),
            eml_stored_at=(datetime.utcnow() if msg.get("_eml_bytes") else None),
        )
        # Some systems send empty bodies and put the actual report in an HTML attachment.
        # If we have raw EML bytes and no body content, extract the first HTML attachment
        # and use it as the HTML body so parsers and the inbox preview can work.
        if not (mail.html_body or mail.text_body) and mail.eml_blob:
            attachment_html = extract_best_html_from_eml(mail.eml_blob)
            if attachment_html:
                mail.html_body = attachment_html
        # IMPORTANT: Persist first so mail.id exists.
        # Object extraction stores rows keyed by mail_message_id; without an id,
        # objects are silently skipped.
        db.session.add(mail)
        db.session.flush()
        # BUGFIX: new_count was never incremented before, so callers always
        # reported 0 new messages even when rows were created.
        new_count += 1
        # Immediately run parsers so Inbox / Jobs can show parsed metadata + objects.
        try:
            parse_mail_message(mail)
        except Exception as exc:
            # Do not break the import if parsing fails; just record it on the message
            if hasattr(mail, "parse_result"):
                mail.parse_result = "error"
            if hasattr(mail, "parse_error"):
                mail.parse_error = str(exc)[:500]
        # Auto-approve if this job was already approved before (unique match across customers).
        # Mirrors the behavior of the Inbox "Re-parse all" auto-approve.
        try:
            if (
                getattr(mail, "location", "inbox") == "inbox"
                and getattr(mail, "parse_result", None) == "ok"
                and not bool(getattr(mail, "approved", False))
            ):
                job = find_matching_job(mail)
                # BUGFIX: the per-job guards used to `raise Exception(...)`,
                # which was caught by the outer handler, rolled back the whole
                # session and aborted the import. A disabled job should only
                # skip auto-approve for this message, not abort everything.
                eligible = bool(job)
                if eligible and hasattr(job, "active") and not bool(job.active):
                    eligible = False
                if eligible and hasattr(job, "auto_approve") and not bool(job.auto_approve):
                    eligible = False
                if eligible:
                    # Create a new run for the known job
                    run = JobRun(
                        job_id=job.id,
                        mail_message_id=mail.id,
                        run_at=mail.received_at,
                        status=mail.overall_status or None,
                        missed=False,
                    )
                    # Optional storage metrics (for capacity graphs)
                    if hasattr(run, "storage_used_bytes") and hasattr(mail, "storage_used_bytes"):
                        run.storage_used_bytes = mail.storage_used_bytes
                    if hasattr(run, "storage_capacity_bytes") and hasattr(mail, "storage_capacity_bytes"):
                        run.storage_capacity_bytes = mail.storage_capacity_bytes
                    if hasattr(run, "storage_free_bytes") and hasattr(mail, "storage_free_bytes"):
                        run.storage_free_bytes = mail.storage_free_bytes
                    if hasattr(run, "storage_free_percent") and hasattr(mail, "storage_free_percent"):
                        run.storage_free_percent = mail.storage_free_percent
                    db.session.add(run)
                    db.session.flush()  # ensure run.id is available
                    # Update mail message to reflect approval
                    mail.job_id = job.id
                    if hasattr(mail, "approved"):
                        mail.approved = True
                    if hasattr(mail, "approved_at"):
                        mail.approved_at = datetime.utcnow()
                    if hasattr(mail, "location"):
                        mail.location = "history"
                    auto_approved += 1
                    auto_approved_runs.append((job.customer_id, job.id, run.id, mail.id))
        except Exception as exc:
            db.session.rollback()
            raise MailImportError(f"Failed to store mail messages in database: {exc}")
    return total, new_count, auto_approved, auto_approved_runs
def run_auto_import(settings: SystemSettings):
    """Execute the automatic import from Microsoft Graph.

    Automatic import always uses a fixed batch size (50) and respects the
    configured cutoff date. Messages older than the cutoff date are not fetched
    and therefore remain in the inbox.

    Pipeline: token -> resolve folders -> fetch listing -> fetch bodies ->
    optionally fetch raw EML -> store/auto-approve -> move to processed ->
    EML retention cleanup. Non-fatal problems are accumulated in `errors`.

    Returns:
        (total_fetched, new_messages, auto_approved, auto_approved_runs, errors)
    """
    errors: List[str] = []
    if not settings.graph_mailbox:
        raise MailImportError("Mailbox address is not configured.")
    # Acquire an app-only Graph token; unexpected failures are re-wrapped so
    # callers only have to handle MailImportError.
    try:
        access_token = _get_access_token(settings)
    except MailImportError:
        raise
    except Exception as exc:
        raise MailImportError(f"Unexpected error while obtaining Graph token: {exc}")
    # Resolving the incoming folder is fatal; there is nothing to import without it.
    try:
        incoming_folder_id = _resolve_folder_id(settings, access_token, settings.incoming_folder or "Inbox")
    except MailImportError:
        raise
    except Exception as exc:
        raise MailImportError(f"Unexpected error while resolving incoming folder: {exc}")
    processed_folder_id = None
    if getattr(settings, "processed_folder", None):
        try:
            processed_folder_id = _resolve_folder_id(settings, access_token, settings.processed_folder)
        except MailImportError as exc:
            # If the processed folder is misconfigured, we still continue the import
            errors.append(str(exc))
        except Exception as exc:
            errors.append(f"Unexpected error while resolving processed folder: {exc}")
    headers = _build_auth_headers(access_token)
    mailbox = settings.graph_mailbox
    # Normalize the EML retention setting to one of the supported values
    # (0 = disabled, 7 or 14 days); anything else falls back to 7.
    retention_days = getattr(settings, "ingest_eml_retention_days", 7)
    try:
        retention_days = int(retention_days) if retention_days is not None else 7
    except (ValueError, TypeError):
        retention_days = 7
    if retention_days not in (0, 7, 14):
        retention_days = 7
    batch_size = 50
    url = (
        f"{GRAPH_BASE_URL}/users/{mailbox}/mailFolders/{incoming_folder_id}/messages"
        f"?$top={batch_size}&$orderby=receivedDateTime desc"
    )
    # Optional cutoff date (UTC midnight). Older messages should remain in inbox.
    cutoff_date = getattr(settings, "auto_import_cutoff_date", None)
    if cutoff_date:
        cutoff_dt = datetime.combine(cutoff_date, datetime.min.time()).replace(tzinfo=timezone.utc)
        cutoff_iso = cutoff_dt.strftime('%Y-%m-%dT%H:%M:%SZ')
        # Graph requires spaces in $filter to be URL-encoded.
        url += f"&$filter=receivedDateTime%20ge%20{cutoff_iso}"
    resp = requests.get(url, headers=headers, timeout=20)
    if resp.status_code != 200:
        raise MailImportError(f"Failed to fetch messages from incoming folder (status {resp.status_code}).")
    payload = resp.json()
    items = payload.get("value", [])
    total_fetched = len(items)
    # Fetch full bodies for the fetched messages so inline popup can show content.
    for msg in items:
        msg_id = msg.get("id")
        if not msg_id:
            continue
        detail_url = f"{GRAPH_BASE_URL}/users/{mailbox}/messages/{msg_id}?$select=body,bodyPreview"
        try:
            detail_resp = requests.get(detail_url, headers=headers, timeout=20)
        except Exception as exc:
            errors.append(f"Error while fetching body for message {msg_id}: {exc}")
            continue
        if detail_resp.status_code != 200:
            errors.append(f"Failed to fetch body for message {msg_id} (status {detail_resp.status_code}).")
            continue
        detail_payload = detail_resp.json()
        if "body" in detail_payload:
            msg["body"] = detail_payload.get("body")
        if "bodyPreview" in detail_payload:
            msg["bodyPreview"] = detail_payload.get("bodyPreview")
    # Optionally fetch raw EML bytes for new messages (debug storage)
    if retention_days > 0:
        try:
            ids = [m.get("id") for m in items if m.get("id")]
            existing_ids = set()
            if ids:
                # Only download EML for messages that are not stored yet.
                existing_ids = {
                    mid
                    for (mid,) in db.session.query(MailMessage.message_id)
                    .filter(MailMessage.message_id.in_(ids))
                    .all()
                    if mid
                }
            for m in items:
                mid = m.get("id")
                if not mid or mid in existing_ids:
                    continue
                eml_bytes = _fetch_eml_bytes(mailbox, mid, access_token)
                if eml_bytes:
                    m["_eml_bytes"] = eml_bytes
        except Exception as exc:
            errors.append(f"Unexpected error while fetching EML bytes: {exc}")
    auto_approved_runs = []
    try:
        total_processed, new_messages, auto_approved, auto_approved_runs = _store_messages(settings, items)
    except MailImportError as exc:
        # Storing failed entirely; report it but still attempt cleanup below.
        errors.append(str(exc))
        new_messages = 0
        auto_approved = 0
        auto_approved_runs = []
    # Move messages to the processed folder if configured
    if processed_folder_id:
        # Guard against DNS stalls: if Graph's hostname cannot be resolved
        # quickly, skip the move step rather than hang per-message.
        graph_host = urlparse(GRAPH_BASE_URL).hostname or ""
        if graph_host and not _can_resolve_hostname(graph_host, timeout_seconds=2):
            errors.append(
                "Skipping move-to-processed step: Microsoft Graph hostname could not be resolved in time. "
                "Messages were imported, but will not be moved."
            )
            processed_folder_id = None
    if processed_folder_id:
        for msg in items:
            msg_id = msg.get("id")
            if not msg_id:
                continue
            move_url = f"{GRAPH_BASE_URL}/users/{mailbox}/messages/{msg_id}/move"
            try:
                move_resp = requests.post(
                    move_url,
                    headers=headers,
                    json={"destinationId": processed_folder_id},
                    timeout=20,
                )
            except Exception as exc:
                errors.append(f"Error while moving message {msg_id}: {exc}")
                continue
            if move_resp.status_code not in (200, 201):
                errors.append(
                    f"Failed to move message {msg_id} to processed folder "
                    f"(status {move_resp.status_code})."
                )
    # Cleanup stored EML blobs based on retention policy
    try:
        if retention_days == 0:
            # Retention disabled: drop every stored blob immediately.
            MailMessage.query.filter(MailMessage.eml_blob.isnot(None)).update(
                {MailMessage.eml_blob: None, MailMessage.eml_stored_at: None},
                synchronize_session=False,
            )
            db.session.commit()
        else:
            # Drop blobs that are older than the retention window.
            cutoff = datetime.utcnow() - timedelta(days=retention_days)
            MailMessage.query.filter(
                MailMessage.eml_stored_at.isnot(None),
                MailMessage.eml_stored_at < cutoff,
            ).update(
                {MailMessage.eml_blob: None, MailMessage.eml_stored_at: None},
                synchronize_session=False,
            )
            db.session.commit()
    except Exception as exc:
        db.session.rollback()
        errors.append(f"Failed to cleanup stored EML blobs: {exc}")
    return total_fetched, new_messages, auto_approved, auto_approved_runs, errors
def run_manual_import(settings: SystemSettings, batch_size: int):
    """Execute a one-off manual import from Microsoft Graph.

    Same pipeline as run_auto_import, but the caller controls the batch size
    and no cutoff-date filter is applied. Non-fatal problems are accumulated
    in `errors`.

    Returns:
        (total_fetched, new_messages, auto_approved, auto_approved_runs, errors)
    """
    errors: List[str] = []
    if not settings.graph_mailbox:
        raise MailImportError("Mailbox address is not configured.")
    # Acquire an app-only Graph token; unexpected failures are re-wrapped so
    # callers only have to handle MailImportError.
    try:
        access_token = _get_access_token(settings)
    except MailImportError as exc:
        raise
    except Exception as exc:
        raise MailImportError(f"Unexpected error while obtaining Graph token: {exc}")
    # Resolving the incoming folder is fatal; there is nothing to import without it.
    try:
        incoming_folder_id = _resolve_folder_id(settings, access_token, settings.incoming_folder or "Inbox")
    except MailImportError as exc:
        raise
    except Exception as exc:
        raise MailImportError(f"Unexpected error while resolving incoming folder: {exc}")
    processed_folder_id = None
    if getattr(settings, "processed_folder", None):
        try:
            processed_folder_id = _resolve_folder_id(settings, access_token, settings.processed_folder)
        except MailImportError as exc:
            # If the processed folder is misconfigured, we still continue the import
            errors.append(str(exc))
        except Exception as exc:
            errors.append(f"Unexpected error while resolving processed folder: {exc}")
    headers = _build_auth_headers(access_token)
    mailbox = settings.graph_mailbox
    # Normalize the EML retention setting to one of the supported values
    # (0 = disabled, 7 or 14 days); anything else falls back to 7.
    retention_days = getattr(settings, "ingest_eml_retention_days", 7)
    try:
        retention_days = int(retention_days) if retention_days is not None else 7
    except (ValueError, TypeError):
        retention_days = 7
    if retention_days not in (0, 7, 14):
        retention_days = 7
    url = (
        f"{GRAPH_BASE_URL}/users/{mailbox}/mailFolders/{incoming_folder_id}/messages"
        f"?$top={batch_size}&$orderby=receivedDateTime desc"
    )
    resp = requests.get(url, headers=headers, timeout=20)
    if resp.status_code != 200:
        raise MailImportError(f"Failed to fetch messages from incoming folder (status {resp.status_code}).")
    payload = resp.json()
    items = payload.get("value", [])
    total_fetched = len(items)
    # Fetch full bodies for the fetched messages so inline popup can show content.
    # We keep this simple: for each new message, fetch its body (HTML or text).
    for msg in items:
        msg_id = msg.get("id")
        if not msg_id:
            continue
        detail_url = f"{GRAPH_BASE_URL}/users/{mailbox}/messages/{msg_id}?$select=body,bodyPreview"
        try:
            detail_resp = requests.get(detail_url, headers=headers, timeout=20)
        except Exception as exc:
            errors.append(f"Error while fetching body for message {msg_id}: {exc}")
            continue
        if detail_resp.status_code != 200:
            errors.append(
                f"Failed to fetch body for message {msg_id} (status {detail_resp.status_code})."
            )
            continue
        detail_payload = detail_resp.json()
        if "body" in detail_payload:
            msg["body"] = detail_payload.get("body")
        if "bodyPreview" in detail_payload:
            msg["bodyPreview"] = detail_payload.get("bodyPreview")
    # Optionally fetch raw EML bytes for new messages (debug storage)
    if retention_days > 0:
        try:
            ids = [m.get("id") for m in items if m.get("id")]
            existing_ids = set()
            if ids:
                # Only download EML for messages that are not stored yet.
                existing_ids = {
                    mid
                    for (mid,) in db.session.query(MailMessage.message_id)
                    .filter(MailMessage.message_id.in_(ids))
                    .all()
                    if mid
                }
            for m in items:
                mid = m.get("id")
                if not mid or mid in existing_ids:
                    continue
                eml_bytes = _fetch_eml_bytes(mailbox, mid, access_token)
                if eml_bytes:
                    m["_eml_bytes"] = eml_bytes
        except Exception as exc:
            errors.append(f"Unexpected error while fetching EML bytes: {exc}")
    auto_approved = 0
    auto_approved_runs = []
    try:
        total_processed, new_messages, auto_approved, auto_approved_runs = _store_messages(settings, items)
    except MailImportError as exc:
        # Storing failed entirely; report it but still attempt cleanup below.
        errors.append(str(exc))
        new_messages = 0
        auto_approved_runs = []
    # Move messages to the processed folder if configured
    if processed_folder_id:
        # Guard against DNS stalls: if Graph's hostname cannot be resolved
        # quickly, skip the move step rather than hang per-message.
        graph_host = urlparse(GRAPH_BASE_URL).hostname or ""
        if graph_host and not _can_resolve_hostname(graph_host, timeout_seconds=2):
            errors.append(
                "Skipping move-to-processed step: Microsoft Graph hostname could not be resolved in time. "
                "Messages were imported, but will not be moved."
            )
            processed_folder_id = None
    if processed_folder_id:
        for msg in items:
            msg_id = msg.get("id")
            if not msg_id:
                continue
            move_url = f"{GRAPH_BASE_URL}/users/{mailbox}/messages/{msg_id}/move"
            try:
                move_resp = requests.post(
                    move_url,
                    headers=headers,
                    json={"destinationId": processed_folder_id},
                    timeout=20,
                )
            except Exception as exc:
                errors.append(f"Error while moving message {msg_id}: {exc}")
                continue
            if move_resp.status_code not in (200, 201):
                errors.append(
                    f"Failed to move message {msg_id} to processed folder "
                    f"(status {move_resp.status_code})."
                )
    # Cleanup stored EML blobs based on retention policy
    try:
        if retention_days == 0:
            # Retention disabled: drop every stored blob immediately.
            MailMessage.query.filter(MailMessage.eml_blob.isnot(None)).update(
                {MailMessage.eml_blob: None, MailMessage.eml_stored_at: None},
                synchronize_session=False,
            )
            db.session.commit()
        else:
            # Drop blobs that are older than the retention window.
            cutoff = datetime.utcnow() - timedelta(days=retention_days)
            MailMessage.query.filter(
                MailMessage.eml_stored_at.isnot(None),
                MailMessage.eml_stored_at < cutoff,
            ).update(
                {MailMessage.eml_blob: None, MailMessage.eml_stored_at: None},
                synchronize_session=False,
            )
            db.session.commit()
    except Exception as exc:
        db.session.rollback()
        errors.append(f"Failed to cleanup stored EML blobs: {exc}")
    return total_fetched, new_messages, auto_approved, auto_approved_runs, errors

View File

@ -0,0 +1,9 @@
import os
from . import create_app

# Application object imported by the WSGI server (e.g. gunicorn "module:app").
app = create_app()

if __name__ == "__main__":
    # Development fallback: run Flask's built-in server directly.
    # NOTE(review): binds to all interfaces (0.0.0.0) — acceptable inside a
    # container; confirm this module is not run directly on exposed hosts.
    port = int(os.environ.get("APP_PORT", 8080))
    app.run(host="0.0.0.0", port=port)

View File

@ -0,0 +1,29 @@
"""Main blueprint routes.
This module keeps a small import surface for app creation while the actual
view functions are organized across multiple modules.
"""
from .routes_shared import main_bp, roles_required # noqa: F401
# Import route modules so their decorators register with the blueprint.
from . import routes_core # noqa: F401
from . import routes_news # noqa: F401
from . import routes_inbox # noqa: F401
from . import routes_customers # noqa: F401
from . import routes_jobs # noqa: F401
from . import routes_settings # noqa: F401
from . import routes_daily_jobs # noqa: F401
from . import routes_run_checks # noqa: F401
from . import routes_overrides # noqa: F401
from . import routes_parsers # noqa: F401
from . import routes_changelog # noqa: F401
from . import routes_reports # noqa: F401
from . import routes_tickets # noqa: F401
from . import routes_remarks # noqa: F401
from . import routes_feedback # noqa: F401
from . import routes_api # noqa: F401
from . import routes_reporting_api # noqa: F401
from . import routes_user_settings # noqa: F401
__all__ = ["main_bp", "roles_required"]

View File

@ -0,0 +1,492 @@
from .routes_shared import * # noqa: F401,F403
from .routes_shared import _format_datetime, _get_ui_timezone_name, _next_ticket_code, _to_amsterdam_date
@main_bp.route("/api/job-runs/<int:run_id>/alerts")
@login_required
@roles_required("admin", "operator", "viewer")
def api_job_run_alerts(run_id: int):
    """Return tickets and remarks that were active on a job run's local date.

    Date comparisons are done in the configured UI timezone, so a ticket or
    remark resolved later on the same local day is still included and flagged
    with "resolved_same_day".

    Returns:
        JSON with minimal job metadata plus "tickets" and "remarks" lists,
        or a 500 JSON error payload when a query fails.
    """
    run = JobRun.query.get_or_404(run_id)
    job = Job.query.get(run.job_id) if run else None
    # Fall back to "today" (UI timezone) when the run carries no timestamp.
    run_date = _to_amsterdam_date(run.run_at)
    if run_date is None:
        run_date = _to_amsterdam_date(datetime.utcnow())
    tickets = []
    remarks = []
    # Tickets active for this job on this run date (including resolved-on-day)
    try:
        rows = (
            db.session.execute(
                text(
                    """
                    SELECT t.id, t.ticket_code, t.description, t.start_date, t.resolved_at, t.active_from_date
                    FROM tickets t
                    JOIN ticket_scopes ts ON ts.ticket_id = t.id
                    WHERE ts.job_id = :job_id
                    AND t.active_from_date <= :run_date
                    AND (
                        t.resolved_at IS NULL
                        OR ((t.resolved_at AT TIME ZONE 'UTC' AT TIME ZONE :ui_tz)::date) >= :run_date
                    )
                    ORDER BY t.start_date DESC
                    """
                ),
                {
                    "job_id": job.id if job else None,
                    "run_date": run_date,
                    "ui_tz": _get_ui_timezone_name(),
                },
            )
            .mappings()
            .all()
        )
        for r in rows:
            resolved_at = r.get("resolved_at")
            resolved_same_day = False
            if resolved_at and run_date:
                # Resolved on the run's local day: shown but marked specially.
                resolved_same_day = _to_amsterdam_date(resolved_at) == run_date
            active_now = r.get("resolved_at") is None
            tickets.append(
                {
                    "id": int(r.get("id")),
                    "ticket_code": r.get("ticket_code") or "",
                    "description": r.get("description") or "",
                    "start_date": _format_datetime(r.get("start_date")),
                    "active_from_date": str(r.get("active_from_date")) if r.get("active_from_date") else "",
                    "resolved_at": _format_datetime(r.get("resolved_at")) if r.get("resolved_at") else "",
                    "active": bool(active_now),
                    "resolved_same_day": bool(resolved_same_day),
                }
            )
    except Exception as exc:
        return jsonify({"status": "error", "message": str(exc) or "Failed to load tickets."}), 500
    # Remarks active for this job on this run date (including resolved-on-day)
    try:
        rows = (
            db.session.execute(
                text(
                    """
                    SELECT r.id, r.body, r.start_date, r.resolved_at, r.active_from_date
                    FROM remarks r
                    JOIN remark_scopes rs ON rs.remark_id = r.id
                    WHERE rs.job_id = :job_id
                    AND COALESCE(
                        r.active_from_date,
                        ((r.start_date AT TIME ZONE 'UTC' AT TIME ZONE :ui_tz)::date)
                    ) <= :run_date
                    AND (
                        r.resolved_at IS NULL
                        OR ((r.resolved_at AT TIME ZONE 'UTC' AT TIME ZONE :ui_tz)::date) >= :run_date
                    )
                    ORDER BY r.start_date DESC
                    """
                ),
                {
                    "job_id": job.id if job else None,
                    "run_date": run_date,
                    "ui_tz": _get_ui_timezone_name(),
                },
            )
            .mappings()
            .all()
        )
        for rr in rows:
            body = (rr.get("body") or "").strip()
            # Truncate long remark bodies for the inline preview.
            if len(body) > 180:
                body = body[:177] + "..."
            resolved_at = rr.get("resolved_at")
            resolved_same_day = False
            if resolved_at and run_date:
                resolved_same_day = _to_amsterdam_date(resolved_at) == run_date
            # NOTE(review): unlike tickets, a remark resolved on a *later* day
            # still reports active=True here — presumably intentional ("was
            # active for this run date"); confirm against the UI's usage.
            active_now = resolved_at is None or (not resolved_same_day)
            remarks.append(
                {
                    "id": int(rr.get("id")),
                    "body": body,
                    "start_date": _format_datetime(rr.get("start_date")) if rr.get("start_date") else "-",
                    "active_from_date": str(rr.get("active_from_date")) if rr.get("active_from_date") else "",
                    "resolved_at": _format_datetime(rr.get("resolved_at")) if rr.get("resolved_at") else "",
                    "active": bool(active_now),
                    "resolved_same_day": bool(resolved_same_day),
                }
            )
    except Exception as exc:
        return jsonify({"status": "error", "message": str(exc) or "Failed to load remarks."}), 500
    payload_job = {
        "job_id": job.id if job else None,
        "job_name": job.job_name if job else "",
        "customer_id": job.customer_id if job else None,
        "backup_software": job.backup_software if job else "",
        "backup_type": job.backup_type if job else "",
    }
    return jsonify({"status": "ok", "job": payload_job, "tickets": tickets, "remarks": remarks})
@main_bp.route("/api/tickets", methods=["GET", "POST"])
@login_required
@roles_required("admin", "operator", "viewer")
def api_tickets():
    """List tickets (GET) or create a ticket scoped to a job run (POST)."""
    if request.method == "GET":
        # Query-string filters: active-only toggle, free-text search, customer.
        show_active_only = (request.args.get("active") or "1").strip() != "0"
        search = (request.args.get("q") or "").strip()
        try:
            customer_id = int(request.args.get("customer_id") or 0)
        except Exception:
            customer_id = 0
        ticket_query = Ticket.query
        if show_active_only:
            ticket_query = ticket_query.filter(Ticket.resolved_at.is_(None))
        if search:
            pattern = f"%{search}%"
            ticket_query = ticket_query.filter(
                (Ticket.ticket_code.ilike(pattern))
                | (Ticket.description.ilike(pattern))
            )
        if customer_id:
            ticket_query = (
                ticket_query.join(TicketScope, TicketScope.ticket_id == Ticket.id)
                .filter(TicketScope.customer_id == customer_id)
            )
        ticket_query = ticket_query.order_by(Ticket.start_date.desc()).limit(500)
        items = [
            {
                "id": t.id,
                "ticket_code": t.ticket_code,
                "description": t.description or "",
                "active_from_date": str(getattr(t, "active_from_date", "") or ""),
                "start_date": _format_datetime(t.start_date),
                "resolved_at": _format_datetime(t.resolved_at) if t.resolved_at else "",
                "active": t.resolved_at is None,
            }
            for t in ticket_query.all()
        ]
        return jsonify({"status": "ok", "tickets": items})
    # POST — only admins/operators may create tickets.
    if get_active_role() not in ("admin", "operator"):
        return jsonify({"status": "error", "message": "Forbidden."}), 403
    data = request.get_json(silent=True) or {}
    description = (data.get("description") or "").strip() or None
    try:
        run_id = int(data.get("job_run_id") or 0)
    except Exception:
        run_id = 0
    if run_id <= 0:
        return jsonify({"status": "error", "message": "job_run_id is required."}), 400
    run = JobRun.query.get(run_id)
    if not run:
        return jsonify({"status": "error", "message": "Job run not found."}), 404
    job = Job.query.get(run.job_id) if run else None
    now = datetime.utcnow()
    ticket = Ticket(
        ticket_code=_next_ticket_code(now),
        title=None,
        description=description,
        active_from_date=_to_amsterdam_date(run.run_at) or _to_amsterdam_date(now) or now.date(),
        start_date=now,
        resolved_at=None,
    )
    try:
        db.session.add(ticket)
        db.session.flush()
        # Minimal scope from job
        db.session.add(
            TicketScope(
                ticket_id=ticket.id,
                scope_type="job",
                customer_id=job.customer_id if job else None,
                backup_software=job.backup_software if job else None,
                backup_type=job.backup_type if job else None,
                job_id=job.id if job else None,
                job_name_match=job.job_name if job else None,
                job_name_match_mode="exact",
            )
        )
        db.session.add(TicketJobRun(ticket_id=ticket.id, job_run_id=run.id, link_source="manual"))
        db.session.commit()
    except Exception as exc:
        db.session.rollback()
        return jsonify({"status": "error", "message": str(exc) or "Failed to create ticket."}), 500
    return jsonify(
        {
            "status": "ok",
            "ticket": {
                "id": ticket.id,
                "ticket_code": ticket.ticket_code,
                "description": ticket.description or "",
                "start_date": _format_datetime(ticket.start_date),
                "active_from_date": str(ticket.active_from_date) if getattr(ticket, "active_from_date", None) else "",
                "resolved_at": "",
                "active": True,
            },
        }
    )
@main_bp.route("/api/tickets/<int:ticket_id>", methods=["PATCH"])
@login_required
@roles_required("admin", "operator", "viewer")
def api_ticket_update(ticket_id: int):
    """Update the description of an existing ticket (admin/operator only)."""
    if get_active_role() not in ("admin", "operator"):
        return jsonify({"status": "error", "message": "Forbidden."}), 403
    ticket = Ticket.query.get_or_404(ticket_id)
    data = request.get_json(silent=True) or {}
    if "description" in data:
        # Empty/whitespace-only descriptions are normalized to NULL.
        new_description = (data.get("description") or "").strip()
        ticket.description = new_description if new_description else None
    try:
        db.session.commit()
    except Exception as exc:
        db.session.rollback()
        message = str(exc) or "Failed to update ticket."
        return jsonify({"status": "error", "message": message}), 500
    return jsonify({"status": "ok"})
@main_bp.route("/api/tickets/<int:ticket_id>/resolve", methods=["POST"])
@login_required
@roles_required("admin", "operator", "viewer")
def api_ticket_resolve(ticket_id: int):
    """Mark a ticket resolved (idempotent); admin/operator only."""
    if get_active_role() not in ("admin", "operator"):
        return jsonify({"status": "error", "message": "Forbidden."}), 403
    ticket = Ticket.query.get_or_404(ticket_id)
    if ticket.resolved_at is None:
        ticket.resolved_at = datetime.utcnow()
    try:
        db.session.commit()
    except Exception as exc:
        db.session.rollback()
        return jsonify({"status": "error", "message": str(exc) or "Failed to resolve ticket."}), 500
    # If this endpoint is called from a regular HTML form submit (e.g. Tickets/Remarks page),
    # redirect back instead of showing raw JSON in the browser.
    accept_header = request.headers.get("Accept") or ""
    wants_json = request.is_json or "application/json" in accept_header
    if not wants_json:
        return redirect(request.referrer or url_for("main.tickets_page"))
    return jsonify({"status": "ok", "resolved_at": _format_datetime(ticket.resolved_at)})
@main_bp.route("/api/tickets/<int:ticket_id>/link-run", methods=["POST"])
@login_required
@roles_required("admin", "operator", "viewer")
def api_ticket_link_run(ticket_id: int):
    """Manually link an existing job run to a ticket (admin/operator only)."""
    if get_active_role() not in ("admin", "operator"):
        return jsonify({"status": "error", "message": "Forbidden."}), 403
    ticket = Ticket.query.get_or_404(ticket_id)
    data = request.get_json(silent=True) or {}
    try:
        run_id = int(data.get("job_run_id") or 0)
    except Exception:
        run_id = 0
    if run_id <= 0:
        return jsonify({"status": "error", "message": "job_run_id is required."}), 400
    run = JobRun.query.get(run_id)
    if run is None:
        return jsonify({"status": "error", "message": "Job run not found."}), 404
    try:
        db.session.add(TicketJobRun(ticket_id=ticket.id, job_run_id=run.id, link_source="manual"))
        db.session.commit()
    except Exception as exc:
        db.session.rollback()
        return jsonify({"status": "error", "message": str(exc) or "Failed to link run."}), 500
    return jsonify({"status": "ok"})
@main_bp.route("/api/remarks", methods=["GET", "POST"])
@login_required
@roles_required("admin", "operator", "viewer")
def api_remarks():
    """List remarks (GET) or create a remark scoped to a job run (POST).

    GET query params:
        active: "0" to include resolved remarks (default: active only).
        q: substring filter on the remark body.
        customer_id: optional customer filter via remark scopes
            (added for consistency with /api/tickets; backward-compatible).

    POST json body:
        body: remark text.
        job_run_id: the job run to scope/link the remark to (required).
    """
    if request.method == "GET":
        active = (request.args.get("active") or "1").strip() != "0"
        q = (request.args.get("q") or "").strip()
        try:
            customer_id = int(request.args.get("customer_id") or 0)
        except Exception:
            customer_id = 0
        query = Remark.query
        if active:
            query = query.filter(Remark.resolved_at.is_(None))
        if q:
            like_q = f"%{q}%"
            query = query.filter(Remark.body.ilike(like_q))
        # Consistency with api_tickets: optional customer filter via scopes.
        if customer_id:
            query = query.join(RemarkScope, RemarkScope.remark_id == Remark.id).filter(RemarkScope.customer_id == customer_id)
        query = query.order_by(Remark.start_date.desc()).limit(500)
        items = []
        for r in query.all():
            items.append(
                {
                    "id": r.id,
                    "body": r.body or "",
                    "active_from_date": str(getattr(r, "active_from_date", "") or ""),
                    "start_date": _format_datetime(r.start_date) if r.start_date else "-",
                    "resolved_at": _format_datetime(r.resolved_at) if r.resolved_at else "",
                    "active": r.resolved_at is None,
                }
            )
        return jsonify({"status": "ok", "remarks": items})
    # POST — only admins/operators may create remarks.
    if get_active_role() not in ("admin", "operator"):
        return jsonify({"status": "error", "message": "Forbidden."}), 403
    payload = request.get_json(silent=True) or {}
    body = (payload.get("body") or "").strip() or ""
    try:
        run_id = int(payload.get("job_run_id") or 0)
    except Exception:
        run_id = 0
    if run_id <= 0:
        return jsonify({"status": "error", "message": "job_run_id is required."}), 400
    run = JobRun.query.get(run_id)
    if not run:
        return jsonify({"status": "error", "message": "Job run not found."}), 404
    job = Job.query.get(run.job_id) if run else None
    now = datetime.utcnow()
    remark = Remark(
        title=None,
        body=body,
        active_from_date=_to_amsterdam_date(run.run_at) or _to_amsterdam_date(now) or now.date(),
        start_date=now,
        resolved_at=None,
    )
    try:
        db.session.add(remark)
        db.session.flush()
        # Minimal scope derived from the run's job so date-based matching works.
        scope = RemarkScope(
            remark_id=remark.id,
            scope_type="job",
            customer_id=job.customer_id if job else None,
            backup_software=job.backup_software if job else None,
            backup_type=job.backup_type if job else None,
            job_id=job.id if job else None,
            job_name_match=job.job_name if job else None,
            job_name_match_mode="exact",
        )
        db.session.add(scope)
        link = RemarkJobRun(remark_id=remark.id, job_run_id=run.id, link_source="manual")
        db.session.add(link)
        db.session.commit()
    except Exception as exc:
        db.session.rollback()
        return jsonify({"status": "error", "message": str(exc) or "Failed to create remark."}), 500
    return jsonify(
        {
            "status": "ok",
            "remark": {
                "id": remark.id,
                "body": remark.body or "",
                "start_date": _format_datetime(remark.start_date),
                "resolved_at": "",
                "active": True,
            },
        }
    )
@main_bp.route("/api/remarks/<int:remark_id>", methods=["PATCH"])
@login_required
@roles_required("admin", "operator", "viewer")
def api_remark_update(remark_id: int):
    """Update the body text of an existing remark (admin/operator only)."""
    if get_active_role() not in ("admin", "operator"):
        return jsonify({"status": "error", "message": "Forbidden."}), 403
    remark = Remark.query.get_or_404(remark_id)
    data = request.get_json(silent=True) or {}
    if "body" in data:
        # Normalize to a stripped string; an empty body is stored as "".
        remark.body = (data.get("body") or "").strip()
    try:
        db.session.commit()
    except Exception as exc:
        db.session.rollback()
        message = str(exc) or "Failed to update remark."
        return jsonify({"status": "error", "message": message}), 500
    return jsonify({"status": "ok"})
@main_bp.route("/api/remarks/<int:remark_id>/resolve", methods=["POST"])
@login_required
@roles_required("admin", "operator", "viewer")
def api_remark_resolve(remark_id: int):
    """Mark a remark resolved (idempotent); admin/operator only."""
    if get_active_role() not in ("admin", "operator"):
        return jsonify({"status": "error", "message": "Forbidden."}), 403
    remark = Remark.query.get_or_404(remark_id)
    if remark.resolved_at is None:
        remark.resolved_at = datetime.utcnow()
    try:
        db.session.commit()
    except Exception as exc:
        db.session.rollback()
        return jsonify({"status": "error", "message": str(exc) or "Failed to resolve remark."}), 500
    # If this endpoint is called from a regular HTML form submit (e.g. Tickets/Remarks page),
    # redirect back instead of showing raw JSON in the browser.
    accept_header = request.headers.get("Accept") or ""
    wants_json = request.is_json or "application/json" in accept_header
    if not wants_json:
        return redirect(request.referrer or url_for("main.tickets_page"))
    return jsonify({"status": "ok", "resolved_at": _format_datetime(remark.resolved_at)})
@main_bp.route("/api/remarks/<int:remark_id>/link-run", methods=["POST"])
@login_required
@roles_required("admin", "operator", "viewer")
def api_remark_link_run(remark_id: int):
    """Manually link an existing job run to a remark (admin/operator only)."""
    if get_active_role() not in ("admin", "operator"):
        return jsonify({"status": "error", "message": "Forbidden."}), 403
    remark = Remark.query.get_or_404(remark_id)
    data = request.get_json(silent=True) or {}
    try:
        run_id = int(data.get("job_run_id") or 0)
    except Exception:
        run_id = 0
    if run_id <= 0:
        return jsonify({"status": "error", "message": "job_run_id is required."}), 400
    run = JobRun.query.get(run_id)
    if run is None:
        return jsonify({"status": "error", "message": "Job run not found."}), 404
    try:
        db.session.add(RemarkJobRun(remark_id=remark.id, job_run_id=run.id, link_source="manual"))
        db.session.commit()
    except Exception as exc:
        db.session.rollback()
        return jsonify({"status": "error", "message": str(exc) or "Failed to link run."}), 500
    return jsonify({"status": "ok"})

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,310 @@
from .routes_shared import * # noqa: F401,F403
from .routes_shared import _format_datetime, _get_database_size_bytes, _apply_overrides_to_run, _format_bytes, _get_free_disk_bytes, _infer_schedule_map_from_runs
@main_bp.route("/")
@login_required
def dashboard():
    """Render the dashboard.

    Gathers four independent widgets, each wrapped so a failure in one never
    breaks the page:
      - inbox_count: number of mail messages currently in the inbox
      - today's job-status buckets (success/override/expected/warning/error/missed)
      - system status: database size and free disk space
      - unread news items for the current user
    """
    # Inbox open items
    try:
        inbox_query = MailMessage.query
        if hasattr(MailMessage, "location"):
            inbox_query = inbox_query.filter(MailMessage.location == "inbox")
        inbox_count = int(inbox_query.count() or 0)
    except Exception:
        inbox_count = 0
    # Daily job status counters for today (Europe/Amsterdam)
    try:
        from zoneinfo import ZoneInfo
        tz = _get_ui_timezone()
    except Exception:
        tz = None
    # Robust fallback: the dashboard should never degrade into UTC/None because it would
    # incorrectly count not-yet-due jobs as Missed.
    try:
        if tz is None:
            tz = ZoneInfo("Europe/Amsterdam")
    except Exception:
        tz = None
    today_date = datetime.now(tz).date() if tz else datetime.utcnow().date()
    if tz:
        # Compute the local day's [start, end) window as UTC-naive bounds,
        # matching how JobRun.run_at is compared below.
        local_midnight = datetime(
            year=today_date.year,
            month=today_date.month,
            day=today_date.day,
            hour=0,
            minute=0,
            second=0,
            tzinfo=tz,
        )
        start_of_day = local_midnight.astimezone(datetime_module.timezone.utc).replace(tzinfo=None)
        end_of_day = (
            local_midnight + timedelta(days=1)
        ).astimezone(datetime_module.timezone.utc).replace(tzinfo=None)
    else:
        start_of_day = datetime(
            year=today_date.year,
            month=today_date.month,
            day=today_date.day,
            hour=0,
            minute=0,
            second=0,
        )
        end_of_day = start_of_day + timedelta(days=1)
    weekday_idx = today_date.weekday() # 0=Mon..6=Sun
    jobs_success_count = 0
    jobs_success_override_count = 0
    jobs_expected_count = 0
    jobs_warning_count = 0
    jobs_error_count = 0
    jobs_missed_count = 0
    try:
        now_utc = datetime.utcnow().replace(tzinfo=datetime_module.timezone.utc)
        now_local = now_utc.astimezone(tz) if tz else now_utc
        jobs = Job.query.join(Customer, isouter=True).all()
        for job in jobs:
            # Jobs without an inferred schedule for today's weekday are not
            # counted in any bucket.
            schedule_map = _infer_schedule_map_from_runs(job.id)
            expected_times = schedule_map.get(weekday_idx) or []
            if not expected_times:
                continue
            # Build expected datetimes for today in UI local time.
            expected_dt_local: list[datetime] = []
            if tz:
                for tstr in expected_times:
                    try:
                        hh, mm = [int(x) for x in (tstr or "").split(":", 1)]
                    except Exception:
                        continue
                    expected_dt_local.append(
                        datetime(
                            year=today_date.year,
                            month=today_date.month,
                            day=today_date.day,
                            hour=hh,
                            minute=mm,
                            second=0,
                            tzinfo=tz,
                        )
                    )
            expected_dt_local = sorted(expected_dt_local)
            runs_for_day = (
                JobRun.query.filter(
                    JobRun.job_id == job.id,
                    JobRun.run_at >= start_of_day,
                    JobRun.run_at < end_of_day,
                )
                .order_by(JobRun.run_at.asc())
                .all()
            )
            status = ""
            override_applied = False
            if runs_for_day:
                # Only the latest run of the day decides the bucket.
                last_run = runs_for_day[-1]
                try:
                    status_display, override_applied, _override_level, _ov_id, _ov_reason = _apply_overrides_to_run(job, last_run)
                    status_raw = (status_display or last_run.status or "").strip()
                    status = status_raw.lower()
                    if override_applied and ("override" in status or status_display):
                        # Normalize to success for dashboard bucketing when an override is applied.
                        status = "success"
                except Exception:
                    status = (last_run.status or "").strip().lower()
                    override_applied = False
            else:
                status = ""
            # Determine if the job is still expected to run later today.
            is_expected = False
            if tz and expected_dt_local:
                last_run_local = None
                if runs_for_day and getattr(runs_for_day[-1], "run_at", None):
                    try:
                        # run_at is assumed to be stored as UTC-naive — TODO confirm.
                        dt = runs_for_day[-1].run_at
                        if dt.tzinfo is None:
                            dt = dt.replace(tzinfo=datetime_module.timezone.utc)
                        last_run_local = dt.astimezone(tz)
                    except Exception:
                        last_run_local = None
                # Anchor the "next expected" search after the last run (or local midnight).
                anchor = last_run_local or datetime(
                    year=today_date.year,
                    month=today_date.month,
                    day=today_date.day,
                    hour=0,
                    minute=0,
                    second=0,
                    tzinfo=tz,
                )
                next_expected = None
                for edt in expected_dt_local:
                    if edt > anchor:
                        next_expected = edt
                        break
                if next_expected is not None and now_local < next_expected:
                    is_expected = True
            # Status precedence:
            # - Warning/Error always reflect the latest run
            # - Otherwise show Expected if a next run is still upcoming
            # - Otherwise show Success (and Success override) when a run exists
            # - Otherwise show Missed once the last expected window has passed
            if status == "warning":
                jobs_warning_count += 1
            elif status in ("error", "failed", "failure"):
                jobs_error_count += 1
            elif is_expected:
                jobs_expected_count += 1
            elif override_applied and runs_for_day:
                jobs_success_override_count += 1
            elif status in ("success", "ok") and runs_for_day:
                jobs_success_count += 1
            else:
                # No successful run and no upcoming expected run -> missed
                jobs_missed_count += 1
    except Exception:
        # Keep zeros on any unexpected failure
        pass
    # System status (same helpers as Settings)
    db_size_bytes = _get_database_size_bytes()
    free_disk_bytes = _get_free_disk_bytes()
    db_size_human = _format_bytes(db_size_bytes)
    free_disk_human = _format_bytes(free_disk_bytes)
    free_disk_warning = False
    try:
        # Warn when less than 2 GiB of disk space remains.
        free_disk_warning = free_disk_bytes is not None and free_disk_bytes < (2 * 1024 * 1024 * 1024)
    except Exception:
        free_disk_warning = False
    # News (unread per user)
    news_items = []
    try:
        now = datetime.utcnow()
        uid = getattr(current_user, "id", None)
        if uid:
            # Active items within their publish window that this user has not read yet.
            q = NewsItem.query.filter(NewsItem.active.is_(True))
            q = q.filter((NewsItem.publish_from.is_(None)) | (NewsItem.publish_from <= now))
            q = q.filter((NewsItem.publish_until.is_(None)) | (NewsItem.publish_until >= now))
            q = q.outerjoin(
                NewsRead,
                (NewsRead.news_item_id == NewsItem.id) & (NewsRead.user_id == uid),
            ).filter(NewsRead.id.is_(None))
            q = q.order_by(NewsItem.pinned.desc(), NewsItem.publish_from.desc().nullslast(), NewsItem.created_at.desc())
            news_items = q.limit(10).all()
    except Exception:
        news_items = []
    return render_template(
        "main/dashboard.html",
        inbox_count=inbox_count,
        jobs_success_count=jobs_success_count,
        jobs_success_override_count=jobs_success_override_count,
        jobs_expected_count=jobs_expected_count,
        jobs_warning_count=jobs_warning_count,
        jobs_error_count=jobs_error_count,
        jobs_missed_count=jobs_missed_count,
        db_size_human=db_size_human,
        free_disk_human=free_disk_human,
        free_disk_warning=free_disk_warning,
        news_items=news_items,
    )
@main_bp.route("/logging")
@login_required
@roles_required("admin", "operator")
def logging_page():
    """Render a paginated, newest-first view of AdminLog entries."""
    per_page = 20
    try:
        page = int(request.args.get("page", "1"))
    except ValueError:
        page = 1
    page = max(page, 1)
    query = AdminLog.query.order_by(AdminLog.created_at.desc().nullslast(), AdminLog.id.desc())
    total_items = query.count()
    total_pages = max(1, math.ceil(total_items / per_page)) if total_items else 1
    # Clamp out-of-range pages to the last page instead of showing nothing.
    page = min(page, total_pages)
    entries = query.offset((page - 1) * per_page).limit(per_page).all()
    rows = [
        {
            "created_at": _format_datetime(entry.created_at),
            "user": entry.user or "",
            "event_type": entry.event_type or "",
            "message": entry.message or "",
            "details": entry.details or "",
        }
        for entry in entries
    ]
    return render_template(
        "main/logging.html",
        # The template expects `logs`.
        logs=rows,
        # Keep `rows` for backward-compatibility (if any).
        rows=rows,
        page=page,
        total_pages=total_pages,
        has_prev=page > 1,
        has_next=page < total_pages,
    )
@main_bp.route("/theme", methods=["POST"])
@login_required
def set_theme_preference():
    """Persist the user's theme choice and bounce back to the referring page."""
    # Accept both field names to stay compatible with templates/UI
    # (the navbar theme select posts name="theme").
    raw_value = request.form.get("theme_preference") or request.form.get("theme") or "auto"
    pref = raw_value.strip().lower()
    if pref not in ("auto", "light", "dark"):
        pref = "auto"
    current_user.theme_preference = pref
    db.session.commit()
    return redirect(request.referrer or url_for("main.dashboard"))
@main_bp.route("/set-active-role", methods=["POST"])
@login_required
def set_active_role_route():
    """Switch the user's active role, falling back to a plain session write."""
    requested_role = (request.form.get("active_role") or "").strip()
    try:
        current_user.set_active_role(requested_role)
    except Exception:
        # Fallback: store in session directly when the user model cannot.
        if requested_role:
            session["active_role"] = requested_role
    # Always redirect to dashboard to avoid landing on a page without permissions
    return redirect(url_for("main.dashboard"))

View File

@ -0,0 +1,212 @@
from .routes_shared import * # noqa: F401,F403
@main_bp.route("/customers")
@login_required
@roles_required("admin", "operator", "viewer")
def customers():
    """List all customers alphabetically together with their linked-job counts."""
    rows = []
    for customer in Customer.query.order_by(Customer.name.asc()).all():
        # Count jobs linked to this customer; tolerate relation/query failures.
        try:
            job_count = customer.jobs.count()
        except Exception:
            job_count = 0
        rows.append(
            {
                "id": customer.id,
                "name": customer.name,
                "active": bool(customer.active),
                "job_count": job_count,
            }
        )
    can_manage = current_user.is_authenticated and get_active_role() in ("admin", "operator")
    return render_template(
        "main/customers.html",
        customers=rows,
        can_manage=can_manage,
    )
@main_bp.route("/customers/create", methods=["POST"])
@login_required
@roles_required("admin", "operator")
def customers_create():
    """Create a customer from the form; the name must be non-empty and unique."""
    name = (request.form.get("name") or "").strip()
    active = bool(request.form.get("active"))
    if not name:
        flash("Customer name is required.", "danger")
        return redirect(url_for("main.customers"))
    existing = Customer.query.filter_by(name=name).first()
    if existing:
        flash("Customer already exists.", "danger")
        return redirect(url_for("main.customers"))
    try:
        customer = Customer(name=name, active=active)
        db.session.add(customer)
        db.session.commit()
        flash("Customer created.", "success")
    except Exception as exc:
        db.session.rollback()
        # Log via the app logger (with traceback) instead of print(), for
        # consistency with customers_import().
        current_app.logger.exception(f"[customers] Failed to create customer: {exc}")
        flash("Failed to create customer.", "danger")
    return redirect(url_for("main.customers"))
@main_bp.route("/customers/<int:customer_id>/edit", methods=["POST"])
@login_required
@roles_required("admin", "operator")
def customers_edit(customer_id: int):
    """Rename or (de)activate a customer; the new name must stay unique."""
    customer = Customer.query.get_or_404(customer_id)
    name = (request.form.get("name") or "").strip()
    active = bool(request.form.get("active"))
    if not name:
        flash("Customer name is required.", "danger")
        return redirect(url_for("main.customers"))
    # Uniqueness check excludes the row being edited.
    existing = Customer.query.filter(
        Customer.id != customer.id, Customer.name == name
    ).first()
    if existing:
        flash("Customer already exists.", "danger")
        return redirect(url_for("main.customers"))
    try:
        customer.name = name
        customer.active = active
        db.session.commit()
        flash("Customer updated.", "success")
    except Exception as exc:
        db.session.rollback()
        # Log via the app logger (with traceback) instead of print(), for
        # consistency with customers_import().
        current_app.logger.exception(f"[customers] Failed to update customer: {exc}")
        flash("Failed to update customer.", "danger")
    return redirect(url_for("main.customers"))
@main_bp.route("/customers/<int:customer_id>/delete", methods=["POST"])
@login_required
@roles_required("admin", "operator")
def customers_delete(customer_id: int):
    """Hard-delete a customer row; failures are rolled back and flashed."""
    customer = Customer.query.get_or_404(customer_id)
    try:
        db.session.delete(customer)
        db.session.commit()
        flash("Customer deleted.", "success")
    except Exception as exc:
        db.session.rollback()
        # Log via the app logger (with traceback) instead of print(), for
        # consistency with customers_import().
        current_app.logger.exception(f"[customers] Failed to delete customer: {exc}")
        flash("Failed to delete customer.", "danger")
    return redirect(url_for("main.customers"))
@main_bp.route("/customers/export.csv")
@login_required
@roles_required("admin", "operator")
def customers_export():
    """Download all customers as a two-column CSV (name, active flag)."""
    import csv
    from io import StringIO
    buf = StringIO()
    writer = csv.writer(buf)
    writer.writerow(["name", "active"])
    for customer in Customer.query.order_by(Customer.name.asc()).all():
        writer.writerow([customer.name, "1" if customer.active else "0"])
    return Response(
        buf.getvalue().encode("utf-8"),
        mimetype="text/csv; charset=utf-8",
        headers={"Content-Disposition": "attachment; filename=customers.csv"},
    )
@main_bp.route("/customers/import", methods=["POST"])
@login_required
@roles_required("admin", "operator")
def customers_import():
    """Import customers from an uploaded CSV with columns: name[, active]."""
    upload = request.files.get("file")
    if not upload or not getattr(upload, "filename", ""):
        flash("No file selected.", "warning")
        return redirect(url_for("main.customers"))
    try:
        # utf-8-sig strips a BOM if present; replace keeps bad bytes non-fatal.
        text_data = upload.read().decode("utf-8-sig", errors="replace")
    except Exception:
        flash("Failed to read the uploaded file.", "danger")
        return redirect(url_for("main.customers"))
    import csv
    from io import StringIO
    try:
        rows = list(csv.reader(StringIO(text_data)))
    except Exception:
        flash("Invalid CSV format.", "danger")
        return redirect(url_for("main.customers"))
    if not rows:
        flash("CSV file is empty.", "warning")
        return redirect(url_for("main.customers"))
    created = 0
    updated = 0
    skipped = 0
    # If first row looks like a header, skip it
    header = [cell.strip().lower() for cell in (rows[0] or [])]
    data_rows = rows[1:] if ("name" in header or "customer" in header) else rows
    for row in data_rows:
        if not row:
            continue
        name = (row[0] or "").strip()
        if not name:
            skipped += 1
            continue
        # Optional second column toggles the active flag; unrecognized values
        # leave it untouched (None).
        active_val = None
        if len(row) >= 2:
            flag = (row[1] or "").strip().lower()
            if flag in ("1", "true", "yes", "y", "active"):
                active_val = True
            elif flag in ("0", "false", "no", "n", "inactive"):
                active_val = False
        existing = Customer.query.filter_by(name=name).first()
        if existing:
            if active_val is not None:
                existing.active = active_val
                updated += 1
            else:
                skipped += 1
        else:
            db.session.add(Customer(name=name, active=True if active_val is None else active_val))
            created += 1
    try:
        db.session.commit()
        flash(f"Import finished. Created: {created}, Updated: {updated}, Skipped: {skipped}.", "success")
    except Exception as exc:
        db.session.rollback()
        current_app.logger.exception(f"Failed to import customers: {exc}")
        flash("Failed to import customers.", "danger")
    return redirect(url_for("main.customers"))

View File

@ -0,0 +1,469 @@
from .routes_shared import * # noqa: F401,F403
from .routes_shared import _format_datetime, _get_or_create_settings, _apply_overrides_to_run, _infer_schedule_map_from_runs
# Grace window for today's Expected/Missed transition.
# A job is only marked Missed after the latest expected time plus this grace.
MISSED_GRACE_WINDOW = timedelta(hours=1)
@main_bp.route("/daily-jobs")
@login_required
@roles_required("admin", "operator", "viewer")
def daily_jobs():
    """Render the daily-jobs overview: one row per job scheduled on the target
    date (?date=YYYY-MM-DD, default today), with the latest run status,
    Expected/Missed derivation, and ticket/remark indicators.
    """
    # Determine target date (default: today) in Europe/Amsterdam
    date_str = request.args.get("date")
    try:
        from zoneinfo import ZoneInfo
        tz = _get_ui_timezone()
    except Exception:
        tz = None
    try:
        if date_str:
            target_date = datetime.strptime(date_str, "%Y-%m-%d").date()
        else:
            target_date = datetime.now(tz).date() if tz else datetime.utcnow().date()
    except Exception:
        # Invalid ?date falls back to today.
        target_date = datetime.now(tz).date() if tz else datetime.utcnow().date()
    settings = _get_or_create_settings()
    # Before this configured date, jobs are never shown as 'Missed'.
    missed_start_date = getattr(settings, "daily_jobs_start_date", None)
    # Day window: treat run_at as UTC-naive timestamps stored in UTC (existing behavior)
    # Note: if your DB stores local-naive timestamps, this still works because the same logic
    # is used consistently in schedule inference and details.
    if tz:
        local_midnight = datetime(
            year=target_date.year,
            month=target_date.month,
            day=target_date.day,
            hour=0,
            minute=0,
            second=0,
            tzinfo=tz,
        )
        start_of_day = local_midnight.astimezone(datetime_module.timezone.utc).replace(tzinfo=None)
        end_of_day = (local_midnight + timedelta(days=1)).astimezone(datetime_module.timezone.utc).replace(tzinfo=None)
    else:
        start_of_day = datetime(
            year=target_date.year,
            month=target_date.month,
            day=target_date.day,
            hour=0,
            minute=0,
            second=0,
        )
        end_of_day = start_of_day + timedelta(days=1)
    def _to_local(dt_utc):
        # Convert a stored (assumed UTC) timestamp to the UI timezone for display.
        if not dt_utc or not tz:
            return dt_utc
        try:
            if dt_utc.tzinfo is None:
                dt_utc = dt_utc.replace(tzinfo=datetime_module.timezone.utc)
            return dt_utc.astimezone(tz)
        except Exception:
            return dt_utc
    def _bucket_15min(dt_utc):
        # Round a run time down to its 15-minute bucket ("HH:MM") in local time.
        d = _to_local(dt_utc)
        if not d:
            return None
        minute_bucket = (d.minute // 15) * 15
        return f"{d.hour:02d}:{minute_bucket:02d}"
    weekday_idx = target_date.weekday() # 0=Mon..6=Sun
    jobs = (
        Job.query.join(Customer, isouter=True)
        .order_by(Customer.name.asc().nullslast(), Job.backup_software.asc(), Job.backup_type.asc(), Job.job_name.asc())
        .all()
    )
    rows = []
    for job in jobs:
        # Jobs with no inferred schedule for this weekday are omitted entirely.
        schedule_map = _infer_schedule_map_from_runs(job.id)
        expected_times = schedule_map.get(weekday_idx) or []
        if not expected_times:
            continue
        runs_for_day = (
            JobRun.query.filter(
                JobRun.job_id == job.id,
                JobRun.run_at >= start_of_day,
                JobRun.run_at < end_of_day,
            )
            .order_by(JobRun.run_at.asc())
            .all()
        )
        run_count = len(runs_for_day)
        customer_name = job.customer.name if job.customer else ""
        # Ticket/Remark indicators for this job on this date
        # Tickets: active-from date should apply to subsequent runs until resolved.
        has_active_ticket = False
        has_active_remark = False
        try:
            # NOTE(review): the SQL hardcodes 'Europe/Amsterdam' while the UI
            # timezone comes from _get_ui_timezone() — confirm they always agree.
            t_exists = db.session.execute(
                text(
                    """
                    SELECT 1
                    FROM tickets t
                    JOIN ticket_scopes ts ON ts.ticket_id = t.id
                    WHERE ts.job_id = :job_id
                    AND t.active_from_date <= :target_date
                    AND (
                    t.resolved_at IS NULL
                    OR ((t.resolved_at AT TIME ZONE 'UTC' AT TIME ZONE 'Europe/Amsterdam')::date) >= :target_date
                    )
                    LIMIT 1
                    """
                ),
                {"job_id": job.id, "target_date": target_date},
            ).first()
            has_active_ticket = bool(t_exists)
            r_exists = db.session.execute(
                text(
                    """
                    SELECT 1
                    FROM remarks r
                    JOIN remark_scopes rs ON rs.remark_id = r.id
                    WHERE rs.job_id = :job_id
                    AND COALESCE(
                    r.active_from_date,
                    ((r.start_date AT TIME ZONE 'UTC' AT TIME ZONE 'Europe/Amsterdam')::date)
                    ) <= :target_date
                    AND (
                    r.resolved_at IS NULL
                    OR ((r.resolved_at AT TIME ZONE 'UTC' AT TIME ZONE 'Europe/Amsterdam')::date) >= :target_date
                    )
                    LIMIT 1
                    """
                ),
                {"job_id": job.id, "target_date": target_date},
            ).first()
            has_active_remark = bool(r_exists)
        except Exception:
            has_active_ticket = False
            has_active_remark = False
        # We show a single row per job for today.
        last_remark_excerpt = ""
        last_override_applied = False
        if run_count > 0:
            last_run = runs_for_day[-1]
            try:
                status_display, override_applied, override_level, _ov_id, _ov_reason = _apply_overrides_to_run(job, last_run)
                # Expose override information so the status dot can render the blue override indicator.
                last_override_applied = bool(override_applied)
                # If this run is flagged as missed, ensure we always expose a
                # concrete status so the UI can render the missed status dot.
                if getattr(last_run, "missed", False):
                    last_status = status_display or "Missed"
                else:
                    last_status = status_display or (last_run.status or "-")
            except Exception:
                last_status = last_run.status or "-"
                last_override_applied = False
            display_time = _bucket_15min(last_run.run_at) or (expected_times[-1] if expected_times else "")
            # Collapse the remark to a one-line, 140-char excerpt for the table.
            last_remark = getattr(last_run, "remark", None) or ""
            last_remark_excerpt = " | ".join([ln.strip() for ln in str(last_remark).replace("\r\n", "\n").split("\n") if ln.strip()])
            if len(last_remark_excerpt) > 140:
                last_remark_excerpt = last_remark_excerpt[:137] + "..."
        else:
            # Always show the latest expected time for rows without runs.
            latest_expected = expected_times[-1] if expected_times else ""
            display_time = latest_expected
            # Before the configured start date we do not mark runs as 'Missed',
            # but we still show the expected time and (when applicable) an 'Expected' status.
            try:
                today_local = datetime.now(tz).date() if tz else datetime.utcnow().date()
            except Exception:
                today_local = datetime.utcnow().date()
            if missed_start_date and target_date < missed_start_date:
                if target_date > today_local:
                    last_status = "Expected"
                elif target_date == today_local:
                    # NOTE(review): this latest-expected-time computation is
                    # duplicated in the else-branch below — candidate for a helper.
                    is_expected = False
                    try:
                        now_local = datetime.now(tz) if tz else datetime.utcnow()
                        latest_dt = None
                        for tstr in expected_times:
                            try:
                                parts = (tstr or "").strip().split(":")
                                if len(parts) < 2:
                                    continue
                                hh = int(parts[0])
                                mm = int(parts[1])
                                dt_local = datetime.combine(target_date, datetime.min.time()).replace(hour=hh, minute=mm)
                                if tz:
                                    dt_local = dt_local.replace(tzinfo=tz)
                                if latest_dt is None or dt_local > latest_dt:
                                    latest_dt = dt_local
                            except Exception:
                                continue
                        # If we cannot parse any expected time, assume 'Expected'.
                        if latest_dt is None:
                            is_expected = True
                        else:
                            is_expected = now_local <= (latest_dt + MISSED_GRACE_WINDOW)
                    except Exception:
                        is_expected = True
                    last_status = "Expected" if is_expected else "-"
                else:
                    last_status = "-"
            else:
                # For today and future dates, show 'Expected' until the latest expected time has passed.
                if target_date > today_local:
                    last_status = "Expected"
                elif target_date == today_local:
                    is_expected = False
                    try:
                        now_local = datetime.now(tz) if tz else datetime.utcnow()
                        latest_dt = None
                        for tstr in expected_times:
                            try:
                                parts = (tstr or "").strip().split(":")
                                if len(parts) < 2:
                                    continue
                                hh = int(parts[0])
                                mm = int(parts[1])
                                dt_local = datetime.combine(target_date, datetime.min.time()).replace(hour=hh, minute=mm)
                                if tz:
                                    dt_local = dt_local.replace(tzinfo=tz)
                                if latest_dt is None or dt_local > latest_dt:
                                    latest_dt = dt_local
                            except Exception:
                                continue
                        # If we cannot parse any expected time, assume 'Expected' (better than incorrectly showing 'Missed')
                        if latest_dt is None:
                            is_expected = True
                        else:
                            is_expected = now_local <= (latest_dt + MISSED_GRACE_WINDOW)
                    except Exception:
                        is_expected = True
                    last_status = "Expected" if is_expected else "Missed"
                else:
                    last_status = "Missed"
        rows.append(
            {
                "job_id": job.id,
                "customer_name": customer_name,
                "backup_software": job.backup_software or "",
                "backup_type": job.backup_type or "",
                "job_name": job.job_name or "",
                "expected_time": display_time,
                "last_status": last_status,
                "last_override_applied": bool(last_override_applied),
                "run_count": run_count,
                "has_active_ticket": bool(has_active_ticket),
                "has_active_remark": bool(has_active_remark),
                "last_remark_excerpt": last_remark_excerpt,
            }
        )
    # Sort: Customer -> Backup -> Type -> Job name
    # (case-insensitive, NULL/empty last by virtue of empty string)
    rows.sort(
        key=lambda r: (
            (r.get("customer_name") or "").lower(),
            (r.get("backup_software") or "").lower(),
            (r.get("backup_type") or "").lower(),
            (r.get("job_name") or "").lower(),
        )
    )
    target_date_str = target_date.strftime("%Y-%m-%d")
    return render_template("main/daily_jobs.html", rows=rows, target_date_str=target_date_str)
@main_bp.route("/daily-jobs/details")
@login_required
@roles_required("admin", "operator", "viewer")
def daily_jobs_details():
    """Return JSON details for one job on one date: all runs in the local day
    window, each with mail metadata, per-object results, and override info.

    Query params: job_id (int, required), date (YYYY-MM-DD, required).
    """
    try:
        job_id = int(request.args.get("job_id", "0"))
    except ValueError:
        job_id = 0
    date_str = request.args.get("date")
    if job_id <= 0 or not date_str:
        return jsonify({"status": "error", "message": "Invalid parameters."}), 400
    try:
        target_date = datetime.strptime(date_str, "%Y-%m-%d").date()
    except Exception:
        return jsonify({"status": "error", "message": "Invalid date."}), 400
    job = Job.query.get_or_404(job_id)
    try:
        from zoneinfo import ZoneInfo
        tz = _get_ui_timezone()
    except Exception:
        tz = None
    # Day window as UTC-naive bounds, same convention as daily_jobs().
    if tz:
        local_midnight = datetime(
            year=target_date.year,
            month=target_date.month,
            day=target_date.day,
            hour=0,
            minute=0,
            second=0,
            tzinfo=tz,
        )
        start_of_day = local_midnight.astimezone(datetime_module.timezone.utc).replace(tzinfo=None)
        end_of_day = (local_midnight + timedelta(days=1)).astimezone(datetime_module.timezone.utc).replace(tzinfo=None)
    else:
        start_of_day = datetime(
            year=target_date.year,
            month=target_date.month,
            day=target_date.day,
            hour=0,
            minute=0,
            second=0,
        )
        end_of_day = start_of_day + timedelta(days=1)
    runs_for_day = (
        JobRun.query.filter(
            JobRun.job_id == job.id,
            JobRun.run_at >= start_of_day,
            JobRun.run_at < end_of_day,
        )
        .order_by(JobRun.run_at.desc())
        .all()
    )
    runs_payload = []
    for run in runs_for_day:
        msg = MailMessage.query.get(run.mail_message_id) if run.mail_message_id else None
        mail_meta = None
        has_eml = False
        mail_message_id = run.mail_message_id
        body_html = ""
        if msg:
            mail_meta = {
                "from_address": msg.from_address or "",
                "subject": msg.subject or "",
                "received_at": _format_datetime(msg.received_at),
            }
            body_html = msg.html_body or ""
            has_eml = bool(getattr(msg, "eml_stored_at", None))
        objects_payload = []
        # Preferred: read persisted objects for this run from run_object_links/customer_objects (Step 2).
        try:
            rows = (
                db.session.execute(
                    text(
                        """
                        SELECT
                        co.object_name AS name,
                        rol.status AS status,
                        rol.error_message AS error_message
                        FROM run_object_links rol
                        JOIN customer_objects co ON co.id = rol.customer_object_id
                        WHERE rol.run_id = :run_id
                        ORDER BY co.object_name ASC
                        """
                    ),
                    {"run_id": run.id},
                )
                .mappings()
                .all()
            )
            for r in rows:
                objects_payload.append(
                    {
                        "name": r.get("name") or "",
                        "type": "",
                        "status": r.get("status") or "",
                        "error_message": r.get("error_message") or "",
                    }
                )
        except Exception:
            # Fallback for older data / during upgrades
            try:
                objects = run.objects.order_by(JobObject.object_name.asc()).all()
            except Exception:
                objects = list(run.objects or [])
            for obj in objects:
                objects_payload.append(
                    {
                        "name": obj.object_name,
                        "type": getattr(obj, "object_type", "") or "",
                        "status": obj.status or "",
                        "error_message": obj.error_message or "",
                    }
                )
        # If no run-linked objects exist yet, fall back to objects parsed/stored on the mail message.
        if (not objects_payload) and msg:
            try:
                for mo in (
                    MailObject.query.filter_by(mail_message_id=msg.id)
                    .order_by(MailObject.object_name.asc())
                    .all()
                ):
                    objects_payload.append(
                        {
                            "name": mo.object_name or "",
                            "type": mo.object_type or "",
                            "status": mo.status or "",
                            "error_message": mo.error_message or "",
                        }
                    )
            except Exception:
                pass
        status_display, override_applied, override_level, _ov_id, _ov_reason = _apply_overrides_to_run(job, run)
        # Ensure missed runs always expose a concrete status for the UI.
        # (A second `if run.missed and not status_display` check that used to
        # follow here was dead code — after this line status_display is always
        # truthy for missed runs — and has been removed.)
        if getattr(run, "missed", False):
            status_display = status_display or "Missed"
        runs_payload.append(
            {
                "id": run.id,
                "run_at": _format_datetime(run.run_at),
                "status": status_display,
                "remark": run.remark or "",
                "missed": bool(run.missed),
                "override_applied": bool(override_applied),
                "override_level": override_level,
                "mail_message_id": mail_message_id,
                "has_eml": bool(has_eml),
                "mail": mail_meta,
                "body_html": body_html,
                "objects": objects_payload,
            }
        )
    job_payload = {
        "id": job.id,
        "customer_name": job.customer.name if job.customer else "",
        "backup_software": job.backup_software or "",
        "backup_type": job.backup_type or "",
        "job_name": job.job_name or "",
    }
    return jsonify({"status": "ok", "job": job_payload, "runs": runs_payload})

View File

@ -0,0 +1,261 @@
from .routes_shared import * # noqa: F401,F403
from .routes_shared import _format_datetime
@main_bp.route("/feedback")
@login_required
@roles_required("admin", "operator", "viewer")
def feedback_page():
    """List feedback items (bugs/features) with vote counts and the current
    user's vote flag, filterable by type/status/text and sortable.

    SQL safety: `where_sql` and `order_sql` are assembled only from the
    hard-coded fragments below (all user values go through bound parameters),
    so the f-string interpolation into the query text is not injectable.
    """
    item_type = (request.args.get("type") or "").strip().lower()
    if item_type not in ("", "bug", "feature"):
        item_type = ""
    # Default to showing both open and resolved items. Resolved items should remain
    # visible for all users until an admin deletes them.
    status = (request.args.get("status") or "all").strip().lower()
    if status not in ("open", "resolved", "all"):
        status = "all"
    q = (request.args.get("q") or "").strip()
    sort = (request.args.get("sort") or "votes").strip().lower()
    if sort not in ("votes", "newest", "updated"):
        sort = "votes"
    # Soft-deleted items are always excluded.
    where = ["fi.deleted_at IS NULL"]
    params = {"user_id": int(current_user.id)}
    if item_type:
        where.append("fi.item_type = :item_type")
        params["item_type"] = item_type
    if status != "all":
        where.append("fi.status = :status")
        params["status"] = status
    if q:
        where.append("(fi.title ILIKE :q OR fi.description ILIKE :q OR COALESCE(fi.component,'') ILIKE :q)")
        params["q"] = f"%{q}%"
    where_sql = " AND ".join(where)
    if sort == "newest":
        order_sql = "fi.created_at DESC"
    elif sort == "updated":
        order_sql = "fi.updated_at DESC"
    else:
        order_sql = "vote_count DESC, fi.created_at DESC"
    sql = text(
        f"""
        SELECT
        fi.id,
        fi.item_type,
        fi.title,
        fi.component,
        fi.status,
        fi.created_at,
        fi.updated_at,
        u.username AS created_by,
        COALESCE(v.vote_count, 0) AS vote_count,
        EXISTS (
        SELECT 1
        FROM feedback_votes fv
        WHERE fv.feedback_item_id = fi.id
        AND fv.user_id = :user_id
        ) AS user_voted
        FROM feedback_items fi
        JOIN users u ON u.id = fi.created_by_user_id
        LEFT JOIN (
        SELECT feedback_item_id, COUNT(*) AS vote_count
        FROM feedback_votes
        GROUP BY feedback_item_id
        ) v ON v.feedback_item_id = fi.id
        WHERE {where_sql}
        ORDER BY {order_sql}
        LIMIT 500
        """
    )
    rows = db.session.execute(sql, params).mappings().all()
    items = []
    for r in rows:
        items.append(
            {
                "id": int(r["id"]),
                "item_type": (r["item_type"] or "").lower(),
                "title": r["title"] or "",
                "component": r["component"] or "",
                "status": (r["status"] or "open").lower(),
                "created_at": _format_datetime(r["created_at"]),
                "updated_at": _format_datetime(r["updated_at"]),
                "created_by": r["created_by"] or "-",
                "vote_count": int(r["vote_count"] or 0),
                "user_voted": bool(r["user_voted"]),
            }
        )
    return render_template(
        "main/feedback.html",
        items=items,
        item_type=item_type,
        status=status,
        q=q,
        sort=sort,
    )
@main_bp.route("/feedback/new", methods=["GET", "POST"])
@login_required
@roles_required("admin", "operator", "viewer")
def feedback_new():
    """Show the new-feedback form (GET) or create a bug/feature item (POST)."""
    if request.method != "POST":
        return render_template("main/feedback_new.html")
    item_type = (request.form.get("item_type") or "").strip().lower()
    if item_type not in ("bug", "feature"):
        flash("Invalid type.", "danger")
        return redirect(url_for("main.feedback_new"))
    title = (request.form.get("title") or "").strip()
    description = (request.form.get("description") or "").strip()
    component = (request.form.get("component") or "").strip() or None
    if not title or not description:
        flash("Title and description are required.", "danger")
        return redirect(url_for("main.feedback_new"))
    item = FeedbackItem(
        item_type=item_type,
        title=title,
        description=description,
        component=component,
        status="open",
        created_by_user_id=int(current_user.id),
    )
    db.session.add(item)
    db.session.commit()
    flash("Feedback item created.", "success")
    return redirect(url_for("main.feedback_detail", item_id=item.id))
@main_bp.route("/feedback/<int:item_id>")
@login_required
@roles_required("admin", "operator", "viewer")
def feedback_detail(item_id: int):
    """Show one feedback item with its vote count and the viewer's vote state."""
    item = FeedbackItem.query.get_or_404(item_id)
    if item.deleted_at is not None:
        # Soft-deleted items behave as if they never existed.
        abort(404)
    vote_count = (
        db.session.query(db.func.count(FeedbackVote.id))
        .filter(FeedbackVote.feedback_item_id == item.id)
        .scalar()
        or 0
    )
    own_vote = FeedbackVote.query.filter(
        FeedbackVote.feedback_item_id == item.id,
        FeedbackVote.user_id == int(current_user.id),
    ).first()
    creator = User.query.get(item.created_by_user_id)
    created_by_name = creator.username if creator else "-"
    resolved_by_name = ""
    if item.resolved_by_user_id:
        resolver = User.query.get(item.resolved_by_user_id)
        resolved_by_name = resolver.username if resolver else ""
    return render_template(
        "main/feedback_detail.html",
        item=item,
        created_by_name=created_by_name,
        resolved_by_name=resolved_by_name,
        vote_count=int(vote_count),
        user_voted=own_vote is not None,
    )
@main_bp.route("/feedback/<int:item_id>/vote", methods=["POST"])
@login_required
@roles_required("admin", "operator", "viewer")
def feedback_vote(item_id: int):
    """Toggle the current user's vote on a feedback item, then redirect back."""
    item = FeedbackItem.query.get_or_404(item_id)
    if item.deleted_at is not None:
        abort(404)
    existing = FeedbackVote.query.filter(
        FeedbackVote.feedback_item_id == item.id,
        FeedbackVote.user_id == int(current_user.id),
    ).first()
    if existing:
        db.session.delete(existing)
        # Guard the removal commit like the add path below; previously a failed
        # commit here propagated as a 500 and left the session dirty.
        try:
            db.session.commit()
            flash("Vote removed.", "secondary")
        except Exception:
            db.session.rollback()
            flash("Could not vote.", "danger")
    else:
        vote = FeedbackVote(
            feedback_item_id=item.id,
            user_id=int(current_user.id),
        )
        db.session.add(vote)
        try:
            db.session.commit()
            flash("Voted.", "success")
        except Exception:
            db.session.rollback()
            flash("Could not vote.", "danger")
    # "ref" tells us whether the click came from the list or the detail page.
    ref = request.form.get("ref") or "detail"
    if ref == "list":
        return redirect(request.referrer or url_for("main.feedback_page"))
    return redirect(url_for("main.feedback_detail", item_id=item.id))
@main_bp.route("/feedback/<int:item_id>/resolve", methods=["POST"])
@login_required
@roles_required("admin")
def feedback_resolve(item_id: int):
    """Resolve or reopen a feedback item (admin only)."""
    item = FeedbackItem.query.get_or_404(item_id)
    if item.deleted_at is not None:
        abort(404)
    action = (request.form.get("action") or "resolve").strip().lower()
    if action not in ("resolve", "reopen"):
        action = "resolve"
    if action == "resolve":
        item.status = "resolved"
        item.resolved_by_user_id = int(current_user.id)
        item.resolved_at = datetime.utcnow()
        message, category = "Marked as resolved.", "success"
    else:
        item.status = "open"
        item.resolved_by_user_id = None
        item.resolved_at = None
        message, category = "Reopened.", "secondary"
    # Commit before flashing so the user never sees a success message for a
    # change that failed to persist (previously flash preceded the commit and
    # a commit failure had no rollback).
    try:
        db.session.commit()
        flash(message, category)
    except Exception:
        db.session.rollback()
        flash("Failed to update feedback item.", "danger")
    return redirect(url_for("main.feedback_detail", item_id=item.id))
@main_bp.route("/feedback/<int:item_id>/delete", methods=["POST"])
@login_required
@roles_required("admin")
def feedback_delete(item_id: int):
    """Soft-delete a feedback item (admin only): stamp deleted_at/by, keep the row."""
    item = FeedbackItem.query.get_or_404(item_id)
    if item.deleted_at is not None:
        abort(404)
    item.deleted_at = datetime.utcnow()
    item.deleted_by_user_id = int(current_user.id)
    # Guard the commit; previously a failure propagated unhandled with no rollback.
    try:
        db.session.commit()
        flash("Feedback item deleted.", "success")
    except Exception:
        db.session.rollback()
        flash("Failed to delete feedback item.", "danger")
    return redirect(url_for("main.feedback_page"))

View File

@ -0,0 +1,675 @@
from .routes_shared import * # noqa: F401,F403
from .routes_shared import _format_datetime, _log_admin_event, _send_mail_message_eml_download
import time
@main_bp.route("/inbox")
@login_required
@roles_required("admin", "operator", "viewer")
def inbox():
    """Render the paginated inbox of not-yet-approved mail messages."""
    per_page = 50
    try:
        page = int(request.args.get("page", "1"))
    except ValueError:
        page = 1
    page = max(page, 1)
    query = MailMessage.query
    # Use location column if available; otherwise just return all
    if hasattr(MailMessage, "location"):
        query = query.filter(MailMessage.location == "inbox")
    total_items = query.count()
    total_pages = max(1, math.ceil(total_items / per_page)) if total_items else 1
    page = min(page, total_pages)
    messages = (
        query.order_by(
            MailMessage.received_at.desc().nullslast(),
            MailMessage.id.desc(),
        )
        .offset((page - 1) * per_page)
        .limit(per_page)
        .all()
    )
    rows = [
        {
            "id": msg.id,
            "from_address": msg.from_address or "",
            "subject": msg.subject or "",
            "received_at": _format_datetime(msg.received_at),
            "backup_software": msg.backup_software or "",
            "backup_type": msg.backup_type or "",
            "job_name": msg.job_name or "",
            "parsed_at": _format_datetime(msg.parsed_at),
            "overall_status": msg.overall_status or "",
            "overall_message": (msg.overall_message or ""),
            "has_eml": bool(getattr(msg, "eml_stored_at", None)),
        }
        for msg in messages
    ]
    # Customers list for autocomplete in popup
    customer_rows = [
        {"id": c.id, "name": c.name}
        for c in Customer.query.order_by(Customer.name.asc()).all()
    ]
    return render_template(
        "main/inbox.html",
        rows=rows,
        page=page,
        total_pages=total_pages,
        has_prev=page > 1,
        has_next=page < total_pages,
        customers=customer_rows,
    )
@main_bp.route("/inbox/message/<int:message_id>")
@login_required
@roles_required("admin", "operator", "viewer")
def inbox_message_detail(message_id: int):
    """Return metadata, rendered body and parsed objects for a message as JSON."""
    msg = MailMessage.query.get_or_404(message_id)
    # Resolve customer name through linked Job, if any
    customer_name = ""
    if msg.job_id:
        linked_job = Job.query.get(msg.job_id)
        if linked_job and linked_job.customer:
            customer_name = linked_job.customer.name or ""
    deleter = getattr(msg, "deleted_by_user", None)
    deleted_by = (getattr(deleter, "username", "") or "") if deleter else ""
    meta = {
        "id": msg.id,
        "from_address": msg.from_address or "",
        "subject": msg.subject or "",
        "received_at": _format_datetime(msg.received_at),
        "backup_software": msg.backup_software or "",
        "backup_type": msg.backup_type or "",
        "job_name": msg.job_name or "",
        "parsed_at": _format_datetime(msg.parsed_at),
        "has_eml": bool(getattr(msg, "eml_stored_at", None)),
        "customer_name": customer_name,
        "approved": bool(getattr(msg, "approved", False)),
        "overall_status": msg.overall_status or "",
        "overall_message": (getattr(msg, "overall_message", None) or ""),
        "location": getattr(msg, "location", "") or "",
        "deleted_at": _format_datetime(getattr(msg, "deleted_at", None)),
        "deleted_by": deleted_by,
    }
    # Prefer stored HTML; fall back to escaped plain text, then a placeholder.
    if getattr(msg, "html_body", None):
        body_html = msg.html_body
    elif getattr(msg, "text_body", None):
        escaped = (
            msg.text_body.replace("&", "&amp;")
            .replace("<", "&lt;")
            .replace(">", "&gt;")
        )
        body_html = f"<pre>{escaped}</pre>"
    else:
        body_html = "<p>No message content stored.</p>"
    from ..models import MailObject
    objects = []
    object_query = MailObject.query.filter_by(mail_message_id=msg.id).order_by(
        MailObject.object_name.asc()
    )
    for obj in object_query.all():
        objects.append(
            {
                "name": obj.object_name,
                "type": getattr(obj, "object_type", "") or "",
                "status": obj.status or "",
                "error_message": obj.error_message or "",
            }
        )
    return jsonify({"status": "ok", "meta": meta, "body_html": body_html, "objects": objects})
@main_bp.route("/inbox/message/<int:message_id>/eml")
@login_required
@roles_required("admin", "operator", "viewer")
def inbox_message_eml(message_id: int):
    """Serve the stored .eml download for the given mail message."""
    message = MailMessage.query.get_or_404(message_id)
    return _send_mail_message_eml_download(message)
@main_bp.route("/inbox/message/<int:message_id>/approve", methods=["POST"])
@login_required
@roles_required("admin", "operator")
def inbox_message_approve(message_id: int):
    """Approve an inbox message: link it to a Job, record a JobRun and move
    the message to history.

    Form data:
        customer_id: required id of the customer the job belongs to. If an
            existing Job matches the message's unique key, that job's
            customer wins over the submitted one.
    """
    msg = MailMessage.query.get_or_404(message_id)
    # Only allow approval from inbox
    if getattr(msg, "location", "inbox") != "inbox":
        flash("This message is no longer in the Inbox and cannot be approved here.", "warning")
        return redirect(url_for("main.inbox"))
    customer_id_raw = request.form.get("customer_id", "").strip()
    if not customer_id_raw:
        flash("Please select a customer before approving.", "danger")
        return redirect(url_for("main.inbox"))
    try:
        customer_id = int(customer_id_raw)
    except ValueError:
        flash("Invalid customer selection.", "danger")
        return redirect(url_for("main.inbox"))
    customer = Customer.query.get(customer_id)
    if not customer:
        flash("Selected customer not found.", "danger")
        return redirect(url_for("main.inbox"))
    # Find existing Job by unique key: From + Backup + Type + Job name
    job = find_matching_job(msg)
    if job:
        # This key should be globally unique. If the selected customer differs, use the job's customer.
        if job.customer_id != customer.id:
            customer = Customer.query.get(job.customer_id) or customer
    else:
        # Create new Job for the selected customer using the same unique key fields
        norm_from, store_backup, store_type, store_job = build_job_match_key(msg)
        job = Job(
            customer_id=customer.id,
            from_address=norm_from,
            backup_software=store_backup,
            backup_type=store_type,
            job_name=store_job,
            active=True,
            auto_approve=True,
        )
        db.session.add(job)
        db.session.flush()  # ensure job.id is available
    # NOTE: a redundant "if not job:" fallback existed here; it was unreachable
    # (job is always set by the branches above) and referenced an undefined
    # name (norm_job), so it has been removed.
    # Create JobRun for this mail
    run = JobRun(
        job_id=job.id,
        mail_message_id=msg.id,
        # Some sources may not provide received_at; fall back to parsed_at/now.
        run_at=(msg.received_at or getattr(msg, "parsed_at", None) or datetime.utcnow()),
        status=msg.overall_status or None,
        missed=False,
    )
    # Optional remark
    if hasattr(run, "remark"):
        run.remark = getattr(msg, "overall_message", None)
    # Optional storage metrics (for capacity graphs)
    if hasattr(run, 'storage_used_bytes') and hasattr(msg, 'storage_used_bytes'):
        run.storage_used_bytes = msg.storage_used_bytes
    if hasattr(run, 'storage_capacity_bytes') and hasattr(msg, 'storage_capacity_bytes'):
        run.storage_capacity_bytes = msg.storage_capacity_bytes
    if hasattr(run, 'storage_free_bytes') and hasattr(msg, 'storage_free_bytes'):
        run.storage_free_bytes = msg.storage_free_bytes
    if hasattr(run, 'storage_free_percent') and hasattr(msg, 'storage_free_percent'):
        run.storage_free_percent = msg.storage_free_percent
    db.session.add(run)
    # Update mail message to reflect approval
    msg.job_id = job.id
    if hasattr(msg, "approved"):
        msg.approved = True
    if hasattr(msg, "approved_at"):
        msg.approved_at = datetime.utcnow()
    if hasattr(msg, "approved_by_id"):
        msg.approved_by_id = current_user.id
    if hasattr(msg, "location"):
        msg.location = "history"
    try:
        db.session.commit()
    except Exception as exc:
        db.session.rollback()
        flash("Could not approve this job due to a database error.", "danger")
        _log_admin_event("inbox_approve_error", f"Failed to approve message {msg.id}: {exc}")
        return redirect(url_for("main.inbox"))
    # Persist objects for reporting (must not block approval)
    try:
        persist_objects_for_approved_run(customer.id, job.id, run.id, msg.id)
    except Exception as exc:
        _log_admin_event(
            "object_persist_error",
            f"Object persistence failed for approved message {msg.id} (job {job.id}, run {run.id}): {exc}",
        )
    _log_admin_event(
        "inbox_approve",
        f"Approved message {msg.id} for customer '{customer.name}' as job {job.id}",
    )
    flash(f"Job approved for customer '{customer.name}'.", "success")
    return redirect(url_for("main.inbox"))
@main_bp.route("/inbox/message/<int:message_id>/delete", methods=["POST"])
@login_required
@roles_required("admin", "operator")
def inbox_message_delete(message_id: int):
    """Soft-delete an inbox message by moving it to the "deleted" location."""
    msg = MailMessage.query.get_or_404(message_id)
    # Only messages still in the inbox may be deleted from this view.
    if getattr(msg, "location", "inbox") != "inbox":
        flash("This message is no longer in the Inbox and cannot be deleted here.", "warning")
        return redirect(url_for("main.inbox"))
    if hasattr(msg, "location"):
        msg.location = "deleted"
    if hasattr(msg, "deleted_at"):
        msg.deleted_at = datetime.utcnow()
    if hasattr(msg, "deleted_by_user_id"):
        msg.deleted_by_user_id = current_user.id
    try:
        db.session.commit()
    except Exception as exc:
        db.session.rollback()
        flash("Could not delete this message due to a database error.", "danger")
        _log_admin_event("inbox_delete_error", f"Failed to delete message {msg.id}: {exc}")
    else:
        _log_admin_event("inbox_delete", f"Deleted inbox message {msg.id}")
        flash("Message deleted from Inbox.", "success")
    return redirect(url_for("main.inbox"))
@main_bp.route("/inbox/deleted")
@login_required
@roles_required("admin")
def inbox_deleted_mails():
    """Render the paginated list of soft-deleted mail messages (admin only)."""
    per_page = 50
    try:
        page = int(request.args.get("page", "1"))
    except ValueError:
        page = 1
    page = max(page, 1)
    query = MailMessage.query
    if hasattr(MailMessage, "location"):
        query = query.filter(MailMessage.location == "deleted")
    total_items = query.count()
    total_pages = max(1, math.ceil(total_items / per_page)) if total_items else 1
    page = min(page, total_pages)
    messages = (
        query.order_by(
            MailMessage.deleted_at.desc().nullslast(),
            MailMessage.id.desc(),
        )
        .offset((page - 1) * per_page)
        .limit(per_page)
        .all()
    )
    rows = []
    for msg in messages:
        # deleted_by_user may be a lazy relationship; never let a load error
        # break the listing.
        try:
            deleter = getattr(msg, "deleted_by_user", None)
            deleted_by_name = (deleter.username or "") if deleter else ""
        except Exception:
            deleted_by_name = ""
        rows.append(
            {
                "id": msg.id,
                "from_address": msg.from_address or "",
                "subject": msg.subject or "",
                "received_at": _format_datetime(msg.received_at),
                "deleted_at": _format_datetime(getattr(msg, "deleted_at", None)),
                "deleted_by": deleted_by_name,
                "has_eml": bool(getattr(msg, "eml_stored_at", None)),
            }
        )
    return render_template(
        "main/inbox_deleted.html",
        rows=rows,
        page=page,
        total_pages=total_pages,
        has_prev=page > 1,
        has_next=page < total_pages,
    )
@main_bp.route("/inbox/deleted/<int:message_id>/restore", methods=["POST"])
@login_required
@roles_required("admin")
def inbox_deleted_restore(message_id: int):
    """Restore a soft-deleted message back to the inbox (admin only)."""
    msg = MailMessage.query.get_or_404(message_id)
    if getattr(msg, "location", "") != "deleted":
        flash("This message is not marked as deleted.", "warning")
        return redirect(url_for("main.inbox_deleted_mails"))
    # Undo the soft delete: clear the deletion stamps and relocate.
    msg.location = "inbox"
    if hasattr(msg, "deleted_at"):
        msg.deleted_at = None
    if hasattr(msg, "deleted_by_user_id"):
        msg.deleted_by_user_id = None
    try:
        db.session.commit()
    except Exception as exc:
        db.session.rollback()
        flash("Could not restore this message due to a database error.", "danger")
        _log_admin_event("inbox_restore_error", f"Failed to restore message {msg.id}: {exc}")
    else:
        _log_admin_event("inbox_restore", f"Restored deleted inbox message {msg.id}")
        flash("Message restored to Inbox.", "success")
    return redirect(url_for("main.inbox_deleted_mails"))
@main_bp.route("/inbox/reparse-all", methods=["POST"])
@login_required
@roles_required("admin", "operator")
def inbox_reparse_all():
    """Re-run the mail parsers over all messages currently in the inbox.

    Processes messages in id-descending batches (keyset pagination) under a
    configurable time budget so a single HTTP request stays below typical
    proxy/Gunicorn timeouts; when the budget is hit the run stops early
    (``partial``) and the user triggers the action again to continue.
    Successfully parsed, not-yet-linked messages are auto-approved against
    known jobs where a unique match exists, and objects are persisted for
    auto-approved runs after the final commit.
    """
    # Re-run parsers for messages currently in the inbox.
    # IMPORTANT: This action can be very expensive on a fresh environment with
    # many inbox items. To avoid Gunicorn timeouts / gateway errors, we process
    # in batches and stop once we approach a safe time budget. The user can run
    # the action again to continue.
    base_q = MailMessage.query
    if hasattr(MailMessage, "location"):
        base_q = base_q.filter(MailMessage.location == "inbox")
    total = base_q.count()
    # Keep batches small enough for a single HTTP request.
    batch_size = int(current_app.config.get("INBOX_REPARSE_BATCH_SIZE", 200))
    commit_every = int(current_app.config.get("INBOX_REPARSE_COMMIT_EVERY", 50))
    # Stop before typical reverse-proxy / Gunicorn timeouts.
    time_budget_s = float(current_app.config.get("INBOX_REPARSE_TIME_BUDGET_S", 20.0))
    started_at = time.monotonic()
    # Counters for the summary flash messages and the admin event log.
    processed = 0
    parsed_ok = 0
    auto_approved = 0
    auto_approved_runs = []
    no_match = 0
    errors = 0
    partial = False
    last_id = None
    while True:
        q = base_q
        if last_id is not None:
            q = q.filter(MailMessage.id < last_id)
        # Use keyset pagination on id to avoid large OFFSET scans.
        batch = q.order_by(MailMessage.id.desc()).limit(batch_size).all()
        if not batch:
            break
        for msg in batch:
            # If we are about to exceed the time budget, stop and return control.
            if (time.monotonic() - started_at) >= time_budget_s:
                partial = True
                break
            processed += 1
            last_id = msg.id
            try:
                parse_mail_message(msg)
                # Auto-approve if this job was already approved before (unique match across customers)
                try:
                    # During re-parse we want to (re)apply auto-approve as well.
                    # Only attempt this for inbox messages with a successful parse that are not yet linked.
                    if (
                        getattr(msg, "location", "inbox") == "inbox"
                        and getattr(msg, "parse_result", None) == "ok"
                        and getattr(msg, "job_id", None) is None
                    ):
                        # Match approved job on: From + Backup + Type + Job name
                        # Prevent session autoflush for every match lookup while we
                        # are still updating many messages in a loop.
                        with db.session.no_autoflush:
                            job = find_matching_job(msg)
                        if job:
                            # Respect per-job flags.
                            # NOTE: raising here is caught below and simply skips
                            # auto-approve for this message; it is not an error path.
                            if hasattr(job, "active") and not bool(job.active):
                                raise Exception("job not active")
                            if hasattr(job, "auto_approve") and not bool(job.auto_approve):
                                raise Exception("job auto_approve disabled")
                            # Create a new run for the known job
                            run = JobRun(
                                job_id=job.id,
                                mail_message_id=msg.id,
                                # Some sources may not provide received_at; fall back to parsed_at/now.
                                run_at=(msg.received_at or getattr(msg, "parsed_at", None) or datetime.utcnow()),
                                status=msg.overall_status or None,
                                missed=False,
                            )
                            # Optional remark
                            if hasattr(run, "remark"):
                                run.remark = getattr(msg, "overall_message", None)
                            # Optional storage metrics (for capacity graphs)
                            if hasattr(run, "storage_used_bytes") and hasattr(msg, "storage_used_bytes"):
                                run.storage_used_bytes = msg.storage_used_bytes
                            if hasattr(run, "storage_capacity_bytes") and hasattr(msg, "storage_capacity_bytes"):
                                run.storage_capacity_bytes = msg.storage_capacity_bytes
                            if hasattr(run, "storage_free_bytes") and hasattr(msg, "storage_free_bytes"):
                                run.storage_free_bytes = msg.storage_free_bytes
                            if hasattr(run, "storage_free_percent") and hasattr(msg, "storage_free_percent"):
                                run.storage_free_percent = msg.storage_free_percent
                            db.session.add(run)
                            db.session.flush()  # ensure run.id is available
                            auto_approved_runs.append((job.customer_id, job.id, run.id, msg.id))
                            msg.job_id = job.id
                            if hasattr(msg, "approved"):
                                msg.approved = True
                            if hasattr(msg, "approved_at"):
                                msg.approved_at = datetime.utcnow()
                            if hasattr(msg, "approved_by_id"):
                                # None marks the approval as automatic (no user).
                                msg.approved_by_id = None
                            if hasattr(msg, "location"):
                                msg.location = "history"
                            auto_approved += 1
                except Exception as _exc:
                    # Never fail the reparse due to auto-approve; keep message in inbox
                    current_app.logger.exception(
                        f"Auto-approve during reparse failed for message {getattr(msg,'id',None)}: {_exc}"
                    )
                if msg.parse_result == "ok":
                    parsed_ok += 1
                elif msg.parse_result == "no_match":
                    no_match += 1
                else:
                    errors += 1
            except Exception as exc:
                errors += 1
                msg.parse_result = "error"
                msg.parse_error = str(exc)[:500]
            # Commit periodically to keep the transaction small and to avoid
            # autoflush overhead in subsequent queries.
            if commit_every > 0 and (processed % commit_every) == 0:
                try:
                    db.session.commit()
                except Exception:
                    db.session.rollback()
        if partial:
            break
    try:
        db.session.commit()
    except Exception as exc:
        db.session.rollback()
        flash("Re-parse failed while saving results. See logs for details.", "danger")
        _log_admin_event("reparse_inbox_error", f"Re-parse inbox failed to commit: {exc}")
        return redirect(url_for("main.inbox"))
    # Safety net: ensure auto-approve is also applied for all successfully parsed
    # inbox messages without a job link. This covers cases where the in-loop
    # auto-approve did not run due to session/flush issues.
    if not partial:
        try:
            pending_q = MailMessage.query
            if hasattr(MailMessage, "location"):
                pending_q = pending_q.filter(MailMessage.location == "inbox")
            pending_q = pending_q.filter(MailMessage.parse_result == "ok")
            pending_q = pending_q.filter(MailMessage.job_id.is_(None))
            pending = pending_q.order_by(MailMessage.received_at.desc()).all()
            for msg in pending:
                # Each message runs inside its own savepoint so one failure
                # does not roll back the others.
                nested = db.session.begin_nested()
                try:
                    with db.session.no_autoflush:
                        job = find_matching_job(msg)
                    if not job:
                        nested.commit()
                        continue
                    if hasattr(job, "active") and not bool(job.active):
                        nested.commit()
                        continue
                    if hasattr(job, "auto_approve") and not bool(job.auto_approve):
                        nested.commit()
                        continue
                    run = JobRun(
                        job_id=job.id,
                        mail_message_id=msg.id,
                        run_at=(msg.received_at or getattr(msg, "parsed_at", None) or datetime.utcnow()),
                        status=msg.overall_status or None,
                        missed=False,
                    )
                    if hasattr(run, "remark"):
                        run.remark = getattr(msg, "overall_message", None)
                    if hasattr(run, "storage_used_bytes") and hasattr(msg, "storage_used_bytes"):
                        run.storage_used_bytes = msg.storage_used_bytes
                    if hasattr(run, "storage_capacity_bytes") and hasattr(msg, "storage_capacity_bytes"):
                        run.storage_capacity_bytes = msg.storage_capacity_bytes
                    if hasattr(run, "storage_free_bytes") and hasattr(msg, "storage_free_bytes"):
                        run.storage_free_bytes = msg.storage_free_bytes
                    if hasattr(run, "storage_free_percent") and hasattr(msg, "storage_free_percent"):
                        run.storage_free_percent = msg.storage_free_percent
                    db.session.add(run)
                    db.session.flush()
                    auto_approved_runs.append((job.customer_id, job.id, run.id, msg.id))
                    msg.job_id = job.id
                    if hasattr(msg, "approved"):
                        msg.approved = True
                    if hasattr(msg, "approved_at"):
                        msg.approved_at = datetime.utcnow()
                    if hasattr(msg, "approved_by_id"):
                        msg.approved_by_id = None
                    if hasattr(msg, "location"):
                        msg.location = "history"
                    auto_approved += 1
                    nested.commit()
                except Exception as _exc:
                    # Roll back only this message's work (savepoint) and continue.
                    try:
                        nested.rollback()
                    except Exception:
                        # If rollback itself fails, fall back to a full session rollback.
                        db.session.rollback()
                    current_app.logger.exception(
                        f"Auto-approve safety net during reparse failed for message {getattr(msg,'id',None)}: {_exc}"
                    )
                    continue
            db.session.commit()
        except Exception as exc:
            db.session.rollback()
            current_app.logger.exception(f"Auto-approve safety net during reparse failed: {exc}")
    # Persist objects for auto-approved runs (must not block the reparse)
    # NOTE: On very large batches this can still be expensive, but we only run
    # it after DB commits so the UI request does not fail mid-transaction.
    if auto_approved_runs and not partial:
        persisted_objects = 0
        persisted_errors = 0
        for (customer_id, job_id, run_id, mail_message_id) in auto_approved_runs:
            try:
                persisted_objects += persist_objects_for_approved_run(
                    customer_id, job_id, run_id, mail_message_id
                )
            except Exception as exc:
                persisted_errors += 1
                _log_admin_event(
                    "object_persist_error",
                    f"Object persistence failed for auto-approved message {mail_message_id} (job {job_id}, run {run_id}): {exc}",
                )
        _log_admin_event(
            "object_persist_auto_approve",
            f"Persisted objects for auto-approved runs. runs={len(auto_approved_runs)}, objects={persisted_objects}, errors={persisted_errors}",
        )
    log_msg = (
        f"Re-parse inbox finished. processed={processed}/{total}, ok={parsed_ok}, "
        f"auto_approved={auto_approved}, no_match={no_match}, errors={errors}, partial={partial}"
    )
    _log_admin_event("reparse_inbox", log_msg)
    if partial:
        remaining = max(0, total - processed)
        flash("Re-parse started (batch mode).", "warning")
        flash(
            f"Processed: {processed}/{total}. Remaining: {remaining}. "
            f"Parsed: {parsed_ok}, auto-approved: {auto_approved}, no match: {no_match}, errors: {errors}.",
            "info",
        )
    else:
        flash("Re-parse finished.", "success")
        flash(
            f"Total: {total}, parsed: {parsed_ok}, auto-approved: {auto_approved}, no match: {no_match}, errors: {errors}",
            "info",
        )
    return redirect(url_for("main.inbox"))

View File

@ -0,0 +1,384 @@
from .routes_shared import * # noqa: F401,F403
from .routes_shared import (
_apply_overrides_to_run,
_describe_schedule,
_format_datetime,
_get_ui_timezone_name,
_infer_schedule_map_from_runs,
_schedule_map_to_desc,
_to_amsterdam_date,
)
@main_bp.route("/jobs")
@login_required
@roles_required("admin", "operator", "viewer")
def jobs():
    """Overview of all jobs with their customer names."""
    # Join with customers for display
    job_rows = (
        Job.query
        .outerjoin(Customer, Customer.id == Job.customer_id)
        .add_columns(
            Job.id,
            Job.backup_software,
            Job.backup_type,
            Job.job_name,
            Customer.name.label("customer_name"),
        )
        .order_by(Customer.name.asc().nullslast(), Job.backup_software.asc(), Job.backup_type.asc(), Job.job_name.asc())
        .all()
    )
    # Depending on SQLAlchemy version, row may be tuple-like, so access
    # columns by attribute with a getattr fallback for the labeled column.
    rows = [
        {
            "id": row.id,
            "customer_name": getattr(row, "customer_name", None) or "",
            "backup_software": row.backup_software or "",
            "backup_type": row.backup_type or "",
            "job_name": row.job_name or "",
        }
        for row in job_rows
    ]
    can_manage_jobs = current_user.is_authenticated and get_active_role() in ("admin", "operator")
    return render_template(
        "main/jobs.html",
        jobs=rows,
        can_manage_jobs=can_manage_jobs,
    )
@main_bp.route("/jobs/<int:job_id>")
@login_required
@roles_required("admin", "operator", "viewer")
def job_detail(job_id: int):
    """Render the detail page for one job: paginated run history, plus
    ticket/remark annotations for each run.

    A run is flagged with a ticket/remark when its (UI-timezone) date falls
    inside the annotation's active window [active_from_date, resolved date].
    All raw-SQL lookups are wrapped in try/except so an annotation failure
    never breaks the page.
    """
    job = Job.query.get_or_404(job_id)
    # History pagination
    try:
        page = int(request.args.get("page", "1"))
    except ValueError:
        page = 1
    if page < 1:
        page = 1
    per_page = 20
    query = JobRun.query.filter_by(job_id=job.id)
    total_items = query.count()
    total_pages = max(1, math.ceil(total_items / per_page)) if total_items else 1
    if page > total_pages:
        page = total_pages
    runs = (
        query.order_by(JobRun.run_at.desc().nullslast(), JobRun.id.desc())
        .offset((page - 1) * per_page)
        .limit(per_page)
        .all()
    )
    # Tickets: mark runs that fall within the ticket active window
    ticket_rows = []
    ticket_open_count = 0
    ticket_total_count = 0
    remark_rows = []
    remark_open_count = 0
    remark_total_count = 0
    run_dates = []
    run_date_map = {}
    # Convert each run timestamp to a UI-timezone ("Amsterdam") calendar date.
    for r in runs:
        rd = _to_amsterdam_date(r.run_at) or _to_amsterdam_date(datetime.utcnow())
        run_date_map[r.id] = rd
        if rd:
            run_dates.append(rd)
    # Fetch tickets whose active window overlaps the visible run-date range.
    if run_dates:
        min_date = min(run_dates)
        max_date = max(run_dates)
        try:
            rows = (
                db.session.execute(
                    text(
                        """
                        SELECT t.active_from_date, t.resolved_at, t.ticket_code
                        FROM tickets t
                        JOIN ticket_scopes ts ON ts.ticket_id = t.id
                        WHERE ts.job_id = :job_id
                          AND t.active_from_date <= :max_date
                          AND (
                                t.resolved_at IS NULL
                             OR ((t.resolved_at AT TIME ZONE 'UTC' AT TIME ZONE :ui_tz)::date) >= :min_date
                          )
                        """
                    ),
                    {"job_id": job.id, "min_date": min_date,
                     "ui_tz": _get_ui_timezone_name(), "max_date": max_date},
                )
                .mappings()
                .all()
            )
            for rr in rows:
                active_from = rr.get("active_from_date")
                resolved_at = rr.get("resolved_at")
                resolved_date = _to_amsterdam_date(resolved_at) if resolved_at else None
                ticket_rows.append({"active_from_date": active_from, "resolved_date": resolved_date, "ticket_code": rr.get("ticket_code")})
        except Exception:
            ticket_rows = []
    # Fetch remarks whose active window overlaps the visible run-date range.
    # active_from_date may be absent; fall back to start_date converted to UI tz.
    if run_dates:
        min_date = min(run_dates)
        max_date = max(run_dates)
        try:
            rows = (
                db.session.execute(
                    text(
                        """
                        SELECT COALESCE(
                                 r.active_from_date,
                                 ((r.start_date AT TIME ZONE 'UTC' AT TIME ZONE :ui_tz)::date)
                               ) AS active_from_date,
                               r.resolved_at,
                               r.title,
                               r.body
                        FROM remarks r
                        JOIN remark_scopes rs ON rs.remark_id = r.id
                        WHERE rs.job_id = :job_id
                          AND COALESCE(
                                r.active_from_date,
                                ((r.start_date AT TIME ZONE 'UTC' AT TIME ZONE :ui_tz)::date)
                              ) <= :max_date
                          AND (
                                r.resolved_at IS NULL
                             OR ((r.resolved_at AT TIME ZONE 'UTC' AT TIME ZONE :ui_tz)::date) >= :min_date
                          )
                        """
                    ),
                    {"job_id": job.id, "min_date": min_date,
                     "ui_tz": _get_ui_timezone_name(), "max_date": max_date},
                )
                .mappings()
                .all()
            )
            for rr in rows:
                active_from = rr.get("active_from_date")
                resolved_at = rr.get("resolved_at")
                resolved_date = _to_amsterdam_date(resolved_at) if resolved_at else None
                remark_rows.append({"active_from_date": active_from, "resolved_date": resolved_date, "title": rr.get("title"), "body": rr.get("body")})
        except Exception:
            remark_rows = []
    # Header badge counts: total and still-open tickets for this job.
    try:
        ticket_total_count = (
            db.session.execute(
                text(
                    """
                    SELECT COUNT(*)
                    FROM tickets t
                    JOIN ticket_scopes ts ON ts.ticket_id = t.id
                    WHERE ts.job_id = :job_id
                    """
                ),
                {"job_id": job.id},
            ).scalar() or 0
        )
        ticket_open_count = (
            db.session.execute(
                text(
                    """
                    SELECT COUNT(*)
                    FROM tickets t
                    JOIN ticket_scopes ts ON ts.ticket_id = t.id
                    WHERE ts.job_id = :job_id
                      AND t.resolved_at IS NULL
                    """
                ),
                {"job_id": job.id},
            ).scalar() or 0
        )
    except Exception:
        ticket_total_count = 0
        ticket_open_count = 0
    # Header badge counts: total and still-open remarks for this job.
    try:
        remark_total_count = (
            db.session.execute(
                text(
                    """
                    SELECT COUNT(*)
                    FROM remarks r
                    JOIN remark_scopes rs ON rs.remark_id = r.id
                    WHERE rs.job_id = :job_id
                    """
                ),
                {"job_id": job.id},
            ).scalar() or 0
        )
        remark_open_count = (
            db.session.execute(
                text(
                    """
                    SELECT COUNT(*)
                    FROM remarks r
                    JOIN remark_scopes rs ON rs.remark_id = r.id
                    WHERE rs.job_id = :job_id
                      AND r.resolved_at IS NULL
                    """
                ),
                {"job_id": job.id},
            ).scalar() or 0
        )
    except Exception:
        remark_total_count = 0
        remark_open_count = 0
    # Build the per-run history rows, applying overrides and annotations.
    history_rows = []
    for r in runs:
        status_display, override_applied, _override_level, _ov_id, _ov_reason = _apply_overrides_to_run(job, r)
        rd = run_date_map.get(r.id)
        run_day = ""
        if rd:
            # rd is an Amsterdam-local date
            _days = ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"]
            try:
                run_day = _days[int(rd.weekday())]
            except Exception:
                run_day = ""
        has_ticket = False
        has_remark = False
        ticket_codes = []
        remark_items = []
        if rd and ticket_rows:
            for tr in ticket_rows:
                af = tr.get("active_from_date")
                resd = tr.get("resolved_date")
                # Run date inside [active_from, resolved] (open-ended if unresolved).
                if af and af <= rd and (resd is None or resd >= rd):
                    has_ticket = True
                    code = (tr.get("ticket_code") or "").strip()
                    if code and code not in ticket_codes:
                        ticket_codes.append(code)
        if rd and remark_rows:
            for rr in remark_rows:
                af = rr.get("active_from_date")
                resd = rr.get("resolved_date")
                if af and af <= rd and (resd is None or resd >= rd):
                    has_remark = True
                    title = (rr.get("title") or "").strip()
                    body = (rr.get("body") or "").strip()
                    remark_items.append({"title": title, "body": body})
        history_rows.append(
            {
                "id": r.id,
                "run_day": run_day,
                "run_at": _format_datetime(r.run_at),
                "status": status_display or "",
                "remark": r.remark or "",
                "missed": bool(r.missed),
                "override_applied": bool(override_applied),
                "has_ticket": bool(has_ticket),
                "has_remark": bool(has_remark),
                "ticket_codes": ticket_codes,
                "remark_items": remark_items,
                "mail_message_id": r.mail_message_id,
                "reviewed_by": (r.reviewed_by.username if getattr(r, "reviewed_by", None) else ""),
                "reviewed_at": _format_datetime(r.reviewed_at) if r.reviewed_at else "",
            }
        )
    has_prev = page > 1
    has_next = page < total_pages
    can_manage_jobs = current_user.is_authenticated and get_active_role() in ("admin", "operator")
    # Schedule description: fall back to an inferred schedule when none is
    # configured; the inferred map is always computed for the template.
    schedule_map = None
    schedule_desc = _describe_schedule(job)
    if schedule_desc.startswith("No schedule configured"):
        schedule_map = _infer_schedule_map_from_runs(job.id)
        schedule_desc = _schedule_map_to_desc(schedule_map)
    else:
        schedule_map = _infer_schedule_map_from_runs(job.id)
    # For convenience, also load customer
    customer = None
    if job.customer_id:
        customer = Customer.query.get(job.customer_id)
    return render_template(
        "main/job_detail.html",
        job=job,
        customer=customer,
        schedule_desc=schedule_desc,
        schedule_map=schedule_map,
        history_rows=history_rows,
        ticket_open_count=int(ticket_open_count or 0),
        ticket_total_count=int(ticket_total_count or 0),
        remark_open_count=int(remark_open_count or 0),
        remark_total_count=int(remark_total_count or 0),
        page=page,
        total_pages=total_pages,
        has_prev=has_prev,
        has_next=has_next,
        can_manage_jobs=can_manage_jobs,
    )
@main_bp.route("/jobs/<int:job_id>/delete", methods=["POST"])
@login_required
@roles_required("admin", "operator")
def job_delete(job_id: int):
    """Delete a job and its runs; linked mails are returned to the inbox.

    Auxiliary link tables (run_object_links / job_object_links) are cleaned
    up explicitly because older schemas may lack ON DELETE CASCADE.
    """
    job = Job.query.get_or_404(job_id)
    try:
        # Collect run ids for FK cleanup in auxiliary tables that may not have ON DELETE CASCADE
        run_ids = []
        mail_message_ids = []
        for run in job.runs:
            if run.id is not None:
                run_ids.append(run.id)
            if run.mail_message_id:
                mail_message_ids.append(run.mail_message_id)
        # Put related mails back into the inbox and unlink from job
        if mail_message_ids:
            msgs = MailMessage.query.filter(MailMessage.id.in_(mail_message_ids)).all()
            for msg in msgs:
                if hasattr(msg, "location"):
                    msg.location = "inbox"
                msg.job_id = None
        # Ensure run_object_links doesn't block job_runs deletion (older schemas may miss ON DELETE CASCADE)
        if run_ids:
            db.session.execute(
                text("DELETE FROM run_object_links WHERE run_id IN :run_ids").bindparams(
                    bindparam("run_ids", expanding=True)
                ),
                {"run_ids": run_ids},
            )
        # Ensure job_object_links doesn't block jobs deletion (older schemas may miss ON DELETE CASCADE)
        if job.id is not None:
            db.session.execute(
                text("DELETE FROM job_object_links WHERE job_id = :job_id"),
                {"job_id": job.id},
            )
        db.session.delete(job)
        db.session.commit()
        flash("Job deleted. Related mails are returned to the inbox.", "success")
    except Exception as exc:
        db.session.rollback()
        # Log through the application logger (with traceback) instead of
        # printing to stdout, consistent with the rest of the blueprint.
        current_app.logger.exception(f"[jobs] Failed to delete job: {exc}")
        flash("Failed to delete job.", "danger")
    return redirect(url_for("main.jobs"))

View File

@ -0,0 +1,32 @@
from .routes_shared import * # noqa: F401,F403
@main_bp.route("/news/read/<int:news_id>", methods=["POST"])
@login_required
def news_mark_read(news_id: int):
    """Record that the current user has read a news item, then redirect back.

    Marking is best-effort: database errors are rolled back and logged but
    never surfaced to the user.
    """
    try:
        item = NewsItem.query.get(news_id)
        if not item:
            abort(404)
        # Only mark as read when authenticated user exists.
        uid = getattr(current_user, "id", None)
        if not uid:
            abort(401)
        # Idempotent: only insert a read marker if none exists yet.
        existing = NewsRead.query.filter_by(news_item_id=news_id, user_id=uid).first()
        if not existing:
            db.session.add(NewsRead(news_item_id=news_id, user_id=uid))
            db.session.commit()
    except Exception as exc:
        try:
            db.session.rollback()
        except Exception:
            pass
        # Log through the application logger (with traceback) instead of
        # printing to stdout, consistent with the rest of the blueprint.
        current_app.logger.exception(f"[news] Failed to mark read: {exc}")
    # Return to the previous page (dashboard by default)
    ref = (request.headers.get("Referer") or "").strip()
    if ref:
        return redirect(ref)
    return redirect(url_for("main.dashboard"))

View File

@ -0,0 +1,371 @@
from .routes_shared import * # noqa: F401,F403
from .routes_shared import _format_datetime
# When no explicit start date is provided, we treat overrides as retroactive so they
# can be applied to existing (not-yet-reviewed) runs as well.
# The Unix epoch acts as "since forever" for start_at comparisons.
_OVERRIDE_DEFAULT_START_AT = datetime(1970, 1, 1)
@main_bp.route("/overrides")
@login_required
@roles_required("admin", "operator", "viewer")
def overrides():
    """Render the overrides overview page.

    Lists all overrides with a human-readable scope description and preloads
    the job/backup-software/backup-type choices used by the create form.
    Viewers see the list read-only; operators may manage, admins may delete.
    """
    can_manage = get_active_role() in ("admin", "operator")
    can_delete = get_active_role() == "admin"
    overrides_q = Override.query.order_by(Override.level.asc(), Override.start_at.desc()).all()
    # Preload jobs for selection in the form (for object-level overrides)
    jobs_for_select = (
        Job.query.outerjoin(Customer, Job.customer_id == Customer.id)
        .order_by(
            Customer.name.asc(),
            func.lower(func.coalesce(Job.backup_software, "")),
            func.lower(func.coalesce(Job.backup_type, "")),
            func.lower(func.coalesce(Job.job_name, "")),
        )
        .all()
    )
    # Preload configured backup software/types for global override selection
    backup_software_options = [
        r[0]
        for r in db.session.query(Job.backup_software)
        .filter(Job.backup_software.isnot(None), Job.backup_software != "")
        .group_by(Job.backup_software)
        .order_by(func.lower(Job.backup_software))
        .all()
    ]
    backup_type_options = [
        r[0]
        for r in db.session.query(Job.backup_type)
        .filter(Job.backup_type.isnot(None), Job.backup_type != "")
        .group_by(Job.backup_type)
        .order_by(func.lower(Job.backup_type))
        .all()
    ]
    def _describe_scope(ov: Override) -> str:
        """Build a one-line human-readable scope string for an override,
        including its match criteria (status / error substring)."""
        lvl = (ov.level or "").lower()
        details = []
        if lvl == "global":
            # Global: backup software / type based
            if ov.backup_software:
                details.append(ov.backup_software)
            if ov.backup_type:
                details.append(ov.backup_type)
            scope = " / ".join(details) if details else "All jobs"
        elif lvl == "object":
            # Object-level: specific job (and optionally object name)
            job = Job.query.get(ov.job_id) if ov.job_id else None
            if job:
                customer_name = job.customer.name if job.customer else ""
                scope = f"{customer_name} / {job.backup_software or ''} / {job.backup_type or ''} / {job.job_name or ''}"
            else:
                scope = "(no job)"
            if ov.object_name:
                scope = scope + f" / object: {ov.object_name}"
        else:
            scope = "(unknown)"
        # Append match criteria
        crit = []
        if ov.match_status:
            crit.append(f"status == {ov.match_status}")
        if ov.match_error_contains:
            crit.append(f"error contains '{ov.match_error_contains}'")
        if crit:
            scope = scope + " [" + ", ".join(crit) + "]"
        return scope
    rows = []
    for ov in overrides_q:
        rows.append(
            {
                "id": ov.id,
                "level": ov.level or "",
                "scope": _describe_scope(ov),
                "start_at": _format_datetime(ov.start_at),
                "end_at": _format_datetime(ov.end_at) if ov.end_at else "",
                "active": bool(ov.active),
                "treat_as_success": bool(ov.treat_as_success),
                "comment": ov.comment or "",
                "match_status": ov.match_status or "",
                "match_error_contains": ov.match_error_contains or "",
            }
        )
    return render_template(
        "main/overrides.html",
        overrides=rows,
        can_manage=can_manage,
        can_delete=can_delete,
        jobs_for_select=jobs_for_select,
        backup_software_options=backup_software_options,
        backup_type_options=backup_type_options,
    )
@main_bp.route("/overrides/create", methods=["POST"])
@login_required
@roles_required("admin", "operator")
def overrides_create():
    """Create a new override from the form and apply it to existing runs.

    Scope depends on the level: "global" overrides match on backup software
    and/or backup type; "object" overrides target a specific job (optionally
    narrowed to one object name). An empty or unparsable start date makes the
    override retroactive (_OVERRIDE_DEFAULT_START_AT).
    """
    level = (request.form.get("level") or "").lower().strip()
    comment = (request.form.get("comment") or "").strip()
    treat_as_success = bool(request.form.get("treat_as_success"))
    backup_software = request.form.get("backup_software") or None
    backup_type = request.form.get("backup_type") or None
    job_id_val = request.form.get("job_id") or ""
    job_id = int(job_id_val) if job_id_val.isdigit() else None
    object_name = request.form.get("object_name") or None
    match_status = (request.form.get("match_status") or "").strip() or None
    match_error_contains = (request.form.get("match_error_contains") or "").strip() or None
    start_at_str = request.form.get("start_at") or ""
    end_at_str = request.form.get("end_at") or ""
    # Parse the datetime-local inputs. fromisoformat accepts "YYYY-MM-DDTHH:MM"
    # (with or without seconds), which keeps parsing consistent with
    # overrides_update's end_at handling. Invalid/empty start -> retroactive.
    try:
        start_at = datetime.fromisoformat(start_at_str) if start_at_str else _OVERRIDE_DEFAULT_START_AT
    except Exception:
        start_at = _OVERRIDE_DEFAULT_START_AT
    end_at = None
    if end_at_str:
        try:
            end_at = datetime.fromisoformat(end_at_str)
        except Exception:
            end_at = None
    # Only support global and object level via UI
    if level not in ("global", "object"):
        flash("Invalid override level.", "danger")
        return redirect(url_for("main.overrides"))
    # Fields that do not apply to the chosen level are stored as NULL.
    ov = Override(
        level=level,
        backup_software=backup_software if level == "global" else None,
        backup_type=backup_type if level == "global" else None,
        job_id=job_id if level == "object" else None,
        object_name=object_name if level == "object" else None,
        match_status=match_status,
        match_error_contains=match_error_contains,
        treat_as_success=treat_as_success,
        active=True,
        comment=comment,
        created_by=current_user.username,
        start_at=start_at,
        end_at=end_at,
    )
    db.session.add(ov)
    db.session.commit()
    # Apply the new override to already existing runs so the UI reflects it immediately.
    try:
        job_ids = None
        if ov.level == "object" and ov.job_id:
            job_ids = [ov.job_id]
        elif ov.level == "global":
            q = Job.query
            if ov.backup_software:
                q = q.filter(func.lower(Job.backup_software) == func.lower(ov.backup_software))
            if ov.backup_type:
                q = q.filter(func.lower(Job.backup_type) == func.lower(ov.backup_type))
            job_ids = [j.id for j in q.all()]
        _recompute_override_flags_for_runs(job_ids=job_ids, start_at=ov.start_at, end_at=ov.end_at, only_unreviewed=True)
    except Exception:
        # Best effort: recompute failures must not block override creation.
        pass
    flash("Override created.", "success")
    return redirect(url_for("main.overrides"))
@main_bp.route("/overrides/update/<int:override_id>", methods=["POST"])
@login_required
@roles_required("admin", "operator")
def overrides_update(override_id: int):
    """Update an existing override from the form and recompute affected runs.

    Recomputation covers the union of the old and the new scope/time window,
    so runs that drop out of scope are refreshed as well as runs newly
    covered. Only unreviewed runs are touched.
    """
    ov = Override.query.get_or_404(override_id)
    # Keep old scope to recompute affected runs in both old and new scope.
    old_level = ov.level
    old_backup_software = ov.backup_software
    old_backup_type = ov.backup_type
    old_job_id = ov.job_id
    old_start_at = ov.start_at
    old_end_at = ov.end_at
    level = (request.form.get("level") or "").lower().strip()
    comment = (request.form.get("comment") or "").strip()
    treat_as_success = bool(request.form.get("treat_as_success"))
    backup_software = request.form.get("backup_software") or None
    backup_type = request.form.get("backup_type") or None
    job_id_val = request.form.get("job_id") or ""
    job_id = int(job_id_val) if job_id_val.isdigit() else None
    object_name = request.form.get("object_name") or None
    match_status = (request.form.get("match_status") or "").strip() or None
    match_error_contains = (request.form.get("match_error_contains") or "").strip() or None
    start_at_str = request.form.get("start_at") or ""
    end_at_str = request.form.get("end_at") or ""
    now = datetime.utcnow()
    # If the field is left empty, treat it as retroactive (default start).
    # Otherwise parse datetime-local.
    if not start_at_str:
        start_at = _OVERRIDE_DEFAULT_START_AT
    else:
        try:
            start_at = datetime.strptime(start_at_str, "%Y-%m-%dT%H:%M")
        except Exception:
            # Unparsable input keeps the previously stored start.
            start_at = ov.start_at
    try:
        if end_at_str:
            end_at = datetime.fromisoformat(end_at_str)
        else:
            end_at = None
    except Exception:
        end_at = None
    if level not in ("global", "object"):
        flash("Invalid override level.", "danger")
        return redirect(url_for("main.overrides"))
    # Fields that do not apply to the chosen level are cleared to NULL.
    ov.level = level
    ov.backup_software = backup_software if level == "global" else None
    ov.backup_type = backup_type if level == "global" else None
    ov.job_id = job_id if level == "object" else None
    ov.object_name = object_name if level == "object" else None
    ov.match_status = match_status
    ov.match_error_contains = match_error_contains
    ov.treat_as_success = treat_as_success
    ov.comment = comment
    ov.start_at = start_at
    ov.end_at = end_at
    ov.updated_by = current_user.username
    ov.updated_at = now
    db.session.commit()
    # Recompute for union of old and new affected jobs, only for unreviewed runs.
    try:
        job_ids = set()
        def _job_ids_for_scope(level_val, bs, bt, jid):
            # Resolve a scope (level + software/type or job id) to a job-id set.
            if level_val == "object" and jid:
                return {jid}
            if level_val == "global":
                q = Job.query
                if bs:
                    q = q.filter(func.lower(Job.backup_software) == func.lower(bs))
                if bt:
                    q = q.filter(func.lower(Job.backup_type) == func.lower(bt))
                return {j.id for j in q.all()}
            return set()
        job_ids |= _job_ids_for_scope(old_level, old_backup_software, old_backup_type, old_job_id)
        job_ids |= _job_ids_for_scope(ov.level, ov.backup_software, ov.backup_type, ov.job_id)
        # Combine time windows so both old and new ranges are reprocessed.
        combined_start = old_start_at if old_start_at else ov.start_at
        if combined_start and ov.start_at and old_start_at:
            combined_start = min(old_start_at, ov.start_at)
        combined_end = old_end_at if old_end_at else ov.end_at
        if combined_end and ov.end_at and old_end_at:
            combined_end = max(old_end_at, ov.end_at)
        _recompute_override_flags_for_runs(
            job_ids=list(job_ids) if job_ids else None,
            start_at=combined_start,
            end_at=combined_end,
            only_unreviewed=True,
        )
    except Exception:
        # Best effort: recompute failures must not block the update itself.
        pass
    flash("Override updated.", "success")
    return redirect(url_for("main.overrides"))
@main_bp.route("/overrides/delete/<int:override_id>", methods=["POST"])
@login_required
@roles_required("admin")
def overrides_delete(override_id: int):
    """Delete an override (admin only) and recompute runs it used to affect."""
    ov = Override.query.get_or_404(override_id)
    # Capture scope before delete for recompute.
    level = ov.level
    backup_software = ov.backup_software
    backup_type = ov.backup_type
    job_id = ov.job_id
    start_at = ov.start_at
    end_at = ov.end_at
    db.session.delete(ov)
    db.session.commit()
    # Best effort: refresh override flags on runs that were in the deleted scope.
    try:
        job_ids = None
        if level == "object" and job_id:
            job_ids = [job_id]
        elif level == "global":
            q = Job.query
            if backup_software:
                q = q.filter(func.lower(Job.backup_software) == func.lower(backup_software))
            if backup_type:
                q = q.filter(func.lower(Job.backup_type) == func.lower(backup_type))
            job_ids = [j.id for j in q.all()]
        _recompute_override_flags_for_runs(job_ids=job_ids, start_at=start_at, end_at=end_at, only_unreviewed=True)
    except Exception:
        # Recompute failures must not block the delete flow.
        pass
    flash("Override deleted.", "success")
    return redirect(url_for("main.overrides"))
@main_bp.route("/overrides/<int:override_id>/toggle", methods=["POST"])
@login_required
@roles_required("admin", "operator")
def overrides_toggle(override_id: int):
    """Flip an override's active flag and recompute runs inside its scope."""
    ov = Override.query.get_or_404(override_id)
    ov.active = not bool(ov.active)
    ov.updated_by = current_user.username
    ov.updated_at = datetime.utcnow()
    db.session.commit()
    # Best-effort recompute of existing runs affected by this override.
    try:
        affected_job_ids = None
        if ov.level == "object" and ov.job_id:
            affected_job_ids = [ov.job_id]
        elif ov.level == "global":
            job_q = Job.query
            if ov.backup_software:
                job_q = job_q.filter(func.lower(Job.backup_software) == func.lower(ov.backup_software))
            if ov.backup_type:
                job_q = job_q.filter(func.lower(Job.backup_type) == func.lower(ov.backup_type))
            affected_job_ids = [job.id for job in job_q.all()]
        _recompute_override_flags_for_runs(
            job_ids=affected_job_ids,
            start_at=ov.start_at,
            end_at=ov.end_at,
            only_unreviewed=True,
        )
    except Exception:
        pass
    flash("Override status updated.", "success")
    return redirect(url_for("main.overrides"))

View File

@ -0,0 +1,98 @@
from .routes_shared import * # noqa: F401,F403
@main_bp.route("/parsers")
@login_required
@roles_required("admin")
def parsers_overview():
    """Render the admin overview of mail parsers implemented in code.

    The parser list is a static, display-only structure; it documents the
    currently implemented parsers with their match rules and examples.
    Fix: the subject regexes previously used doubled backslashes inside raw
    strings (e.g. r"\\s"), which as a regex matches a literal backslash —
    they now use the intended single escapes.
    """
    # Only show what is currently implemented in code.
    # Currently implemented parsers:
    #  - 3CX (Backup Complete notifications)
    #  - Veeam (status mails in multiple variants)
    parsers = [
        {
            "name": "3CX backup complete",
            "backup_software": "3CX",
            "backup_types": [],
            "order": 10,
            "enabled": True,
            "match": {
                "subject_regex": r"^3CX Notification:\s*Backup Complete\s*-\s*(.+)$",
            },
            "description": "Parses 3CX backup notifications (Backup Complete).",
            "examples": [
                {
                    "subject": "3CX Notification: Backup Complete - PBX01",
                    "from_address": "noreply@3cx.local",
                    "body_snippet": "Backup name: PBX01_2025-12-17.zip",
                    "parsed_result": {
                        "backup_software": "3CX",
                        "backup_type": "",
                        "job_name": "PBX01",
                        "objects": [
                            {
                                "name": "PBX01_2025-12-17.zip",
                                "status": "Success",
                                "error_message": "",
                            }
                        ],
                    },
                }
            ],
        },
        {
            "name": "Veeam status mails",
            "backup_software": "Veeam",
            "backup_types": [
                "Backup Job",
                "Backup Copy Job",
                "Replica Job",
                "Replication job",
                "Configuration Backup",
                "Agent Backup job",
                "Veeam Backup for Microsoft 365",
                "Scale Out Back-up Repository",
            ],
            "order": 20,
            "enabled": True,
            "match": {
                "subject_regex": r"\[(Success|Warning|Failed)\]\s*(.+)$",
            },
            "description": "Parses Veeam status mails. Job name/type are preferably extracted from the HTML header to avoid subject suffix noise.",
            "examples": [
                {
                    "subject": "[Warning] Daily-VM-Backup (3 objects) 1 warning",
                    "from_address": "veeam@customer.local",
                    "body_snippet": "Backup job: Daily-VM-Backup\\n...",
                    "parsed_result": {
                        "backup_software": "Veeam",
                        "backup_type": "Backup job",
                        "job_name": "Daily-VM-Backup",
                        "objects": [
                            {"name": "VM-APP01", "status": "Success", "error_message": ""},
                            {"name": "VM-DB01", "status": "Warning", "error_message": "Low disk space"},
                        ],
                    },
                },
                {
                    "subject": "[Success] Offsite-Repository",
                    "from_address": "veeam@customer.local",
                    "body_snippet": "Backup Copy job: Offsite-Repository\\n...",
                    "parsed_result": {
                        "backup_software": "Veeam",
                        "backup_type": "Backup Copy job",
                        "job_name": "Offsite-Repository",
                        "objects": [
                            {"name": "Backup Copy Chain", "status": "Success", "error_message": ""}
                        ],
                    },
                },
            ],
        },
    ]
    return render_template(
        "main/parsers.html",
        parsers=parsers,
    )

View File

@ -0,0 +1,64 @@
from .routes_shared import * # noqa: F401,F403
from .routes_shared import _format_datetime
@main_bp.route("/remarks/<int:remark_id>", methods=["GET", "POST"])
@login_required
@roles_required("admin", "operator", "viewer")
def remark_detail(remark_id: int):
    """Show a remark with its scopes and the latest linked job runs.

    GET renders the detail page; POST (admin/operator only) updates the
    remark body. The linked-run list is loaded via raw SQL over the
    remark_job_runs link table and is best-effort (empty on query failure).
    """
    remark = Remark.query.get_or_404(remark_id)
    if request.method == "POST":
        # Viewers may open the page but must not edit.
        if get_active_role() not in ("admin", "operator"):
            abort(403)
        remark.body = (request.form.get("body") or "").strip() or ""
        try:
            db.session.commit()
            flash("Remark updated.", "success")
        except Exception as exc:
            db.session.rollback()
            flash(f"Failed to update remark: {exc}", "danger")
        return redirect(url_for("main.remark_detail", remark_id=remark.id))
    scopes = RemarkScope.query.filter(RemarkScope.remark_id == remark.id).order_by(RemarkScope.id.asc()).all()
    # Most recent 20 job runs linked to this remark, newest first.
    runs = []
    try:
        rows = (
            db.session.execute(
                text(
                    """
                    SELECT jr.id, jr.run_at, jr.status, j.job_name, c.name AS customer_name
                    FROM remark_job_runs rjr
                    JOIN job_runs jr ON jr.id = rjr.job_run_id
                    JOIN jobs j ON j.id = jr.job_id
                    LEFT JOIN customers c ON c.id = j.customer_id
                    WHERE rjr.remark_id = :remark_id
                    ORDER BY jr.run_at DESC
                    LIMIT 20
                    """
                ),
                {"remark_id": remark.id},
            )
            .mappings()
            .all()
        )
        for r in rows:
            runs.append(
                {
                    "id": r.get("id"),
                    "run_at": _format_datetime(r.get("run_at")),
                    "status": r.get("status") or "",
                    "job_name": r.get("job_name") or "",
                    "customer_name": r.get("customer_name") or "",
                }
            )
    except Exception:
        # Best effort: a failing runs query must not break the detail page.
        runs = []
    return render_template(
        "main/remark_detail.html",
        remark=remark,
        scopes=scopes,
        runs=runs,
    )

View File

@ -0,0 +1,461 @@
from .routes_shared import * # noqa: F401,F403
from sqlalchemy import text
import json
import csv
import io
def _clamp_int(value, default: int, min_v: int, max_v: int) -> int:
try:
v = int(value)
except Exception:
v = default
if v < min_v:
v = min_v
if v > max_v:
v = max_v
return v
def _parse_iso_datetime(value: str) -> datetime:
value = (value or "").strip()
if not value:
raise ValueError("missing datetime")
# Accept "YYYY-MM-DD" as day start in UTC
if re.fullmatch(r"\d{4}-\d{2}-\d{2}", value):
return datetime.fromisoformat(value + "T00:00:00")
return datetime.fromisoformat(value)
def _require_reporting_role():
    """Abort with 403 unless the active role may use reporting.

    Phase 1: admin, operator and reporter have identical reporting access;
    viewer is intentionally excluded. Returns None when access is allowed.
    """
    allowed_roles = ("admin", "operator", "reporter")
    if get_active_role() in allowed_roles:
        return None
    return abort(403)
def _parse_customer_scope(payload: dict) -> tuple[str, list[int]]:
scope = (payload.get("customer_scope") or "all").strip().lower()
if scope not in ("all", "single", "multiple"):
scope = "all"
raw_ids = payload.get("customer_ids")
ids: list[int] = []
if isinstance(raw_ids, list):
for v in raw_ids:
try:
ids.append(int(v))
except Exception:
continue
if scope == "single":
if len(ids) != 1:
raise ValueError("Single customer scope requires exactly 1 customer.")
elif scope == "multiple":
if len(ids) < 1:
raise ValueError("Multiple customer scope requires at least 1 customer.")
else:
ids = []
return scope, ids
@main_bp.route("/api/reports/customers", methods=["GET"])
@login_required
def api_reports_customers():
    """Return active customers (id + name, sorted by name) for the scope picker."""
    # Role check: admin/operator/reporter only (403 otherwise).
    err = _require_reporting_role()
    if err is not None:
        return err
    rows = (
        db.session.query(Customer)
        .filter(Customer.active.is_(True))
        .order_by(Customer.name.asc())
        .all()
    )
    return {
        "items": [
            {"id": int(c.id), "name": c.name or ""}
            for c in rows
        ]
    }
@main_bp.route("/api/reports", methods=["GET"])
@login_required
def api_reports_list():
    """List the most recent report definitions (newest first, max 200)."""
    # Role check: admin/operator/reporter only (403 otherwise).
    err = _require_reporting_role()
    if err is not None:
        return err
    rows = (
        db.session.query(ReportDefinition)
        .order_by(ReportDefinition.created_at.desc())
        .limit(200)
        .all()
    )
    return {
        "items": [
            {
                "id": r.id,
                "name": r.name,
                "description": r.description or "",
                "report_type": r.report_type,
                "output_format": r.output_format,
                # customer_scope/customer_ids are optional columns; customer_ids
                # is stored as a JSON-encoded list of ints.
                "customer_scope": getattr(r, "customer_scope", "all") or "all",
                "customer_ids": (json.loads(r.customer_ids) if getattr(r, "customer_ids", None) else []),
                "period_start": r.period_start.isoformat() if r.period_start else "",
                "period_end": r.period_end.isoformat() if r.period_end else "",
                "schedule": r.schedule or "",
                "created_at": r.created_at.isoformat() if r.created_at else "",
            }
            for r in rows
        ]
    }
@main_bp.route("/api/reports", methods=["POST"])
@login_required
def api_reports_create():
    """Create a report definition from a JSON payload.

    Validates the customer scope and the reporting period (period_end must
    be after period_start); returns {"id": ...} on success or
    ({"error": ...}, 400) on validation failure.
    """
    err = _require_reporting_role()
    if err is not None:
        return err
    payload = request.get_json(silent=True) or {}
    name = (payload.get("name") or "").strip() or "Report"
    description = (payload.get("description") or "").strip() or None
    report_type = (payload.get("report_type") or "one-time").strip() or "one-time"
    output_format = (payload.get("output_format") or "csv").strip() or "csv"
    schedule = (payload.get("schedule") or "").strip() or None
    try:
        customer_scope, customer_ids = _parse_customer_scope(payload)
    except Exception as exc:
        return {"error": str(exc)}, 400
    period_start_raw = payload.get("period_start") or ""
    period_end_raw = payload.get("period_end") or ""
    try:
        period_start = _parse_iso_datetime(period_start_raw)
        period_end = _parse_iso_datetime(period_end_raw)
    except Exception:
        return {"error": "Invalid period_start or period_end. Use ISO datetime (YYYY-MM-DDTHH:MM:SS)."}, 400
    if period_end <= period_start:
        return {"error": "period_end must be after period_start."}, 400
    # customer_ids is persisted as a JSON string (NULL when scope is "all").
    r = ReportDefinition(
        name=name,
        description=description,
        report_type=report_type,
        output_format=output_format,
        customer_scope=customer_scope,
        customer_ids=json.dumps(customer_ids) if customer_ids else None,
        period_start=period_start,
        period_end=period_end,
        schedule=schedule,
        created_by_user_id=getattr(current_user, "id", None),
    )
    db.session.add(r)
    db.session.commit()
    return {"id": r.id}
@main_bp.route("/api/reports/<int:report_id>/generate", methods=["POST"])
@login_required
def api_reports_generate(report_id: int):
    """(Re)generate the snapshot and summary rows for a report.

    Deletes any previous rows for the report first so generation is
    idempotent, then inserts object-level snapshots for runs inside the
    report period and aggregates them into per-object summaries.
    NOTE(review): the SQL uses PostgreSQL-specific features
    (ANY(:param), ILIKE, ::INTEGER casts, NOW()) — not portable to SQLite.
    """
    err = _require_reporting_role()
    if err is not None:
        return err
    report = ReportDefinition.query.get_or_404(report_id)
    scope = (getattr(report, "customer_scope", None) or "all").strip().lower()
    raw_customer_ids = getattr(report, "customer_ids", None) or ""
    customer_ids: list[int] = []
    if raw_customer_ids:
        try:
            customer_ids = [int(v) for v in (json.loads(raw_customer_ids) or [])]
        except Exception:
            # Malformed stored JSON degrades to "no customer filter".
            customer_ids = []
    # Clear existing snapshot + summary rows for this report to make generation idempotent.
    db.session.execute(text("DELETE FROM report_object_summaries WHERE report_id = :rid"), {"rid": report_id})
    db.session.execute(text("DELETE FROM report_object_snapshots WHERE report_id = :rid"), {"rid": report_id})
    # Snapshot generation (object-based)
    # Join: run_object_links -> customer_objects -> customers
    #       run_object_links.run_id -> job_runs -> jobs
    where_customer = ""
    params = {"rid": report_id, "start_ts": report.period_start, "end_ts": report.period_end}
    if scope in ("single", "multiple") and customer_ids:
        # Optional customer filter; ANY(:customer_ids) is PostgreSQL syntax.
        where_customer = " AND c.id = ANY(:customer_ids) "
        params["customer_ids"] = customer_ids
    db.session.execute(
        text(
            f'''
            INSERT INTO report_object_snapshots
                (report_id, object_name, job_id, job_name, customer_id, customer_name,
                 backup_software, backup_type, run_id, run_at, status, missed,
                 override_applied, reviewed_at, ticket_number, remark, created_at)
            SELECT
                :rid AS report_id,
                co.object_name AS object_name,
                j.id AS job_id,
                j.job_name AS job_name,
                c.id AS customer_id,
                c.name AS customer_name,
                j.backup_software AS backup_software,
                j.backup_type AS backup_type,
                jr.id AS run_id,
                jr.run_at AS run_at,
                COALESCE(rol.status, jr.status) AS status,
                COALESCE(jr.missed, FALSE) AS missed,
                COALESCE(jr.override_applied, FALSE) AS override_applied,
                jr.reviewed_at AS reviewed_at,
                NULL AS ticket_number,
                jr.remark AS remark,
                NOW() AS created_at
            FROM run_object_links rol
            JOIN customer_objects co ON co.id = rol.customer_object_id
            JOIN customers c ON c.id = co.customer_id
            JOIN job_runs jr ON jr.id = rol.run_id
            JOIN jobs j ON j.id = jr.job_id
            WHERE jr.run_at IS NOT NULL
              AND jr.run_at >= :start_ts
              AND jr.run_at < :end_ts
              {where_customer}
            '''
        ),
        params,
    )
    # Summary aggregation per object
    # Success rate counts overridden runs as successes alongside plain successes.
    db.session.execute(
        text(
            '''
            INSERT INTO report_object_summaries
                (report_id, object_name, total_runs, success_count, success_override_count,
                 warning_count, failed_count, missed_count, success_rate, created_at)
            SELECT
                :rid AS report_id,
                s.object_name AS object_name,
                COUNT(*)::INTEGER AS total_runs,
                SUM(CASE WHEN (COALESCE(s.status,'') ILIKE 'success%' AND s.override_applied = FALSE) THEN 1 ELSE 0 END)::INTEGER AS success_count,
                SUM(CASE WHEN (s.override_applied = TRUE) THEN 1 ELSE 0 END)::INTEGER AS success_override_count,
                SUM(CASE WHEN (COALESCE(s.status,'') ILIKE 'warning%') THEN 1 ELSE 0 END)::INTEGER AS warning_count,
                SUM(CASE WHEN (COALESCE(s.status,'') ILIKE 'fail%') THEN 1 ELSE 0 END)::INTEGER AS failed_count,
                SUM(CASE WHEN (s.missed = TRUE) THEN 1 ELSE 0 END)::INTEGER AS missed_count,
                CASE
                    WHEN COUNT(*) = 0 THEN 0.0
                    ELSE (
                        (
                            SUM(CASE WHEN (COALESCE(s.status,'') ILIKE 'success%' AND s.override_applied = FALSE) THEN 1 ELSE 0 END)
                            + SUM(CASE WHEN (s.override_applied = TRUE) THEN 1 ELSE 0 END)
                        )::FLOAT / COUNT(*)::FLOAT
                    ) * 100.0
                END AS success_rate,
                NOW() AS created_at
            FROM report_object_snapshots s
            WHERE s.report_id = :rid
            GROUP BY s.object_name
            '''
        ),
        {"rid": report_id},
    )
    # Return lightweight stats for UI feedback.
    snapshot_count = (
        db.session.query(db.func.count(ReportObjectSnapshot.id))
        .filter(ReportObjectSnapshot.report_id == report_id)
        .scalar()
        or 0
    )
    summary_count = (
        db.session.query(db.func.count(ReportObjectSummary.id))
        .filter(ReportObjectSummary.report_id == report_id)
        .scalar()
        or 0
    )
    db.session.commit()
    return {"status": "ok", "snapshot_rows": int(snapshot_count), "summary_rows": int(summary_count)}
@main_bp.route("/api/reports/<int:report_id>/data", methods=["GET"])
@login_required
def api_reports_data(report_id: int):
    """Return paginated report data as JSON.

    Query params: view=summary|snapshot (default summary), limit (1..500),
    offset (>=0). 404s when the report does not exist.
    """
    err = _require_reporting_role()
    if err is not None:
        return err
    # Existence check only; the row itself is not needed below.
    ReportDefinition.query.get_or_404(report_id)
    view = (request.args.get("view") or "summary").strip().lower()
    if view not in ("summary", "snapshot"):
        view = "summary"
    limit = _clamp_int(request.args.get("limit"), default=100, min_v=1, max_v=500)
    offset = _clamp_int(request.args.get("offset"), default=0, min_v=0, max_v=1_000_000)
    if view == "summary":
        # Per-object aggregate counts, ordered by object name.
        q = db.session.query(ReportObjectSummary).filter(ReportObjectSummary.report_id == report_id)
        total = q.count()
        rows = (
            q.order_by(ReportObjectSummary.object_name.asc())
            .offset(offset)
            .limit(limit)
            .all()
        )
        return {
            "view": "summary",
            "total": int(total),
            "limit": int(limit),
            "offset": int(offset),
            "items": [
                {
                    "object_name": r.object_name or "",
                    "total_runs": int(r.total_runs or 0),
                    "success_count": int(r.success_count or 0),
                    "success_override_count": int(r.success_override_count or 0),
                    "warning_count": int(r.warning_count or 0),
                    "failed_count": int(r.failed_count or 0),
                    "missed_count": int(r.missed_count or 0),
                    "success_rate": float(r.success_rate or 0.0),
                }
                for r in rows
            ],
        }
    # Snapshot view: one row per object/run, ordered by object then run time.
    q = db.session.query(ReportObjectSnapshot).filter(ReportObjectSnapshot.report_id == report_id)
    total = q.count()
    rows = (
        q.order_by(ReportObjectSnapshot.object_name.asc(), ReportObjectSnapshot.run_at.asc())
        .offset(offset)
        .limit(limit)
        .all()
    )
    return {
        "view": "snapshot",
        "total": int(total),
        "limit": int(limit),
        "offset": int(offset),
        "items": [
            {
                "object_name": r.object_name or "",
                "customer_id": int(r.customer_id) if r.customer_id is not None else "",
                "customer_name": r.customer_name or "",
                "job_id": r.job_id or "",
                "job_name": r.job_name or "",
                "backup_software": r.backup_software or "",
                "backup_type": r.backup_type or "",
                "run_id": r.run_id or "",
                "run_at": r.run_at.isoformat() if r.run_at else "",
                "status": r.status or "",
                "missed": bool(r.missed),
                "override_applied": bool(r.override_applied),
                "reviewed_at": r.reviewed_at.isoformat() if r.reviewed_at else "",
                "ticket_number": r.ticket_number or "",
                # Flatten newlines so the remark renders as a single line.
                "remark": (r.remark or "").replace("\r", " ").replace("\n", " ").strip(),
            }
            for r in rows
        ],
    }
@main_bp.route("/api/reports/<int:report_id>/export.csv", methods=["GET"])
@login_required
def api_reports_export_csv(report_id: int):
    """Stream report data as a CSV attachment.

    Query param view=summary|snapshot (default summary) chooses between the
    per-object aggregates and the raw per-run snapshot rows. 404s when the
    report does not exist.
    """
    err = _require_reporting_role()
    if err is not None:
        return err
    # Existence check only (404 side effect); the row itself is not used.
    ReportDefinition.query.get_or_404(report_id)
    view = (request.args.get("view") or "summary").strip().lower()
    if view not in ("summary", "snapshot"):
        view = "summary"
    output = io.StringIO()
    writer = csv.writer(output)
    if view == "summary":
        writer.writerow([
            "object_name",
            "total_runs",
            "success_count",
            "success_override_count",
            "warning_count",
            "failed_count",
            "missed_count",
            "success_rate",
        ])
        rows = (
            db.session.query(ReportObjectSummary)
            .filter(ReportObjectSummary.report_id == report_id)
            .order_by(ReportObjectSummary.object_name.asc())
            .all()
        )
        for r in rows:
            writer.writerow([
                r.object_name or "",
                int(r.total_runs or 0),
                int(r.success_count or 0),
                int(r.success_override_count or 0),
                int(r.warning_count or 0),
                int(r.failed_count or 0),
                int(r.missed_count or 0),
                float(r.success_rate or 0.0),
            ])
        filename = f"report-{report_id}-summary.csv"
    else:
        writer.writerow([
            "object_name",
            "customer_id",
            "customer_name",
            "job_id",
            "job_name",
            "backup_software",
            "backup_type",
            "run_id",
            "run_at",
            "status",
            "missed",
            "override_applied",
            "reviewed_at",
            "ticket_number",
            "remark",
        ])
        rows = (
            db.session.query(ReportObjectSnapshot)
            .filter(ReportObjectSnapshot.report_id == report_id)
            .order_by(ReportObjectSnapshot.object_name.asc(), ReportObjectSnapshot.run_at.asc())
            .all()
        )
        for r in rows:
            writer.writerow([
                r.object_name or "",
                r.customer_id or "",
                r.customer_name or "",
                r.job_id or "",
                r.job_name or "",
                r.backup_software or "",
                r.backup_type or "",
                r.run_id or "",
                r.run_at.isoformat() if r.run_at else "",
                r.status or "",
                "1" if r.missed else "0",
                "1" if r.override_applied else "0",
                r.reviewed_at.isoformat() if r.reviewed_at else "",
                r.ticket_number or "",
                # Flatten newlines so one snapshot row stays one CSV row.
                (r.remark or "").replace("\r", " ").replace("\n", " ").strip(),
            ])
        filename = f"report-{report_id}-snapshot.csv"
    # Serve from memory as an attachment with the view-specific filename.
    csv_bytes = output.getvalue().encode("utf-8")
    mem = io.BytesIO(csv_bytes)
    mem.seek(0)
    return send_file(mem, mimetype="text/csv", as_attachment=True, download_name=filename)

View File

@ -0,0 +1,23 @@
from .routes_shared import * # noqa: F401,F403
@main_bp.route("/reports")
@login_required
@roles_required("admin", "operator", "reporter", "viewer")
def reports():
    """Render the Reports UI with a default 7-day UTC period for quick testing."""
    now_utc = datetime.utcnow().replace(microsecond=0)
    week_ago = (now_utc - timedelta(days=7)).replace(microsecond=0)
    return render_template(
        "main/reports.html",
        default_period_start=week_ago.isoformat(),
        default_period_end=now_utc.isoformat(),
    )
@main_bp.route("/reports/new")
@login_required
@roles_required("admin", "operator", "reporter", "viewer")
def reports_new():
    """Render the form page for creating a new report definition."""
    return render_template("main/reports_new.html")

View File

@ -0,0 +1,693 @@
from __future__ import annotations
from datetime import date, datetime, time, timedelta, timezone
from flask import jsonify, render_template, request
from flask_login import current_user, login_required
from sqlalchemy import and_, or_, func, text
from .routes_shared import (
_apply_overrides_to_run,
_format_datetime,
_get_ui_timezone,
_get_ui_timezone_name,
_get_or_create_settings,
_infer_schedule_map_from_runs,
_to_amsterdam_date,
main_bp,
roles_required,
get_active_role,
)
from ..database import db
from ..models import Customer, Job, JobRun, JobRunReviewEvent, MailMessage, User
# Grace window for matching real runs to an expected schedule slot.
# A run within +/- 1 hour of the inferred schedule time counts as fulfilling the slot.
MISSED_GRACE_WINDOW = timedelta(hours=1)
def _utc_naive_from_local(dt_local: datetime) -> datetime:
"""Convert a timezone-aware local datetime to UTC naive, matching DB convention."""
if dt_local.tzinfo is None:
return dt_local
return dt_local.astimezone(timezone.utc).replace(tzinfo=None)
def _local_from_utc_naive(dt_utc_naive: datetime) -> datetime:
    """Best-effort conversion of a UTC-naive datetime to the configured UI timezone.

    Returns the input unchanged when no UI timezone is configured or when
    the conversion fails for any reason.
    """
    tz = _get_ui_timezone()
    if not tz:
        return dt_utc_naive
    try:
        # Naive values are interpreted as UTC before converting.
        aware = dt_utc_naive if dt_utc_naive.tzinfo is not None else dt_utc_naive.replace(tzinfo=timezone.utc)
        return aware.astimezone(tz)
    except Exception:
        return dt_utc_naive
def _parse_hhmm(hhmm: str) -> tuple[int, int] | None:
try:
parts = (hhmm or "").strip().split(":")
if len(parts) < 2:
return None
hh = int(parts[0])
mm = int(parts[1])
if hh < 0 or hh > 23 or mm < 0 or mm > 59:
return None
return hh, mm
except Exception:
return None
def _get_default_missed_start_date() -> date:
    """Return the earliest date for which missed runs may be generated."""
    # Prefer the explicitly configured Daily Jobs missed start date.
    settings = _get_or_create_settings()
    configured = getattr(settings, "daily_jobs_start_date", None)
    if configured:
        return configured
    # Safety guard: never generate missed runs for unbounded history (90-day cap).
    today = _to_amsterdam_date(datetime.utcnow()) or datetime.utcnow().date()
    return today - timedelta(days=90)
def _ensure_missed_runs_for_job(job: Job, start_from: date, end_inclusive: date) -> int:
    """Generate missed JobRun rows for scheduled slots without a run, so Run Checks can review them.
    Returns number of inserted missed runs.

    The schedule is inferred from previously observed runs
    (_infer_schedule_map_from_runs: weekday -> list of "HH:MM" strings).
    Previously generated, unreviewed missed runs inside the window are
    deleted first and regenerated from the current learned schedule.
    """
    tz = _get_ui_timezone()
    schedule_map = _infer_schedule_map_from_runs(job.id) or {}
    if not schedule_map:
        # No learned schedule means nothing to expect, hence nothing missed.
        return 0
    # Never generate slots beyond "today" in the UI timezone.
    today_local = _to_amsterdam_date(datetime.utcnow()) or datetime.utcnow().date()
    if end_inclusive > today_local:
        end_inclusive = today_local
    now_local_dt = datetime.now(tz) if tz else datetime.utcnow()
    # Remove any previously generated missed runs in this date window.
    # Missed runs must be based on learned schedule from real mail-reported runs.
    try:
        start_local = datetime.combine(start_from, time.min)
        end_local_excl = datetime.combine(end_inclusive + timedelta(days=1), time.min)
        if tz:
            start_local = start_local.replace(tzinfo=tz)
            end_local_excl = end_local_excl.replace(tzinfo=tz)
        start_utc_naive = _utc_naive_from_local(start_local)
        end_utc_naive_excl = _utc_naive_from_local(end_local_excl)
        # Only delete generated (mail-less), unreviewed missed runs.
        db.session.query(JobRun).filter(
            JobRun.job_id == job.id,
            JobRun.missed.is_(True),
            JobRun.mail_message_id.is_(None),
            JobRun.reviewed_at.is_(None),
            JobRun.run_at.isnot(None),
            JobRun.run_at >= start_utc_naive,
            JobRun.run_at < end_utc_naive_excl,
        ).delete(synchronize_session=False)
        db.session.commit()
    except Exception:
        # Best effort: failing cleanup must not block regeneration below.
        db.session.rollback()
    inserted = 0
    # Walk every day in the window and every scheduled slot on that day.
    d = start_from
    while d <= end_inclusive:
        weekday = d.weekday()
        times = schedule_map.get(weekday) or []
        if not times:
            d = d + timedelta(days=1)
            continue
        for hhmm in times:
            hm = _parse_hhmm(hhmm)
            if not hm:
                continue
            hh, mm = hm
            local_dt = datetime.combine(d, time(hour=hh, minute=mm))
            if tz:
                local_dt = local_dt.replace(tzinfo=tz)
            # Only generate missed runs for past slots.
            if local_dt > now_local_dt:
                continue
            slot_utc_naive = _utc_naive_from_local(local_dt)
            # Consider any real run near the slot as fulfilling the schedule.
            # Also avoid duplicates if a missed run already exists.
            window_start = slot_utc_naive - MISSED_GRACE_WINDOW
            window_end = slot_utc_naive + MISSED_GRACE_WINDOW
            exists = (
                db.session.query(JobRun.id)
                .filter(
                    JobRun.job_id == job.id,
                    JobRun.run_at.isnot(None),
                    # Either a real mail-reported run or an already generated miss.
                    or_(
                        and_(JobRun.missed.is_(False), JobRun.mail_message_id.isnot(None)),
                        and_(JobRun.missed.is_(True), JobRun.mail_message_id.is_(None)),
                    ),
                    JobRun.run_at >= window_start,
                    JobRun.run_at <= window_end,
                )
                .first()
            )
            if exists:
                continue
            miss = JobRun(
                job_id=job.id,
                run_at=slot_utc_naive,
                status="Missed",
                missed=True,
                remark=None,
                mail_message_id=None,
            )
            db.session.add(miss)
            inserted += 1
        d = d + timedelta(days=1)
    # Single commit for all inserted rows.
    if inserted:
        db.session.commit()
    return inserted
@main_bp.route("/run-checks")
@login_required
@roles_required("admin", "operator")
def run_checks_page():
    """Run Checks page: list jobs that have runs to review (including generated missed runs).

    Renders one aggregated row per job with at least one run matching the
    current filter (unreviewed by default; admins may include reviewed runs
    via the ``include_reviewed`` query parameter). Before aggregating,
    synthetic "Missed" runs are generated per job so schedule gaps become
    reviewable, and persisted override flags are recomputed so the per-job
    status dots stay consistent with the Run Checks modal.
    """
    include_reviewed = False
    if get_active_role() == "admin":
        include_reviewed = request.args.get("include_reviewed", "0") in ("1", "true", "yes", "on")
    # Generate missed runs since the last review per job so they show up in Run Checks.
    # This is intentionally best-effort; any errors should not block page load.
    try:
        settings_start = _get_default_missed_start_date()
        # Per job: the most recent reviewed_at (NULL when never reviewed).
        last_reviewed_rows = (
            db.session.query(JobRun.job_id, func.max(JobRun.reviewed_at))
            .group_by(JobRun.job_id)
            .all()
        )
        last_reviewed_map = {int(jid): (dt if dt else None) for jid, dt in last_reviewed_rows}
        jobs = Job.query.all()
        today_local = _to_amsterdam_date(datetime.utcnow()) or datetime.utcnow().date()
        for job in jobs:
            last_rev = last_reviewed_map.get(int(job.id))
            if last_rev:
                # Resume missed-run generation from the local date of the last review.
                start_date = _to_amsterdam_date(last_rev) or settings_start
            else:
                start_date = settings_start
            if start_date and start_date > today_local:
                continue
            _ensure_missed_runs_for_job(job, start_date, today_local)
    except Exception:
        # Don't block the page if missed-run generation fails.
        pass
    # Aggregated per-job rows
    base = (
        db.session.query(
            Job.id.label("job_id"),
            Job.job_name.label("job_name"),
            Job.backup_software.label("backup_software"),
            Job.backup_type.label("backup_type"),
            Customer.name.label("customer_name"),
        )
        .select_from(Job)
        .outerjoin(Customer, Customer.id == Job.customer_id)
    )
    # Runs to show in the overview: unreviewed (or all if admin toggle enabled)
    run_filter = []
    if not include_reviewed:
        run_filter.append(JobRun.reviewed_at.is_(None))
    # Last review per job must be derived from reviewed runs (independent of the overview run filter).
    # The overview typically shows only unreviewed runs, so using the same filter would always yield NULL.
    last_reviewed_ts = (
        db.session.query(
            JobRun.job_id.label("job_id"),
            func.max(JobRun.reviewed_at).label("last_reviewed_at"),
        )
        .filter(JobRun.reviewed_at.isnot(None))
        .group_by(JobRun.job_id)
        .subquery()
    )
    # Pick one concrete run per job that carries the latest reviewed_at
    # (max id breaks ties when several runs share the same timestamp).
    last_reviewed_pick = (
        db.session.query(
            JobRun.job_id.label("job_id"),
            func.max(JobRun.id).label("run_id"),
        )
        .join(
            last_reviewed_ts,
            (JobRun.job_id == last_reviewed_ts.c.job_id)
            & (JobRun.reviewed_at == last_reviewed_ts.c.last_reviewed_at),
        )
        .group_by(JobRun.job_id)
        .subquery()
    )
    last_reviewed = (
        db.session.query(
            JobRun.job_id.label("job_id"),
            JobRun.reviewed_at.label("last_reviewed_at"),
            JobRun.reviewed_by_user_id.label("last_reviewed_by_user_id"),
        )
        .join(last_reviewed_pick, JobRun.id == last_reviewed_pick.c.run_id)
        .subquery()
    )
    # Per-job run count and newest timestamp (created_at is the fallback when run_at is NULL).
    agg = (
        db.session.query(
            JobRun.job_id.label("job_id"),
            func.count(JobRun.id).label("run_count"),
            func.max(func.coalesce(JobRun.run_at, JobRun.created_at)).label("last_run_ts"),
        )
        .group_by(JobRun.job_id)
    )
    if run_filter:
        agg = agg.filter(*run_filter)
    agg = agg.subquery()
    # Inner join on agg: only jobs that actually have runs to show.
    q = (
        base.join(agg, agg.c.job_id == Job.id)
        .outerjoin(last_reviewed, last_reviewed.c.job_id == Job.id)
        .add_columns(
            last_reviewed.c.last_reviewed_at.label("last_reviewed_at"),
            last_reviewed.c.last_reviewed_by_user_id.label("last_reviewed_by_user_id"),
        )
    )
    # Sort for operational review: Customer > Backup > Type > Job
    q = q.order_by(
        Customer.name.asc().nullslast(),
        Job.backup_software.asc().nullslast(),
        Job.backup_type.asc().nullslast(),
        Job.job_name.asc().nullslast(),
        Job.id.asc(),
    )
    # Hard cap to keep the page responsive on large datasets.
    rows = q.limit(2000).all()
    # Ensure override flags are up-to-date for the runs shown in this overview.
    # The Run Checks modal computes override status on-the-fly, but the overview
    # aggregates by persisted JobRun.override_applied. Keep those flags aligned
    # so jobs with overridden runs do not stay orange (Warning).
    try:
        from .routes_shared import _recompute_override_flags_for_runs
        job_ids_for_recompute = [int(r.job_id) for r in rows]
        if job_ids_for_recompute:
            _recompute_override_flags_for_runs(job_ids=job_ids_for_recompute, only_unreviewed=True)
    except Exception:
        pass
    # Per-job status indicators for the overview table (counts per status).
    job_ids = [int(r.job_id) for r in rows]
    status_map: dict[int, dict[str, int]] = {}
    if job_ids:
        s_q = (
            db.session.query(
                JobRun.job_id.label("job_id"),
                JobRun.status.label("status"),
                JobRun.missed.label("missed"),
                JobRun.override_applied.label("override_applied"),
                func.count(JobRun.id).label("cnt"),
            )
            .filter(JobRun.job_id.in_(job_ids))
        )
        if run_filter:
            s_q = s_q.filter(*run_filter)
        s_q = s_q.group_by(JobRun.job_id, JobRun.status, JobRun.missed, JobRun.override_applied)
        for jid, status, missed, override_applied, cnt in s_q.all():
            job_id = int(jid)
            label = (status or "").strip() or "Unknown"
            # Missed takes precedence over any override flag for labeling.
            if bool(missed) or (label.lower() == "missed"):
                label = "Missed"
            elif bool(override_applied):
                # Keep the label consistent with other pages.
                label = "Success (override)"
            status_map.setdefault(job_id, {})
            status_map[job_id][label] = status_map[job_id].get(label, 0) + int(cnt or 0)
    # Map reviewed-by usernames for admins
    reviewed_by_map: dict[int, str] = {}
    if get_active_role() == "admin":
        user_ids = sorted({int(r.last_reviewed_by_user_id) for r in rows if getattr(r, "last_reviewed_by_user_id", None)})
        if user_ids:
            users = User.query.filter(User.id.in_(user_ids)).all()
            reviewed_by_map = {u.id: u.username for u in users}
    # Ticket/Remark indicators (active today) for faster reviewing.
    today_local = _to_amsterdam_date(datetime.utcnow()) or datetime.utcnow().date()
    payload = []
    for r in rows:
        job_id = int(r.job_id)
        # Status indicators for the overview (counts per status).
        indicators: list[dict[str, object]] = []
        counts = status_map.get(job_id, {})
        if counts:
            # Map a status label to its CSS dot class for the template.
            def _dot_for(label: str) -> str:
                s = (label or "").strip().lower()
                if s == "success":
                    return "dot-success"
                if s == "warning":
                    return "dot-warning"
                if s in ("failed", "error"):
                    return "dot-failed"
                if s == "missed":
                    return "dot-missed"
                if s == "expected":
                    return "dot-expected"
                if "override" in s:
                    return "dot-override"
                return ""
            # Keep a stable order (actionable first).
            preferred = [
                "Failed",
                "Error",
                "Warning",
                "Missed",
                "Success",
                "Expected",
                "Success (override)",
            ]
            seen = set()
            for k in preferred:
                if k in counts:
                    indicators.append({"status": k, "count": int(counts.get(k, 0) or 0), "dot": _dot_for(k)})
                    seen.add(k)
            # Any remaining (unexpected) labels follow alphabetically.
            for k in sorted(counts.keys()):
                if k not in seen:
                    indicators.append({"status": k, "count": int(counts.get(k, 0) or 0), "dot": _dot_for(k)})
        has_active_ticket = False
        has_active_remark = False
        ui_tz = _get_ui_timezone_name()
        # Resolved-at timestamps are stored as UTC; compare as local dates in the UI timezone.
        try:
            t_exists = db.session.execute(
                text(
                    """
                    SELECT 1
                    FROM tickets t
                    JOIN ticket_scopes ts ON ts.ticket_id = t.id
                    WHERE ts.job_id = :job_id
                      AND t.active_from_date <= :run_date
                      AND (
                        t.resolved_at IS NULL
                        OR ((t.resolved_at AT TIME ZONE 'UTC' AT TIME ZONE :ui_tz)::date) >= :run_date
                      )
                    LIMIT 1
                    """
                ),
                {"job_id": job_id, "run_date": today_local, "ui_tz": ui_tz},
            ).first()
            has_active_ticket = bool(t_exists)
            r_exists = db.session.execute(
                text(
                    """
                    SELECT 1
                    FROM remarks r
                    JOIN remark_scopes rs ON rs.remark_id = r.id
                    WHERE rs.job_id = :job_id
                      AND COALESCE(
                            r.active_from_date,
                            ((r.start_date AT TIME ZONE 'UTC' AT TIME ZONE :ui_tz)::date)
                          ) <= :run_date
                      AND (
                        r.resolved_at IS NULL
                        OR ((r.resolved_at AT TIME ZONE 'UTC' AT TIME ZONE :ui_tz)::date) >= :run_date
                      )
                    LIMIT 1
                    """
                ),
                {"job_id": job_id, "run_date": today_local, "ui_tz": ui_tz},
            ).first()
            has_active_remark = bool(r_exists)
        except Exception:
            has_active_ticket = False
            has_active_remark = False
        last_run_ts = getattr(r, "last_run_ts", None)
        last_run_at = _format_datetime(last_run_ts) if last_run_ts else ""
        last_reviewed_at = getattr(r, "last_reviewed_at", None)
        last_reviewed_by = reviewed_by_map.get(getattr(r, "last_reviewed_by_user_id", None), "")
        payload.append(
            {
                "job_id": job_id,
                "customer_name": r.customer_name or "-",
                "job_name": r.job_name or "-",
                "backup_software": r.backup_software or "-",
                "backup_type": r.backup_type or "-",
                "run_count": int(getattr(r, "run_count", 0) or 0),
                "last_run_at": last_run_at or "-",
                "status_counts": status_map.get(job_id, {}),
                "status_indicators": indicators,
                "has_active_ticket": bool(has_active_ticket),
                "has_active_remark": bool(has_active_remark),
                "last_reviewed_at": _format_datetime(last_reviewed_at) if (get_active_role() == "admin" and last_reviewed_at) else "",
                "last_reviewed_by": last_reviewed_by if get_active_role() == "admin" else "",
            }
        )
    return render_template(
        "main/run_checks.html",
        rows=payload,
        is_admin=(get_active_role() == "admin"),
        include_reviewed=include_reviewed,
    )
@main_bp.route("/api/run-checks/details")
@login_required
@roles_required("admin", "operator")
def run_checks_details():
    """Return runs for a job for the Run Checks modal.

    Query parameters:
        job_id: required positive integer; 404 when the job does not exist.
        include_reviewed: admin-only toggle; non-admins always see unreviewed runs only.

    Response JSON: {"status": "ok", "job": {...}, "runs": [...]} where each run
    carries mail metadata, its persisted per-object results and the
    override-adjusted display status.
    """
    try:
        job_id = int(request.args.get("job_id", "0"))
    except Exception:
        job_id = 0
    if job_id <= 0:
        return jsonify({"status": "error", "message": "Invalid parameters."}), 400
    include_reviewed = False
    if get_active_role() == "admin":
        include_reviewed = request.args.get("include_reviewed", "0") in ("1", "true", "yes", "on")
    job = Job.query.get_or_404(job_id)
    q = JobRun.query.filter(JobRun.job_id == job.id)
    if not include_reviewed:
        q = q.filter(JobRun.reviewed_at.is_(None))
    # Newest first; created_at is the fallback timestamp for runs without run_at.
    runs = q.order_by(func.coalesce(JobRun.run_at, JobRun.created_at).desc(), JobRun.id.desc()).limit(400).all()
    runs_payload = []
    for run in runs:
        msg = MailMessage.query.get(run.mail_message_id) if run.mail_message_id else None
        mail_meta = None
        has_eml = False
        body_html = ""
        if msg:
            mail_meta = {
                "from_address": msg.from_address or "",
                "subject": msg.subject or "",
                "received_at": _format_datetime(msg.received_at),
            }
            body_html = msg.html_body or ""
            # has_eml drives the "download .eml" action in the modal.
            has_eml = bool(getattr(msg, "eml_stored_at", None))
        # Persisted per-object results (run_object_links joined to customer_objects).
        objects_payload = []
        try:
            rows = (
                db.session.execute(
                    text(
                        """
                        SELECT
                          co.object_name AS name,
                          rol.status AS status,
                          rol.error_message AS error_message
                        FROM run_object_links rol
                        JOIN customer_objects co ON co.id = rol.customer_object_id
                        WHERE rol.run_id = :run_id
                        ORDER BY co.object_name ASC
                        """
                    ),
                    {"run_id": run.id},
                )
                .mappings()
                .all()
            )
            for rr in rows:
                objects_payload.append(
                    {
                        "name": rr.get("name") or "",
                        "type": "",
                        "status": rr.get("status") or "",
                        "error_message": rr.get("error_message") or "",
                    }
                )
        except Exception:
            objects_payload = []
        # Display status is override-adjusted; fall back to the raw status on any error.
        status_display = run.status or "-"
        try:
            status_display, _, _, _ov_id, _ov_reason = _apply_overrides_to_run(job, run)
        except Exception:
            status_display = run.status or "-"
        runs_payload.append(
            {
                "id": run.id,
                "run_at": _format_datetime(run.run_at) if run.run_at else "-",
                "status": status_display,
                "remark": run.remark or "",
                "missed": bool(run.missed),
                "is_reviewed": bool(run.reviewed_at),
                "reviewed_at": _format_datetime(run.reviewed_at) if (get_active_role() == "admin" and run.reviewed_at) else "",
                "mail_message_id": run.mail_message_id,
                "has_eml": bool(has_eml),
                "mail": mail_meta,
                "body_html": body_html,
                "objects": objects_payload,
            }
        )
    job_payload = {
        "id": job.id,
        "customer_name": job.customer.name if job.customer else "",
        "backup_software": job.backup_software or "",
        "backup_type": job.backup_type or "",
        "job_name": job.job_name or "",
    }
    if not runs_payload:
        return jsonify({"status": "ok", "job": job_payload, "runs": [], "message": "No runs found."})
    return jsonify({"status": "ok", "job": job_payload, "runs": runs_payload})
@main_bp.post("/api/run-checks/mark-reviewed")
@login_required
@roles_required("admin", "operator")
def api_run_checks_mark_reviewed():
    """Mark runs as reviewed by the current user.

    Accepts JSON with either ``job_ids`` (review every run of those jobs) or
    ``run_ids`` (review specific runs). Already-reviewed runs are skipped and
    counted separately; a REVIEWED audit event is recorded per updated run.
    """
    payload = request.get_json(silent=True) or {}
    raw_run_ids = payload.get("run_ids") or []
    raw_job_ids = payload.get("job_ids") or []
    # Backwards compatible: accept either run_ids or job_ids.
    if raw_job_ids:
        try:
            parsed = [int(value) for value in raw_job_ids]
        except Exception:
            return jsonify({"status": "error", "message": "Invalid job_ids."}), 400
        if not parsed:
            return jsonify({"status": "ok", "updated": 0, "skipped": 0})
        runs = JobRun.query.filter(JobRun.job_id.in_(parsed)).all()
    else:
        try:
            parsed = [int(value) for value in raw_run_ids]
        except Exception:
            return jsonify({"status": "error", "message": "Invalid run_ids."}), 400
        if not parsed:
            return jsonify({"status": "ok", "updated": 0, "skipped": 0})
        runs = JobRun.query.filter(JobRun.id.in_(parsed)).all()
    review_time = datetime.utcnow()
    updated = 0
    skipped = 0
    for run in runs:
        if run.reviewed_at is not None:
            skipped += 1
            continue
        run.reviewed_at = review_time
        run.reviewed_by_user_id = current_user.id
        db.session.add(
            JobRunReviewEvent(
                run_id=run.id,
                action="REVIEWED",
                actor_user_id=current_user.id,
            )
        )
        updated += 1
    db.session.commit()
    return jsonify({"status": "ok", "updated": updated, "skipped": skipped})
@main_bp.post("/api/run-checks/unmark-reviewed")
@login_required
@roles_required("admin")
def api_run_checks_unmark_reviewed():
    """Admin-only: clear the review state on runs, recording an UNREVIEWED audit event.

    Accepts JSON with either ``job_ids`` or ``run_ids`` plus an optional
    free-form ``note``. Runs that are not currently reviewed count as skipped.
    """
    payload = request.get_json(silent=True) or {}
    raw_run_ids = payload.get("run_ids") or []
    raw_job_ids = payload.get("job_ids") or []
    note = payload.get("note")
    if raw_job_ids:
        try:
            parsed_jobs = [int(value) for value in raw_job_ids]
        except Exception:
            return jsonify({"status": "error", "message": "Invalid job_ids."}), 400
        if not parsed_jobs:
            return jsonify({"status": "ok", "updated": 0, "skipped": 0})
        runs = JobRun.query.filter(JobRun.job_id.in_(parsed_jobs)).all()
    else:
        try:
            parsed_runs = [int(value) for value in raw_run_ids]
        except Exception:
            return jsonify({"status": "error", "message": "Invalid run_ids."}), 400
        if not parsed_runs:
            return jsonify({"status": "ok", "updated": 0, "skipped": 0})
        runs = JobRun.query.filter(JobRun.id.in_(parsed_runs)).all()
    updated = 0
    skipped = 0
    for run in runs:
        if run.reviewed_at is None:
            skipped += 1
            continue
        run.reviewed_at = None
        run.reviewed_by_user_id = None
        db.session.add(
            JobRunReviewEvent(
                run_id=run.id,
                action="UNREVIEWED",
                actor_user_id=current_user.id,
                # Truncate free-form notes defensively before persisting.
                note=(str(note)[:2000] if note else None),
            )
        )
        updated += 1
    db.session.commit()
    return jsonify({"status": "ok", "updated": updated, "skipped": skipped})

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,819 @@
import requests
from flask import current_app, g
import os
import io
import json
import re
import html as _html
import math
import datetime as datetime_module
from functools import wraps
from datetime import datetime, timedelta
from zoneinfo import ZoneInfo
from flask import (
Blueprint,
render_template,
abort,
request,
session,
redirect,
url_for,
flash,
jsonify,
Response,
send_file,
)
from flask_login import login_required, current_user, logout_user
from sqlalchemy import text, func, bindparam
from ..email_utils import normalize_from_address
from ..job_matching import build_job_match_key, find_matching_job
from ..database import db
from ..models import (
SystemSettings,
AdminLog,
Customer,
Job,
JobRun,
JobObject,
MailMessage,
MailObject,
Override,
User,
Ticket,
TicketScope,
TicketJobRun,
Remark,
RemarkScope,
RemarkJobRun,
FeedbackItem,
FeedbackVote,
NewsItem,
NewsRead,
ReportDefinition,
ReportObjectSnapshot,
ReportObjectSummary,
)
from ..mail_importer import run_manual_import, MailImportError
from ..parsers import parse_mail_message
from ..object_persistence import persist_objects_for_approved_run
# Blueprint for all main application routes (pages and JSON APIs).
main_bp = Blueprint("main", __name__)
def _parse_roles(role_str: str) -> list[str]:
raw = (role_str or "").strip()
if not raw:
return ["viewer"]
parts = [p.strip() for p in raw.split(",")]
roles = [p for p in parts if p]
return roles or ["viewer"]
def get_active_role() -> str:
    """Return the active role for the current session/user ("viewer" for anonymous)."""
    if not current_user.is_authenticated:
        return "viewer"
    try:
        return current_user.active_role
    except Exception:
        # Fallback for user models without an active_role property:
        # honor a valid session selection, otherwise persist the first assigned role.
        available = _parse_roles(getattr(current_user, "role", ""))
        chosen = (session.get("active_role") or "").strip()
        if chosen and chosen in available:
            return chosen
        session["active_role"] = available[0]
        return available[0]
def get_user_roles() -> list[str]:
    """Return the list of roles assigned to the current user (empty for anonymous)."""
    if not current_user.is_authenticated:
        return []
    try:
        # Prefer a structured `roles` attribute; fall back to the legacy `role` string.
        assigned = getattr(current_user, "roles", None)
        if assigned is None:
            assigned = getattr(current_user, "role", "")
        if isinstance(assigned, (list, tuple, set)):
            return [str(item).strip() for item in assigned if str(item).strip()]
        return _parse_roles(str(assigned))
    except Exception:
        return _parse_roles(getattr(current_user, "role", ""))
@main_bp.app_context_processor
def _inject_role_context():
    """Expose the active role and full role list to every template."""
    context = {
        "active_role": get_active_role(),
        "user_roles": get_user_roles(),
    }
    return context
def roles_required(*roles):
    """Require one of the given roles for the wrapped view.

    Responds 401 for anonymous users and 403 when the active role is not
    among ``roles``.
    """
    def decorator(view):
        @wraps(view)
        def wrapper(*args, **kwargs):
            if not current_user.is_authenticated:
                return abort(401)
            if get_active_role() not in roles:
                return abort(403)
            return view(*args, **kwargs)
        return wrapper
    return decorator
def _send_mail_message_eml_download(msg: MailMessage):
    """Return a RFC822 (.eml) download response for a MailMessage.

    Aborts with 404 when the message is missing or no raw blob is stored.
    """
    if not msg:
        return abort(404)
    raw = getattr(msg, "eml_blob", None)
    if not raw:
        return abort(404)
    return send_file(
        io.BytesIO(raw),
        as_attachment=True,
        download_name=f"message-{msg.id}.eml",
        mimetype="message/rfc822",
    )
@main_bp.route("/message/<int:message_id>/eml")
@login_required
@roles_required("admin", "operator", "viewer")
def message_eml(message_id: int):
    """Download the stored raw EML for any message (Inbox, History, linked to jobs/runs)."""
    message = MailMessage.query.get_or_404(message_id)
    return _send_mail_message_eml_download(message)
def _get_or_create_settings() -> SystemSettings:
    """Fetch the singleton SystemSettings row, creating one with defaults on first use."""
    existing = SystemSettings.query.first()
    if existing is not None:
        return existing
    created = SystemSettings(
        auto_import_enabled=False,
        auto_import_interval_minutes=15,
        auto_import_max_items=50,
        manual_import_batch_size=50,
        auto_import_cutoff_date=datetime.utcnow().date(),
        ingest_eml_retention_days=7,
    )
    db.session.add(created)
    db.session.commit()
    return created
def _get_ui_timezone_name() -> str:
    """Return the configured UI timezone name (IANA), with a safe fallback."""
    # Settings row wins when it carries a non-empty timezone.
    try:
        configured = (getattr(_get_or_create_settings(), "ui_timezone", None) or "").strip()
        if configured:
            return configured
    except Exception:
        pass
    # Fall back to app config, then to the hard-coded default.
    try:
        return (current_app.config.get("TIMEZONE") or "Europe/Amsterdam").strip()
    except Exception:
        return "Europe/Amsterdam"
def _get_ui_timezone() -> ZoneInfo:
    """Return a ZoneInfo for UI rendering, cached per request on flask.g; UTC on failure."""
    cached = getattr(g, "_ui_tz", None)
    if cached is not None:
        return cached
    try:
        zone = ZoneInfo(_get_ui_timezone_name())
    except Exception:
        zone = ZoneInfo("UTC")
    g._ui_tz = zone
    return zone
def _format_bytes(num_bytes: int) -> str:
if num_bytes is None:
return "unknown"
step = 1024.0
units = ["B", "KB", "MB", "GB", "TB"]
size = float(num_bytes)
unit = 0
while size >= step and unit < len(units) - 1:
size /= step
unit += 1
return f"{size:.2f} {units[unit]}"
def _get_database_size_bytes():
    """Return the PostgreSQL database size in bytes, or None when the query fails."""
    try:
        row = db.session.execute(text("SELECT pg_database_size(current_database())"))
        return int(row.scalar() or 0)
    except Exception as exc:
        print(f"[settings] Failed to read database size: {exc}")
        return None
def _get_free_disk_bytes():
try:
stat = os.statvfs("/")
return int(stat.f_bavail * stat.f_frsize)
except Exception as exc:
print(f"[settings] Failed to read free disk space: {exc}")
return None
def _format_datetime(dt):
    """Format a datetime in the UI timezone as 'dd-mm-YYYY HH:MM:SS'.

    Naive values are treated as UTC (the storage convention throughout the app).
    Falsy input or any conversion error yields "-".
    """
    if not dt:
        return "-"
    try:
        zone = _get_ui_timezone()
        value = dt
        if hasattr(value, "tzinfo") and value.tzinfo is None:
            value = value.replace(tzinfo=datetime_module.timezone.utc)
        if hasattr(value, "astimezone"):
            value = value.astimezone(zone)
        return value.strftime("%d-%m-%Y %H:%M:%S")
    except Exception:
        return "-"
def _apply_overrides_to_run(job: Job, run: JobRun):
    """Determine effective status for a run, taking overrides into account.

    Returns a tuple:
        (display_status, override_applied, override_level, override_id, override_reason)
    override_level is one of: None, "global", "object".
    override_id is the matched overrides.id when applied.
    override_reason is a short human-readable reason for reporting.

    Matching precedence: object-level overrides (bound to this job) are
    evaluated before global ones (matched by backup software/type). The first
    matching override wins; when it has treat_as_success the display status
    becomes "Success (override)", otherwise the raw status is kept but the
    override is still reported as applied.
    """
    if not run:
        return "", False, None, None, None
    base_status = (run.status or "").strip() or "-"
    # Some ingested runs only have created_at populated. Fall back to created_at
    # so overrides can still be evaluated and recomputed retroactively.
    run_at = getattr(run, "run_at", None) or getattr(run, "created_at", None)
    # No timestamp at all -> do not attempt override matching.
    if not run_at:
        return base_status, False, None, None, None
    # Build a short, loggable description of why an override matched.
    def _reason_for(ov: Override) -> str:
        parts = []
        try:
            parts.append(f"id={ov.id}")
        except Exception:
            pass
        try:
            lvl = (getattr(ov, "level", None) or "").strip()
            if lvl:
                parts.append(f"level={lvl}")
        except Exception:
            pass
        try:
            ms = (getattr(ov, "match_status", None) or "").strip()
            if ms:
                parts.append(f"status={ms}")
        except Exception:
            pass
        try:
            mec = (getattr(ov, "match_error_contains", None) or "").strip()
            if mec:
                parts.append(f"contains={mec}")
        except Exception:
            pass
        try:
            cm = (getattr(ov, "comment", None) or "").strip()
            if cm:
                parts.append(f"comment={cm}")
        except Exception:
            pass
        return "; ".join(parts) or "override applied"
    def _load_run_object_rows() -> list[dict]:
        """Load persisted run objects (run_object_links + customer_objects).

        This is the primary source for object-level status/error reporting.
        Returns [] on any DB error.
        """
        try:
            rows = (
                db.session.execute(
                    text(
                        """
                        SELECT co.object_name AS object_name,
                               rol.status AS status,
                               rol.error_message AS error_message
                        FROM run_object_links rol
                        JOIN customer_objects co ON co.id = rol.customer_object_id
                        WHERE rol.run_id = :run_id
                        ORDER BY co.object_name
                        """
                    ),
                    {"run_id": run.id},
                )
                .mappings()
                .all()
            )
            return [dict(r) for r in rows]
        except Exception:
            return []
    def _norm(s: str | None) -> str:
        return (s or "").strip()
    # Case-insensitive substring match; an empty needle matches everything.
    def _contains(haystack: str | None, needle: str | None) -> bool:
        if not needle:
            return True
        if not haystack:
            return False
        return needle.lower() in haystack.lower()
    # Case-insensitive equality; no expectation matches everything.
    def _matches_status(candidate: str | None, expected: str | None) -> bool:
        if not expected:
            return True
        return _norm(candidate).lower() == _norm(expected).lower()
    def _matches_object_name(candidate: str | None, expected: str | None) -> bool:
        if not expected:
            return True
        cand = _norm(candidate)
        exp = _norm(expected)
        if not cand:
            return False
        # Support '*' wildcard for convenience (glob-style).
        if "*" in exp:
            try:
                import fnmatch
                return fnmatch.fnmatch(cand.lower(), exp.lower())
            except Exception:
                return cand.lower() == exp.lower()
        return cand.lower() == exp.lower()
    # An override applies only while active and within its optional time window.
    def _is_in_window(ov: Override) -> bool:
        if not ov.active:
            return False
        if ov.start_at and run_at < ov.start_at:
            return False
        if ov.end_at and run_at > ov.end_at:
            return False
        return True
    # Load all potentially applicable overrides. Object-level overrides take precedence.
    try:
        overrides_q = Override.query.filter(Override.active.is_(True)).all()
    except Exception:
        overrides_q = []
    applicable_object_overrides: list[Override] = []
    applicable_global_overrides: list[Override] = []
    for ov in overrides_q:
        lvl = (_norm(getattr(ov, "level", "")) or "").lower()
        if lvl == "object":
            # Object overrides are bound to a specific job.
            if ov.job_id != job.id:
                continue
            if not _is_in_window(ov):
                continue
            applicable_object_overrides.append(ov)
        elif lvl == "global":
            # Global overrides filter by backup software/type when set.
            if ov.backup_software and _norm(job.backup_software).lower() != _norm(ov.backup_software).lower():
                continue
            if ov.backup_type and _norm(job.backup_type).lower() != _norm(ov.backup_type).lower():
                continue
            if not _is_in_window(ov):
                continue
            applicable_global_overrides.append(ov)
    # Persisted run-object rows (run_object_links + customer_objects) are the primary
    # source for object-level error/status matching.
    run_object_rows = _load_run_object_rows()
    # Helper for evaluating a global override against the run itself.
    def _matches_global(ov: Override) -> bool:
        if not _matches_status(run.status, ov.match_status):
            return False
        # Global overrides should match both the run-level remark and any object-level error messages.
        if ov.match_error_contains:
            if _contains(run.remark, ov.match_error_contains):
                return True
            # Check persisted run-object error messages.
            for row in run_object_rows or []:
                if _contains(row.get("error_message"), ov.match_error_contains):
                    return True
            # Fallback to the legacy run.objects relationship (older schemas).
            objs = []
            try:
                objs = list(run.objects) if hasattr(run, "objects") else []
            except Exception:
                objs = []
            for obj in objs or []:
                if _contains(getattr(obj, "error_message", None), ov.match_error_contains):
                    return True
            return False
        return True
    # Helper for evaluating an object override against objects within the run.
    def _matches_object(ov: Override) -> bool:
        # Prefer persisted rows.
        for row in run_object_rows or []:
            if not _matches_object_name(row.get("object_name"), ov.object_name):
                continue
            if not _matches_status(row.get("status"), ov.match_status):
                continue
            if not _contains(row.get("error_message"), ov.match_error_contains):
                continue
            return True
        # Fallback to legacy JobObject relationship (older schemas).
        objs = []
        try:
            objs = list(run.objects) if hasattr(run, "objects") else []
        except Exception:
            objs = []
        for obj in objs or []:
            if not _matches_object_name(getattr(obj, "object_name", None), ov.object_name):
                continue
            if not _matches_status(getattr(obj, "status", None), ov.match_status):
                continue
            if not _contains(getattr(obj, "error_message", None), ov.match_error_contains):
                continue
            return True
        return False
    # Evaluate object-level overrides first.
    for ov in applicable_object_overrides:
        if _matches_object(ov):
            if ov.treat_as_success:
                return "Success (override)", True, "object", ov.id, _reason_for(ov)
            return base_status, True, "object", ov.id, _reason_for(ov)
    # Evaluate global overrides.
    for ov in applicable_global_overrides:
        if _matches_global(ov):
            if ov.treat_as_success:
                return "Success (override)", True, "global", ov.id, _reason_for(ov)
            return base_status, True, "global", ov.id, _reason_for(ov)
    return base_status, False, None, None, None
def _recompute_override_flags_for_runs(job_ids: list[int] | None = None, start_at: datetime | None = None, end_at: datetime | None = None, only_unreviewed: bool = True) -> int:
    """Recompute JobRun.override_applied for already existing runs.

    This is used when an override is created/toggled so existing runs immediately reflect the
    current override configuration.

    Args:
        job_ids: restrict to these jobs when given.
        start_at / end_at: optional bounds on coalesce(run_at, created_at).
        only_unreviewed: skip runs that were already reviewed (default).

    Returns number of updated runs (0 when the final commit fails and is rolled back).
    """
    q = JobRun.query
    if only_unreviewed:
        q = q.filter(JobRun.reviewed_at.is_(None))
    if job_ids:
        q = q.filter(JobRun.job_id.in_(job_ids))
    if start_at:
        q = q.filter(func.coalesce(JobRun.run_at, JobRun.created_at) >= start_at)
    if end_at:
        q = q.filter(func.coalesce(JobRun.run_at, JobRun.created_at) <= end_at)
    try:
        runs = q.all()
    except Exception:
        runs = []
    updated = 0
    for run in runs:
        job = None
        try:
            job = Job.query.get(run.job_id)
        except Exception:
            job = None
        if not job:
            continue
        # Re-evaluate the override state with the current configuration.
        _status, applied, lvl, ov_id, ov_reason = _apply_overrides_to_run(job, run)
        applied_bool = bool(applied)
        changed = False
        if bool(getattr(run, "override_applied", False)) != applied_bool:
            run.override_applied = applied_bool
            changed = True
        # Populate reporting metadata (safe for older schemas that might not yet have columns).
        try:
            if getattr(run, "override_applied_override_id", None) != (ov_id if applied_bool else None):
                run.override_applied_override_id = ov_id if applied_bool else None
                changed = True
        except Exception:
            pass
        try:
            if getattr(run, "override_applied_level", None) != (lvl if applied_bool else None):
                run.override_applied_level = lvl if applied_bool else None
                changed = True
        except Exception:
            pass
        try:
            if getattr(run, "override_applied_reason", None) != (ov_reason if applied_bool else None):
                run.override_applied_reason = ov_reason if applied_bool else None
                changed = True
        except Exception:
            pass
        if changed:
            updated += 1
    if updated:
        try:
            db.session.commit()
        except Exception:
            # Commit failed: discard pending changes and report nothing updated.
            db.session.rollback()
            return 0
    return updated
def _log_admin_event(event_type: str, message: str, details: str | None = None) -> None:
    """Store an admin-level log entry and enforce a 7-day retention window.

    Best-effort: cleanup and commit failures are swallowed (logged to stdout)
    so callers never fail because of audit logging.
    """
    try:
        actor = current_user.username if current_user.is_authenticated else None
    except Exception:
        actor = None
    db.session.add(
        AdminLog(
            user=actor,
            event_type=event_type,
            message=message,
            details=details,
        )
    )
    # Enforce retention: keep only the last 7 days
    try:
        threshold = datetime.utcnow() - timedelta(days=7)
        AdminLog.query.filter(AdminLog.created_at < threshold).delete(synchronize_session=False)
    except Exception:
        # If cleanup fails we still try to commit the new entry
        pass
    try:
        db.session.commit()
    except Exception as exc:
        db.session.rollback()
        print(f"[admin-log] Failed to write log entry: {exc}")
# -------------------------
# Inbox
# -------------------------
# -------------------------
# Customers
# -------------------------
# -------------------------
# Jobs
# -------------------------
# -------------------------
# Customers import / export
# -------------------------
# -------------------------
# Jobs
# -------------------------
def _infer_schedule_map_from_runs(job_id: int):
    """Infer weekly schedule blocks (15-min) from historical runs.

    Returns dict weekday->sorted list of 'HH:MM' strings in configured UI local time
    (0=Mon .. 6=Sun). Certain informational job types always yield an empty map.
    """
    schedule = {i: [] for i in range(7)}  # 0=Mon .. 6=Sun
    # Certain job types are informational and should never participate in schedule
    # inference or Expected/Missed logic (no schedule is applicable).
    try:
        job = Job.query.get(job_id)
        if job:
            bs = (job.backup_software or '').strip().lower()
            bt = (job.backup_type or '').strip().lower()
            # Informational types that should never participate in schedule inference
            # or Expected/Missed generation.
            if bs == 'veeam' and bt == 'license key':
                return schedule
            if bs == 'synology' and bt == 'account protection':
                return schedule
            if bs == 'syncovery' and bt == 'syncovery':
                return schedule
    except Exception:
        pass
    try:
        # Only infer schedules from real runs that came from mail reports.
        # Synthetic "Missed" rows must never influence schedule inference.
        runs = (
            JobRun.query
            .filter(
                JobRun.job_id == job_id,
                JobRun.run_at.isnot(None),
                JobRun.missed.is_(False),
                JobRun.mail_message_id.isnot(None),
            )
            .order_by(JobRun.run_at.desc())
            .limit(500)
            .all()
        )
    except Exception:
        runs = []
    if not runs:
        return schedule
    # Convert run_at to UI local time and bucket into 15-minute blocks
    try:
        tz = _get_ui_timezone()
    except Exception:
        tz = None
    # Deduplicate per weekday before sorting into the final map.
    seen = {i: set() for i in range(7)}
    for r in runs:
        if not r.run_at:
            continue
        dt = r.run_at
        if tz is not None:
            try:
                if dt.tzinfo is None:
                    # DB stores UTC naive timestamps. Convert them to configured UI timezone.
                    dt = dt.replace(tzinfo=datetime_module.timezone.utc).astimezone(tz)
                else:
                    dt = dt.astimezone(tz)
            except Exception:
                pass
        wd = dt.weekday()
        # Round down to the enclosing 15-minute block.
        minute_bucket = (dt.minute // 15) * 15
        hh = dt.hour
        tstr = f"{hh:02d}:{minute_bucket:02d}"
        seen[wd].add(tstr)
    for wd in range(7):
        schedule[wd] = sorted(seen[wd])
    return schedule
def _schedule_map_to_desc(schedule_map):
weekday_names = ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"]
any_times = any(schedule_map.get(i) for i in range(7))
if not any_times:
return "No schedule configured yet (no runs found)."
parts = []
for i in range(7):
times = schedule_map.get(i) or []
if times:
parts.append(f"{weekday_names[i]}: " + ", ".join(times))
else:
parts.append(f"{weekday_names[i]}: —")
return " | ".join(parts)
def _describe_schedule(job: Job) -> str:
    """Produce a human-readable sentence describing a job's configured schedule.

    Handles daily/weekly/monthly/yearly schedule types; unknown types and
    missing jobs fall back to a fixed "no schedule" message.
    """
    if not job:
        return "No schedule configured."
    if not job.schedule_type:
        return "No schedule configured yet."
    raw_times = job.schedule_times or ""
    times_display = raw_times
    if raw_times:
        try:
            # try to normalize to comma-separated
            if isinstance(raw_times, str):
                pieces = [p.strip() for p in raw_times.split(",") if p.strip()]
                times_display = ", ".join(pieces)
        except Exception:
            pass
    stype = job.schedule_type.lower()
    if stype == "daily":
        return f"Runs daily at {times_display}." if times_display else "Runs daily."
    if stype == "weekly":
        days = job.schedule_days_of_week or ""
        if days and times_display:
            return f"Runs weekly on {days} at {times_display}."
        if days:
            return f"Runs weekly on {days}."
        return "Runs weekly."
    if stype == "monthly":
        dom = job.schedule_day_of_month
        if dom and times_display:
            return f"Runs monthly on day {dom} at {times_display}."
        if dom:
            return f"Runs monthly on day {dom}."
        return "Runs monthly."
    if stype == "yearly":
        dom = job.schedule_day_of_month
        if dom and times_display:
            return f"Runs yearly on day {dom} at {times_display}."
        if dom:
            return f"Runs yearly on day {dom}."
        return "Runs yearly."
    return "No schedule configured."
# -------------------------
# Daily jobs / overrides / reports / logging
# -------------------------
# -------------------------
# Settings
# -------------------------
# ---------------------------------------------------------------------------
# Tickets & Remarks
# ---------------------------------------------------------------------------
def _amsterdam_tz():
    # Backward-compatible helper: keep name but use configured UI timezone.
    # Returns whatever _get_ui_timezone() yields, or None when the lookup
    # fails for any reason (callers treat None as "no conversion").
    try:
        return _get_ui_timezone()
    except Exception:
        return None
def _to_amsterdam_date(dt_utc_naive: datetime | None) -> datetime_module.date | None:
if not dt_utc_naive:
return None
tz = _amsterdam_tz()
if not tz:
return dt_utc_naive.date()
try:
if dt_utc_naive.tzinfo is None:
dt_utc = dt_utc_naive.replace(tzinfo=datetime_module.timezone.utc)
else:
dt_utc = dt_utc_naive.astimezone(datetime_module.timezone.utc)
return dt_utc.astimezone(tz).date()
except Exception:
return dt_utc_naive.date()
def _next_ticket_code(now_utc: datetime) -> str:
    """Generate the next sequential ticket code, e.g. ``T20260101.0003``.

    The date part uses the UI timezone's calendar day (falling back to the
    UTC date); the 4-digit sequence continues from the highest code already
    stored for that day, restarting at 0001 on DB errors or parse failures.
    """
    today = _to_amsterdam_date(now_utc) or now_utc.date()
    prefix = f"T{today.strftime('%Y%m%d')}."
    # Look up the highest existing code for today; any failure means "none yet".
    try:
        latest = (
            db.session.query(Ticket.ticket_code)
            .filter(Ticket.ticket_code.like(prefix + "%"))
            .order_by(Ticket.ticket_code.desc())
            .limit(1)
            .scalar()
        )
    except Exception:
        latest = None
    next_seq = 1
    if latest and latest.startswith(prefix):
        try:
            next_seq = int(latest.split(".")[-1]) + 1
        except Exception:
            next_seq = 1
    return f"{prefix}{next_seq:04d}"

View File

@ -0,0 +1,335 @@
from .routes_shared import * # noqa: F401,F403
from .routes_shared import _format_datetime
@main_bp.route("/tickets")
@login_required
@roles_required("admin", "operator", "viewer")
def tickets_page():
    """List tickets or remarks with shared filters.

    Query params: tab (tickets|remarks), active (1=open only, default),
    q (free text), customer_id, backup_software, backup_type.
    Renders main/tickets.html with at most 500 rows per tab.
    """
    # Tab selection: anything other than "remarks" falls back to "tickets".
    tab = (request.args.get("tab") or "tickets").strip().lower()
    if tab not in ("tickets", "remarks"):
        tab = "tickets"
    # active defaults to "1": only unresolved items unless explicitly ?active=0.
    active = (request.args.get("active") or "1").strip()
    active_only = active != "0"
    q = (request.args.get("q") or "").strip()
    try:
        customer_id = int(request.args.get("customer_id") or 0)
    except Exception:
        customer_id = 0
    backup_software = (request.args.get("backup_software") or "").strip()
    backup_type = (request.args.get("backup_type") or "").strip()
    customers = Customer.query.order_by(Customer.name.asc()).all()
    tickets = []
    remarks = []
    if tab == "tickets":
        query = Ticket.query
        if active_only:
            query = query.filter(Ticket.resolved_at.is_(None))
        if q:
            # Free-text search over ticket code and description.
            like_q = f"%{q}%"
            query = query.filter(
                (Ticket.ticket_code.ilike(like_q))
                | (Ticket.description.ilike(like_q))
            )
        if customer_id or backup_software or backup_type:
            # NOTE(review): joining scopes without .distinct() may return a
            # ticket once per matching scope row — confirm whether duplicates
            # can occur in practice.
            query = query.join(TicketScope, TicketScope.ticket_id == Ticket.id)
            if customer_id:
                query = query.filter(TicketScope.customer_id == customer_id)
            if backup_software:
                query = query.filter(TicketScope.backup_software == backup_software)
            if backup_type:
                query = query.filter(TicketScope.backup_type == backup_type)
        # Open tickets first (resolved_at IS NULL sorts before NOT NULL), newest first.
        query = query.order_by(Ticket.resolved_at.isnot(None), Ticket.start_date.desc())
        tickets_raw = query.limit(500).all()
        ticket_ids = [t.id for t in tickets_raw]
        # Bulk lookups: ticket id -> customer names, ticket id -> linked run count.
        customer_map = {}
        run_count_map = {}
        if ticket_ids:
            try:
                rows = (
                    db.session.execute(
                        text(
                            """
                            SELECT ts.ticket_id, c.name
                            FROM ticket_scopes ts
                            JOIN customers c ON c.id = ts.customer_id
                            WHERE ts.ticket_id = ANY(:ids)
                              AND ts.customer_id IS NOT NULL
                            """
                        ),
                        {"ids": ticket_ids},
                    )
                    .fetchall()
                )
                for tid, cname in rows:
                    customer_map.setdefault(int(tid), [])
                    if cname and cname not in customer_map[int(tid)]:
                        customer_map[int(tid)].append(cname)
            except Exception:
                # Best-effort enrichment; the listing still renders without names.
                customer_map = {}
            try:
                rows = (
                    db.session.execute(
                        text(
                            """
                            SELECT ticket_id, COUNT(*)
                            FROM ticket_job_runs
                            WHERE ticket_id = ANY(:ids)
                            GROUP BY ticket_id
                            """
                        ),
                        {"ids": ticket_ids},
                    )
                    .fetchall()
                )
                for tid, cnt in rows:
                    run_count_map[int(tid)] = int(cnt or 0)
            except Exception:
                run_count_map = {}
        for t in tickets_raw:
            # Display first customer, with "+N" suffix when several are scoped.
            customers_for_ticket = customer_map.get(t.id) or []
            if customers_for_ticket:
                customer_display = customers_for_ticket[0]
                if len(customers_for_ticket) > 1:
                    customer_display += f" +{len(customers_for_ticket)-1}"
            else:
                customer_display = "-"
            # Scope summary: best-effort from first scope
            scope_summary = "-"
            first_job_id = None
            try:
                s = TicketScope.query.filter(TicketScope.ticket_id == t.id).order_by(TicketScope.id.asc()).first()
                if s:
                    parts = []
                    if s.backup_software:
                        parts.append(s.backup_software)
                    if s.backup_type:
                        parts.append(s.backup_type)
                    if s.job_id:
                        first_job_id = int(s.job_id)
                        job = Job.query.get(s.job_id)
                        if job and job.job_name:
                            parts.append(job.job_name)
                    scope_summary = " / ".join([p for p in parts if p]) or "-"
            except Exception:
                scope_summary = "-"
            tickets.append(
                {
                    "id": t.id,
                    "ticket_code": t.ticket_code,
                    "description": t.description or "",
                    "active_from_date": str(getattr(t, "active_from_date", "") or ""),
                    "start_date": _format_datetime(t.start_date),
                    "resolved_at": _format_datetime(t.resolved_at) if t.resolved_at else "",
                    "active": t.resolved_at is None,
                    "customers": customer_display,
                    "scope_summary": scope_summary,
                    "linked_runs": run_count_map.get(t.id, 0),
                    "job_id": first_job_id,
                }
            )
    else:
        # Remarks tab: mirrors the tickets branch with Remark/RemarkScope tables.
        query = Remark.query
        if active_only:
            query = query.filter(Remark.resolved_at.is_(None))
        if q:
            like_q = f"%{q}%"
            query = query.filter(Remark.body.ilike(like_q))
        if customer_id or backup_software or backup_type:
            query = query.join(RemarkScope, RemarkScope.remark_id == Remark.id)
            if customer_id:
                query = query.filter(RemarkScope.customer_id == customer_id)
            if backup_software:
                query = query.filter(RemarkScope.backup_software == backup_software)
            if backup_type:
                query = query.filter(RemarkScope.backup_type == backup_type)
        query = query.order_by(Remark.resolved_at.isnot(None), Remark.start_date.desc())
        remarks_raw = query.limit(500).all()
        remark_ids = [r.id for r in remarks_raw]
        customer_map = {}
        run_count_map = {}
        if remark_ids:
            try:
                rows = (
                    db.session.execute(
                        text(
                            """
                            SELECT rs.remark_id, c.name
                            FROM remark_scopes rs
                            JOIN customers c ON c.id = rs.customer_id
                            WHERE rs.remark_id = ANY(:ids)
                              AND rs.customer_id IS NOT NULL
                            """
                        ),
                        {"ids": remark_ids},
                    )
                    .fetchall()
                )
                for rid, cname in rows:
                    customer_map.setdefault(int(rid), [])
                    if cname and cname not in customer_map[int(rid)]:
                        customer_map[int(rid)].append(cname)
            except Exception:
                customer_map = {}
            try:
                rows = (
                    db.session.execute(
                        text(
                            """
                            SELECT remark_id, COUNT(*)
                            FROM remark_job_runs
                            WHERE remark_id = ANY(:ids)
                            GROUP BY remark_id
                            """
                        ),
                        {"ids": remark_ids},
                    )
                    .fetchall()
                )
                for rid, cnt in rows:
                    run_count_map[int(rid)] = int(cnt or 0)
            except Exception:
                run_count_map = {}
        for r in remarks_raw:
            customers_for_remark = customer_map.get(r.id) or []
            if customers_for_remark:
                customer_display = customers_for_remark[0]
                if len(customers_for_remark) > 1:
                    customer_display += f" +{len(customers_for_remark)-1}"
            else:
                customer_display = "-"
            scope_summary = "-"
            first_job_id = None
            try:
                s = RemarkScope.query.filter(RemarkScope.remark_id == r.id).order_by(RemarkScope.id.asc()).first()
                if s:
                    parts = []
                    if s.backup_software:
                        parts.append(s.backup_software)
                    if s.backup_type:
                        parts.append(s.backup_type)
                    if s.job_id:
                        first_job_id = int(s.job_id)
                        job = Job.query.get(s.job_id)
                        if job and job.job_name:
                            parts.append(job.job_name)
                    scope_summary = " / ".join([p for p in parts if p]) or "-"
            except Exception:
                scope_summary = "-"
            # Truncate long bodies to an 80-char preview ("..." suffix).
            preview = (r.body or "")
            if len(preview) > 80:
                preview = preview[:77] + "..."
            remarks.append(
                {
                    "id": r.id,
                    "preview": preview,
                    "start_date": _format_datetime(r.start_date) if r.start_date else "-",
                    "resolved_at": _format_datetime(r.resolved_at) if r.resolved_at else "",
                    "active": r.resolved_at is None,
                    "customers": customer_display,
                    "scope_summary": scope_summary,
                    "linked_runs": run_count_map.get(r.id, 0),
                    "job_id": first_job_id,
                }
            )
    return render_template(
        "main/tickets.html",
        tab=tab,
        active_only=active_only,
        q=q,
        customer_id=customer_id,
        backup_software=backup_software,
        backup_type=backup_type,
        customers=customers,
        tickets=tickets,
        remarks=remarks,
    )
@main_bp.route("/tickets/<int:ticket_id>", methods=["GET", "POST"])
@login_required
@roles_required("admin", "operator", "viewer")
def ticket_detail(ticket_id: int):
    """Show one ticket; POST updates its description (admin/operator only)."""
    ticket = Ticket.query.get_or_404(ticket_id)
    if request.method == "POST":
        # Viewers may open the page but must not modify tickets.
        if get_active_role() not in ("admin", "operator"):
            abort(403)
        # Empty input clears the description (stored as NULL).
        ticket.description = (request.form.get("description") or "").strip() or None
        try:
            db.session.commit()
            flash("Ticket updated.", "success")
        except Exception as exc:
            db.session.rollback()
            flash(f"Failed to update ticket: {exc}", "danger")
        return redirect(url_for("main.ticket_detail", ticket_id=ticket.id))
    # Scopes
    scopes = TicketScope.query.filter(TicketScope.ticket_id == ticket.id).order_by(TicketScope.id.asc()).all()
    # Linked runs
    # Most recent 20 runs linked to this ticket, enriched with job/customer names.
    runs = []
    try:
        rows = (
            db.session.execute(
                text(
                    """
                    SELECT jr.id, jr.run_at, jr.status, j.job_name, c.name AS customer_name
                    FROM ticket_job_runs tjr
                    JOIN job_runs jr ON jr.id = tjr.job_run_id
                    JOIN jobs j ON j.id = jr.job_id
                    LEFT JOIN customers c ON c.id = j.customer_id
                    WHERE tjr.ticket_id = :ticket_id
                    ORDER BY jr.run_at DESC
                    LIMIT 20
                    """
                ),
                {"ticket_id": ticket.id},
            )
            .mappings()
            .all()
        )
        for r in rows:
            runs.append(
                {
                    "id": r.get("id"),
                    "run_at": _format_datetime(r.get("run_at")),
                    "status": r.get("status") or "",
                    "job_name": r.get("job_name") or "",
                    "customer_name": r.get("customer_name") or "",
                }
            )
    except Exception:
        # Best-effort: still render the page when the linked-runs query fails.
        runs = []
    return render_template(
        "main/ticket_detail.html",
        ticket=ticket,
        scopes=scopes,
        runs=runs,
    )

View File

@ -0,0 +1,38 @@
from flask import render_template, redirect, url_for, flash, request
from flask_login import login_required, current_user
from ..database import db
from .routes_shared import main_bp
@main_bp.route("/user-settings", methods=["GET", "POST"])
@login_required
def user_settings():
    """User self-service settings.
    Currently allows the logged-in user to change their own password.
    """
    if request.method != "POST":
        return render_template("main/user_settings.html")
    current_password = request.form.get("current_password") or ""
    new_password = (request.form.get("new_password") or "").strip()
    confirm_password = (request.form.get("confirm_password") or "").strip()
    # Validate in order; only the first failure is reported, matching a
    # step-by-step check of the submitted form.
    error = None
    if not current_user.check_password(current_password):
        error = "Current password is incorrect."
    elif not new_password:
        error = "New password is required."
    elif new_password != confirm_password:
        error = "Passwords do not match."
    if error:
        flash(error, "danger")
        return render_template("main/user_settings.html")
    current_user.set_password(new_password)
    db.session.commit()
    flash("Password updated.", "success")
    return redirect(url_for("main.user_settings"))

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,625 @@
from datetime import datetime
from flask_login import UserMixin
from flask import session, has_request_context
from werkzeug.security import generate_password_hash, check_password_hash
from .database import db
class User(db.Model, UserMixin):
    """Application account with one or more roles and a UI theme preference."""
    __tablename__ = "users"
    id = db.Column(db.Integer, primary_key=True)
    # username is the primary login identifier
    username = db.Column(db.String(255), unique=True, nullable=False)
    # email is kept for future use and may be NULL
    email = db.Column(db.String(255), nullable=True)
    password_hash = db.Column(db.String(255), nullable=False)
    # Comma-separated role list (see `roles` property); defaults to viewer.
    role = db.Column(db.String(50), nullable=False, default="viewer")
    # UI theme preference: 'auto' (follow OS), 'light', 'dark'
    theme_preference = db.Column(db.String(16), nullable=False, default="auto")
    created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
    def set_password(self, password: str) -> None:
        # Store only a salted hash; the plaintext is never persisted.
        self.password_hash = generate_password_hash(password)
    def check_password(self, password: str) -> bool:
        return check_password_hash(self.password_hash, password)
    @property
    def roles(self) -> list[str]:
        """Return all assigned roles.
        The database stores roles as a comma-separated string for backwards
        compatibility with older schemas.
        """
        raw = (self.role or "").strip()
        if not raw:
            return ["viewer"]
        parts = [p.strip() for p in raw.split(",")]
        roles = [p for p in parts if p]
        return roles or ["viewer"]
    @property
    def active_role(self) -> str:
        """Return the currently active role for this user.
        When a request context exists, the active role is stored in the session.
        If the stored role is not assigned to the user, it falls back to the
        first assigned role.
        """
        default_role = self.roles[0]
        if not has_request_context():
            return default_role
        selected = (session.get("active_role") or "").strip()
        if selected and selected in self.roles:
            return selected
        # Repair the session when it holds a role the user no longer has.
        session["active_role"] = default_role
        return default_role
    def set_active_role(self, role: str) -> None:
        """Set the active role in the current session (if possible)."""
        if not has_request_context():
            return
        role = (role or "").strip()
        if role and role in self.roles:
            session["active_role"] = role
        else:
            # Unknown/unassigned role requested: fall back to the first role.
            session["active_role"] = self.roles[0]
    @property
    def is_admin(self) -> bool:
        # Admin status follows the *active* role, not merely role membership.
        return self.active_role == "admin"
class SystemSettings(db.Model):
    """Singleton-style table holding global configuration (mail, import, UI)."""
    __tablename__ = "system_settings"
    id = db.Column(db.Integer, primary_key=True)
    # Graph / mail settings
    graph_tenant_id = db.Column(db.String(255), nullable=True)
    graph_client_id = db.Column(db.String(255), nullable=True)
    graph_client_secret = db.Column(db.String(255), nullable=True)
    graph_mailbox = db.Column(db.String(255), nullable=True)
    incoming_folder = db.Column(db.String(255), nullable=True)
    processed_folder = db.Column(db.String(255), nullable=True)
    # Import configuration
    auto_import_enabled = db.Column(db.Boolean, nullable=False, default=False)
    auto_import_interval_minutes = db.Column(db.Integer, nullable=False, default=15)
    auto_import_max_items = db.Column(db.Integer, nullable=False, default=50)
    manual_import_batch_size = db.Column(db.Integer, nullable=False, default=50)
    auto_import_cutoff_date = db.Column(db.Date, nullable=True)
    # Debug storage: store raw EML in database for a limited retention window.
    # 0 = disabled, 7/14 = retention days.
    ingest_eml_retention_days = db.Column(db.Integer, nullable=False, default=7)
    # Daily Jobs: from which date 'Missed' status should start to be applied.
    daily_jobs_start_date = db.Column(db.Date, nullable=True)
    # UI display timezone (IANA name). Used for rendering times in the web interface.
    ui_timezone = db.Column(db.String(64), nullable=False, default="Europe/Amsterdam")
    created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
    updated_at = db.Column(
        db.DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False
    )
class AdminLog(db.Model):
    """Append-only audit log of administrative events."""
    __tablename__ = "admin_logs"
    id = db.Column(db.Integer, primary_key=True)
    created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
    # Username of the actor; nullable for system-generated events.
    user = db.Column(db.String(255), nullable=True)
    event_type = db.Column(db.String(64), nullable=False)
    message = db.Column(db.Text, nullable=False)
    details = db.Column(db.Text, nullable=True)
class Customer(db.Model):
    """A customer whose backup jobs are monitored."""
    __tablename__ = "customers"
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(255), unique=True, nullable=False)
    active = db.Column(db.Boolean, nullable=False, default=True)
    created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
    updated_at = db.Column(
        db.DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False
    )
class Override(db.Model):
    """Rule that reinterprets run/object statuses (e.g. treat failure as success)
    within a validity window, scoped globally or to a specific job object."""
    __tablename__ = "overrides"
    id = db.Column(db.Integer, primary_key=True)
    # Level of the override: global or object (job-level is no longer used)
    level = db.Column(db.String(20), nullable=False)
    # Scope for global overrides (optional wildcard fields)
    backup_software = db.Column(db.String(255), nullable=True)
    backup_type = db.Column(db.String(255), nullable=True)
    # Scope for object overrides
    job_id = db.Column(db.Integer, db.ForeignKey("jobs.id"), nullable=True)
    object_name = db.Column(db.String(255), nullable=True)
    # Matching criteria on object status / error message
    match_status = db.Column(db.String(32), nullable=True)
    match_error_contains = db.Column(db.String(255), nullable=True)
    # Behaviour flags
    treat_as_success = db.Column(db.Boolean, nullable=False, default=True)
    active = db.Column(db.Boolean, nullable=False, default=True)
    # Validity window
    start_at = db.Column(db.DateTime, nullable=False)
    end_at = db.Column(db.DateTime, nullable=True)
    # Management metadata
    comment = db.Column(db.Text, nullable=True)
    created_by = db.Column(db.String(255), nullable=True)
    updated_by = db.Column(db.String(255), nullable=True)
    created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
    updated_at = db.Column(
        db.DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False
    )
class Job(db.Model):
    """A recurring backup job belonging to a customer, with its schedule."""
    __tablename__ = "jobs"
    id = db.Column(db.Integer, primary_key=True)
    customer_id = db.Column(db.Integer, db.ForeignKey("customers.id"), nullable=True)
    backup_software = db.Column(db.String(128), nullable=True)
    backup_type = db.Column(db.String(128), nullable=True)
    job_name = db.Column(db.String(512), nullable=True)
    # Sender address of the notification mails that identify this job.
    from_address = db.Column(db.String(512), nullable=True)
    schedule_type = db.Column(db.String(32), nullable=True)  # daily, weekly, monthly, yearly
    schedule_days_of_week = db.Column(db.String(64), nullable=True)  # e.g. "Mon,Tue,Wed"
    schedule_day_of_month = db.Column(db.Integer, nullable=True)  # 1-31
    schedule_times = db.Column(db.String(255), nullable=True)  # e.g. "01:00,13:15"
    # When True, incoming mails for this job are approved without manual review.
    auto_approve = db.Column(db.Boolean, nullable=False, default=True)
    active = db.Column(db.Boolean, nullable=False, default=True)
    created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
    updated_at = db.Column(
        db.DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False
    )
    customer = db.relationship(
        "Customer",
        backref=db.backref("jobs", lazy="dynamic"),
        lazy="joined",
    )
class JobRun(db.Model):
    """A single execution of a Job, usually derived from an ingested mail."""
    __tablename__ = "job_runs"
    id = db.Column(db.Integer, primary_key=True)
    job_id = db.Column(db.Integer, db.ForeignKey("jobs.id"), nullable=False)
    mail_message_id = db.Column(db.Integer, db.ForeignKey("mail_messages.id"), nullable=True)
    run_at = db.Column(db.DateTime, nullable=True)
    status = db.Column(db.String(64), nullable=True)
    remark = db.Column(db.Text, nullable=True)
    # True when the run was synthesized for a scheduled slot with no mail.
    missed = db.Column(db.Boolean, nullable=False, default=False)
    override_applied = db.Column(db.Boolean, nullable=False, default=False)
    # Override metadata for reporting/auditing.
    # These are populated when override flags are recomputed.
    override_applied_override_id = db.Column(db.Integer, nullable=True)
    override_applied_level = db.Column(db.String(16), nullable=True)
    override_applied_reason = db.Column(db.Text, nullable=True)
    # Optional storage metrics (e.g. for repository capacity monitoring)
    storage_used_bytes = db.Column(db.BigInteger, nullable=True)
    storage_capacity_bytes = db.Column(db.BigInteger, nullable=True)
    storage_free_bytes = db.Column(db.BigInteger, nullable=True)
    storage_free_percent = db.Column(db.Float, nullable=True)
    # Run review (Run Checks)
    reviewed_at = db.Column(db.DateTime, nullable=True)
    reviewed_by_user_id = db.Column(db.Integer, db.ForeignKey("users.id"), nullable=True)
    created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
    updated_at = db.Column(
        db.DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False
    )
    job = db.relationship(
        "Job",
        backref=db.backref("runs", lazy="dynamic", cascade="all, delete-orphan"),
    )
    reviewed_by = db.relationship("User", foreign_keys=[reviewed_by_user_id])
class JobRunReviewEvent(db.Model):
    """Audit trail entry for (un)reviewing a JobRun."""
    __tablename__ = "job_run_review_events"
    id = db.Column(db.Integer, primary_key=True)
    run_id = db.Column(db.Integer, db.ForeignKey("job_runs.id"), nullable=False)
    action = db.Column(db.String(32), nullable=False)  # REVIEWED | UNREVIEWED
    actor_user_id = db.Column(db.Integer, db.ForeignKey("users.id"), nullable=False)
    note = db.Column(db.Text, nullable=True)
    created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
    run = db.relationship(
        "JobRun",
        backref=db.backref("review_events", lazy="dynamic", cascade="all, delete-orphan"),
    )
    actor = db.relationship("User", foreign_keys=[actor_user_id])
class JobObject(db.Model):
    """Per-object (VM, share, database, ...) result within a JobRun."""
    __tablename__ = "job_objects"
    id = db.Column(db.Integer, primary_key=True)
    job_run_id = db.Column(db.Integer, db.ForeignKey("job_runs.id"), nullable=False)
    object_name = db.Column(db.String(512), nullable=False)
    object_type = db.Column(db.String(128), nullable=True)
    status = db.Column(db.String(64), nullable=True)
    error_message = db.Column(db.Text, nullable=True)
    created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
    job_run = db.relationship(
        "JobRun",
        backref=db.backref("objects", lazy="dynamic", cascade="all, delete-orphan"),
    )
class MailMessage(db.Model):
    """An ingested backup-notification e-mail plus its parsed results and
    approval/soft-delete state."""
    __tablename__ = "mail_messages"
    id = db.Column(db.Integer, primary_key=True)
    # Basic mail metadata
    message_id = db.Column(db.String(512), unique=True, nullable=True)
    from_address = db.Column(db.String(512), nullable=True)
    subject = db.Column(db.String(1024), nullable=True)
    received_at = db.Column(db.DateTime, nullable=True)
    # Parsed backup metadata
    backup_software = db.Column(db.String(128), nullable=True)
    backup_type = db.Column(db.String(128), nullable=True)
    job_name = db.Column(db.String(512), nullable=True)
    # Fix: a second, duplicate `from_address = db.Column(...)` definition was
    # removed here; it silently shadowed the identical column declared under
    # "Basic mail metadata" above.
    overall_status = db.Column(db.String(32), nullable=True)
    overall_message = db.Column(db.Text, nullable=True)
    parse_result = db.Column(db.String(32), nullable=True)
    parse_error = db.Column(db.String(512), nullable=True)
    parsed_at = db.Column(db.DateTime, nullable=True)
    # Optional storage metrics (e.g. repository capacity monitoring)
    storage_used_bytes = db.Column(db.BigInteger, nullable=True)
    storage_capacity_bytes = db.Column(db.BigInteger, nullable=True)
    storage_free_bytes = db.Column(db.BigInteger, nullable=True)
    storage_free_percent = db.Column(db.Float, nullable=True)
    # Link back to Job and location (inbox/history)
    job_id = db.Column(db.Integer, db.ForeignKey("jobs.id"), nullable=True)
    location = db.Column(db.String(32), nullable=False, default="inbox")
    # Raw / rendered content storage (for inline popup)
    html_body = db.Column(db.Text, nullable=True)
    text_body = db.Column(db.Text, nullable=True)
    # Optional raw RFC822 message storage (debug) - controlled by SystemSettings.ingest_eml_retention_days
    eml_blob = db.Column(db.LargeBinary, nullable=True)
    eml_stored_at = db.Column(db.DateTime, nullable=True)
    # Approval metadata
    approved = db.Column(db.Boolean, nullable=False, default=False)
    approved_at = db.Column(db.DateTime, nullable=True)
    approved_by_id = db.Column(db.Integer, db.ForeignKey("users.id"), nullable=True)
    # Soft-delete metadata (Inbox delete -> Admin restore)
    deleted_at = db.Column(db.DateTime, nullable=True)
    deleted_by_user_id = db.Column(db.Integer, db.ForeignKey("users.id"), nullable=True)
    created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
    updated_at = db.Column(
        db.DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False
    )
    deleted_by_user = db.relationship("User", foreign_keys=[deleted_by_user_id])
class MailObject(db.Model):
    """Per-object result parsed from a MailMessage (pre-approval counterpart
    of JobObject)."""
    __tablename__ = "mail_objects"
    id = db.Column(db.Integer, primary_key=True)
    mail_message_id = db.Column(db.Integer, db.ForeignKey("mail_messages.id"), nullable=False)
    object_name = db.Column(db.String(512), nullable=False)
    object_type = db.Column(db.String(128), nullable=True)
    status = db.Column(db.String(64), nullable=True)
    error_message = db.Column(db.Text, nullable=True)
    created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
class Ticket(db.Model):
    """A tracked issue identified by a generated code (T<YYYYMMDD>.<NNNN>)."""
    __tablename__ = "tickets"
    id = db.Column(db.Integer, primary_key=True)
    ticket_code = db.Column(db.String(32), unique=True, nullable=False)
    title = db.Column(db.String(255))
    description = db.Column(db.Text)
    # Date (Europe/Amsterdam) from which this ticket should be considered active
    # for the scoped job(s) in Daily Jobs / Job Details views.
    active_from_date = db.Column(db.Date, nullable=False)
    # Audit timestamp: when the ticket was created (UTC, naive)
    start_date = db.Column(db.DateTime, nullable=False)
    # NULL while the ticket is open; set when resolved.
    resolved_at = db.Column(db.DateTime)
    created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
    updated_at = db.Column(db.DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False)
class TicketScope(db.Model):
    """Scoping rule attaching a Ticket to customers/software/types/jobs."""
    __tablename__ = "ticket_scopes"
    id = db.Column(db.Integer, primary_key=True)
    ticket_id = db.Column(db.Integer, db.ForeignKey("tickets.id"), nullable=False)
    scope_type = db.Column(db.String(32), nullable=False)
    customer_id = db.Column(db.Integer, db.ForeignKey("customers.id"))
    backup_software = db.Column(db.String(128))
    backup_type = db.Column(db.String(128))
    job_id = db.Column(db.Integer, db.ForeignKey("jobs.id"))
    job_name_match = db.Column(db.String(255))
    job_name_match_mode = db.Column(db.String(32))
    created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
class TicketJobRun(db.Model):
    """Many-to-many link between a Ticket and a JobRun (unique per pair)."""
    __tablename__ = "ticket_job_runs"
    id = db.Column(db.Integer, primary_key=True)
    ticket_id = db.Column(db.Integer, db.ForeignKey("tickets.id"), nullable=False)
    job_run_id = db.Column(db.Integer, db.ForeignKey("job_runs.id"), nullable=False)
    linked_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
    # How the link was created (e.g. manual vs automatic).
    link_source = db.Column(db.String(64), nullable=False)
    __table_args__ = (db.UniqueConstraint("ticket_id", "job_run_id", name="uq_ticket_job_run"),)
class Remark(db.Model):
    """Free-form note, structurally parallel to Ticket but without a code."""
    __tablename__ = "remarks"
    id = db.Column(db.Integer, primary_key=True)
    title = db.Column(db.String(255))
    body = db.Column(db.Text, nullable=False)
    # Date (Europe/Amsterdam) from which this remark should be considered active
    # for the scoped job(s) in Daily Jobs / Job Details views.
    active_from_date = db.Column(db.Date)
    start_date = db.Column(db.DateTime)
    # NULL while the remark is open; set when resolved.
    resolved_at = db.Column(db.DateTime)
    created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
    updated_at = db.Column(db.DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False)
class RemarkScope(db.Model):
    """Scoping rule attaching a Remark to customers/software/types/jobs/runs."""
    __tablename__ = "remark_scopes"
    id = db.Column(db.Integer, primary_key=True)
    remark_id = db.Column(db.Integer, db.ForeignKey("remarks.id"), nullable=False)
    scope_type = db.Column(db.String(32), nullable=False)
    customer_id = db.Column(db.Integer, db.ForeignKey("customers.id"))
    backup_software = db.Column(db.String(128))
    backup_type = db.Column(db.String(128))
    job_id = db.Column(db.Integer, db.ForeignKey("jobs.id"))
    job_name_match = db.Column(db.String(255))
    job_name_match_mode = db.Column(db.String(32))
    # Unlike TicketScope, a remark scope can target one specific run.
    job_run_id = db.Column(db.Integer, db.ForeignKey("job_runs.id"))
    created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
class RemarkJobRun(db.Model):
    """Many-to-many link between a Remark and a JobRun (unique per pair)."""
    __tablename__ = "remark_job_runs"
    id = db.Column(db.Integer, primary_key=True)
    remark_id = db.Column(db.Integer, db.ForeignKey("remarks.id"), nullable=False)
    job_run_id = db.Column(db.Integer, db.ForeignKey("job_runs.id"), nullable=False)
    linked_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
    link_source = db.Column(db.String(64), nullable=False)
    __table_args__ = (db.UniqueConstraint("remark_id", "job_run_id", name="uq_remark_job_run"),)
class FeedbackItem(db.Model):
    """User-submitted bug report or feature request with soft delete."""
    __tablename__ = "feedback_items"
    id = db.Column(db.Integer, primary_key=True)
    # bug | feature
    item_type = db.Column(db.String(16), nullable=False)
    title = db.Column(db.String(255), nullable=False)
    description = db.Column(db.Text, nullable=False)
    component = db.Column(db.String(255), nullable=True)
    # open | resolved
    status = db.Column(db.String(16), nullable=False, default="open")
    created_by_user_id = db.Column(db.Integer, db.ForeignKey("users.id"), nullable=False)
    resolved_by_user_id = db.Column(db.Integer, db.ForeignKey("users.id"), nullable=True)
    resolved_at = db.Column(db.DateTime, nullable=True)
    deleted_by_user_id = db.Column(db.Integer, db.ForeignKey("users.id"), nullable=True)
    deleted_at = db.Column(db.DateTime, nullable=True)
    created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
    updated_at = db.Column(
        db.DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False
    )
class FeedbackVote(db.Model):
    """One upvote by one user on a FeedbackItem (unique per item/user)."""
    __tablename__ = "feedback_votes"
    id = db.Column(db.Integer, primary_key=True)
    feedback_item_id = db.Column(
        db.Integer, db.ForeignKey("feedback_items.id"), nullable=False
    )
    user_id = db.Column(db.Integer, db.ForeignKey("users.id"), nullable=False)
    created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
    __table_args__ = (
        db.UniqueConstraint(
            "feedback_item_id", "user_id", name="uq_feedback_vote_item_user"
        ),
    )
class NewsItem(db.Model):
    """Announcement shown in the UI, optionally pinned and time-windowed."""
    __tablename__ = "news_items"
    id = db.Column(db.Integer, primary_key=True)
    title = db.Column(db.String(255), nullable=False)
    body = db.Column(db.Text, nullable=False)
    link_url = db.Column(db.String(2048), nullable=True)
    severity = db.Column(db.String(32), nullable=False, default="info")  # info, warning
    pinned = db.Column(db.Boolean, nullable=False, default=False)
    active = db.Column(db.Boolean, nullable=False, default=True)
    # Optional publication window; NULL means unbounded on that side.
    publish_from = db.Column(db.DateTime, nullable=True)
    publish_until = db.Column(db.DateTime, nullable=True)
    created_by_user_id = db.Column(db.Integer, db.ForeignKey("users.id"), nullable=True)
    created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
    updated_at = db.Column(
        db.DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False
    )
class NewsRead(db.Model):
    """Marks a NewsItem as read by a specific user."""
    __tablename__ = "news_reads"
    id = db.Column(db.Integer, primary_key=True)
    news_item_id = db.Column(db.Integer, db.ForeignKey("news_items.id"), nullable=False)
    user_id = db.Column(db.Integer, db.ForeignKey("users.id"), nullable=False)
    read_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
# --- Reporting (phase 1: raw data foundation) ---------------------------------
class ReportDefinition(db.Model):
__tablename__ = "report_definitions"
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(255), nullable=False)
description = db.Column(db.Text, nullable=True)
# one-time | scheduled
report_type = db.Column(db.String(32), nullable=False, default="one-time")
# csv | pdf (pdf is future)
output_format = db.Column(db.String(16), nullable=False, default="csv")
# customer scope for report generation
# all | single | multiple
customer_scope = db.Column(db.String(16), nullable=False, default="all")
# JSON encoded list of customer ids. NULL/empty when scope=all.
customer_ids = db.Column(db.Text, nullable=True)
period_start = db.Column(db.DateTime, nullable=False)
period_end = db.Column(db.DateTime, nullable=False)
# For scheduled reports in later phases (cron / RRULE style string)
schedule = db.Column(db.String(255), nullable=True)
created_by_user_id = db.Column(db.Integer, db.ForeignKey("users.id"), nullable=True)
created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
updated_at = db.Column(
db.DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False
)
created_by = db.relationship("User", foreign_keys=[created_by_user_id])
class ReportObjectSnapshot(db.Model):
__tablename__ = "report_object_snapshots"
id = db.Column(db.Integer, primary_key=True)
report_id = db.Column(db.Integer, db.ForeignKey("report_definitions.id"), nullable=False)
# Object identity (from customer_objects.object_name)
object_name = db.Column(db.Text, nullable=False)
# Job identity
job_id = db.Column(db.Integer, nullable=True)
job_name = db.Column(db.Text, nullable=True)
customer_id = db.Column(db.Integer, nullable=True)
customer_name = db.Column(db.Text, nullable=True)
backup_software = db.Column(db.Text, nullable=True)
backup_type = db.Column(db.Text, nullable=True)
# Run identity
run_id = db.Column(db.Integer, nullable=True)
run_at = db.Column(db.DateTime, nullable=True)
status = db.Column(db.Text, nullable=True)
missed = db.Column(db.Boolean, nullable=False, default=False)
override_applied = db.Column(db.Boolean, nullable=False, default=False)
reviewed_at = db.Column(db.DateTime, nullable=True)
ticket_number = db.Column(db.Text, nullable=True)
remark = db.Column(db.Text, nullable=True)
created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
report = db.relationship(
"ReportDefinition",
backref=db.backref("object_snapshots", lazy="dynamic", cascade="all, delete-orphan"),
)
class ReportObjectSummary(db.Model):
    """Aggregated per-object counters for a generated report.

    One row per object name with run counts by status and a success rate.
    """
    __tablename__ = "report_object_summaries"
    id = db.Column(db.Integer, primary_key=True)
    report_id = db.Column(db.Integer, db.ForeignKey("report_definitions.id"), nullable=False)
    object_name = db.Column(db.Text, nullable=False)
    total_runs = db.Column(db.Integer, nullable=False, default=0)
    success_count = db.Column(db.Integer, nullable=False, default=0)
    # Runs that only count as success because an override was applied.
    success_override_count = db.Column(db.Integer, nullable=False, default=0)
    warning_count = db.Column(db.Integer, nullable=False, default=0)
    failed_count = db.Column(db.Integer, nullable=False, default=0)
    missed_count = db.Column(db.Integer, nullable=False, default=0)
    # Fraction/percentage of successful runs — unit not visible here; confirm against writer.
    success_rate = db.Column(db.Float, nullable=False, default=0.0)
    created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
    # Deleting a report deletes its summaries (delete-orphan cascade).
    report = db.relationship(
        "ReportDefinition",
        backref=db.backref("object_summaries", lazy="dynamic", cascade="all, delete-orphan"),
    )

View File

@ -0,0 +1,132 @@
from __future__ import annotations
from sqlalchemy import text
from .database import db
def _update_override_applied_for_run(job_id: int, run_id: int) -> None:
    """Update JobRun.override_applied after run_object_links has been persisted.

    Recomputes the override state for the run and persists the flag only when
    it differs from the stored value, so overview counters (dashboard / run
    checks) reflect overrides whose trigger message only became available
    after object persistence. Best effort: all failures are swallowed after a
    rollback attempt.
    """
    try:
        # Lazy imports to avoid heavy imports / circular references at module import time.
        from .models import Job, JobRun  # noqa
        from .main.routes_shared import _apply_overrides_to_run  # noqa
        job_row = Job.query.get(int(job_id))
        run_row = JobRun.query.get(int(run_id))
        if not job_row or not run_row:
            return
        _status, applied, _lvl = _apply_overrides_to_run(job_row, run_row)
        desired = bool(applied)
        current = bool(getattr(run_row, "override_applied", False))
        if current != desired:
            run_row.override_applied = desired
            db.session.commit()
    except Exception:
        try:
            db.session.rollback()
        except Exception:
            pass
def persist_objects_for_approved_run(customer_id: int, job_id: int, run_id: int, mail_message_id: int) -> int:
    """Persist parsed objects for reporting.

    Copies entries from mail_objects into:
      - customer_objects (unique per customer + object_name)
      - job_object_links (job <-> customer_object)
      - run_object_links (run <-> customer_object with status/error)

    All statements run in a single transaction (``engine.begin``) so a failure
    leaves no partial links behind. Uses PostgreSQL ``ON CONFLICT`` upserts —
    requires the matching unique constraints to exist.

    Returns number of objects processed (rows with an empty name are skipped).
    """
    engine = db.get_engine()
    processed = 0
    with engine.begin() as conn:
        # Fetch all parsed objects for this mail, in insertion order.
        rows = conn.execute(
            text(
                """
                SELECT object_name, object_type, status, error_message
                FROM mail_objects
                WHERE mail_message_id = :mail_message_id
                ORDER BY id
                """
            ),
            {"mail_message_id": mail_message_id},
        ).fetchall()
        for r in rows:
            object_name = (r[0] or "").strip()
            if not object_name:
                continue
            object_type = r[1]
            status = r[2]
            error_message = r[3]
            # 1) Upsert customer_objects and get id.
            #    last_seen_at always advances; object_type is only overwritten
            #    when the new value is non-NULL (COALESCE keeps the old one).
            customer_object_id = conn.execute(
                text(
                    """
                    INSERT INTO customer_objects (customer_id, object_name, object_type, first_seen_at, last_seen_at)
                    VALUES (:customer_id, :object_name, :object_type, NOW(), NOW())
                    ON CONFLICT (customer_id, object_name)
                    DO UPDATE SET
                        last_seen_at = NOW(),
                        object_type = COALESCE(EXCLUDED.object_type, customer_objects.object_type)
                    RETURNING id
                    """
                ),
                {
                    "customer_id": customer_id,
                    "object_name": object_name,
                    "object_type": object_type,
                },
            ).scalar()
            # 2) Upsert job_object_links (touch last_seen_at on re-observation).
            conn.execute(
                text(
                    """
                    INSERT INTO job_object_links (job_id, customer_object_id, first_seen_at, last_seen_at)
                    VALUES (:job_id, :customer_object_id, NOW(), NOW())
                    ON CONFLICT (job_id, customer_object_id)
                    DO UPDATE SET last_seen_at = NOW()
                    """
                ),
                {
                    "job_id": job_id,
                    "customer_object_id": customer_object_id,
                },
            )
            # 3) Upsert run_object_links — re-processing the same run replaces
            #    the stored status/error with the latest parse result.
            conn.execute(
                text(
                    """
                    INSERT INTO run_object_links (run_id, customer_object_id, status, error_message, observed_at)
                    VALUES (:run_id, :customer_object_id, :status, :error_message, NOW())
                    ON CONFLICT (run_id, customer_object_id)
                    DO UPDATE SET
                        status = EXCLUDED.status,
                        error_message = EXCLUDED.error_message,
                        observed_at = NOW()
                    """
                ),
                {
                    "run_id": run_id,
                    "customer_object_id": customer_object_id,
                    "status": status,
                    "error_message": error_message,
                },
            )
            processed += 1
    return processed

View File

@ -0,0 +1,139 @@
from __future__ import annotations
from datetime import datetime
import html
from typing import Tuple, Optional, List, Dict
from ..database import db
from ..models import MailMessage, MailObject
from .threecx import try_parse_3cx
from .synology import try_parse_synology
from .boxafe import try_parse_boxafe
from .nakivo import try_parse_nakivo
from .veeam import try_parse_veeam
from .rdrive import try_parse_rdrive
from .syncovery import try_parse_syncovery
def _sanitize_text(value: object) -> object:
"""Normalize parsed text fields.
Some senders include HTML entities (e.g. "&amp;") in parsed header fields.
We decode those so values are stored/displayed consistently.
"""
if not isinstance(value, str):
return value
# html.unescape also handles numeric entities.
v = html.unescape(value)
# Keep normalization minimal and safe.
return v.strip()
def _clear_mail_objects(msg: MailMessage) -> None:
    """Delete existing MailObject rows for this message.

    Bulk delete via the query API; the caller is responsible for committing.
    """
    MailObject.query.filter_by(mail_message_id=msg.id).delete()
def _store_mail_objects(msg: MailMessage, objects: List[Dict]) -> None:
    """Persist parsed objects for this mail message.

    Each item in ``objects`` is expected to be a dict with keys:
      - name (required; items with an empty/missing name are skipped)
      - type / object_type (optional)
      - status (optional)
      - error_message (optional)

    Rows are only added to the session; committing is the caller's job.
    """
    for item in objects or []:
        name = (item.get("name") or "").strip()
        if not name:
            continue
        object_type = item.get("type") or item.get("object_type") or None
        if isinstance(object_type, str):
            object_type = object_type.strip() or None
        # Normalize empty strings to None. (Fix: the original expression was
        # "(item.get('status') or None) or None" — the second "or None" was
        # redundant and has been removed; behavior is unchanged.)
        status = item.get("status") or None
        error_message = item.get("error_message") or None
        db.session.add(
            MailObject(
                mail_message_id=msg.id,
                object_name=name,
                object_type=object_type,
                status=status,
                error_message=error_message,
            )
        )
def parse_mail_message(msg: MailMessage) -> None:
    """Parse a single MailMessage and update its parsed fields.

    This function is intentionally conservative: if no parser matches the
    message, it simply sets parse_result to "no_match". Any unexpected errors
    are caught and stored on the message so they are visible in the UI.
    The caller is responsible for committing the session.
    """
    # Reset parse metadata first so stale values never survive a re-parse.
    msg.backup_software = None
    msg.backup_type = None
    msg.job_name = None
    msg.overall_status = None
    msg.overall_message = None
    # Optional parsed storage metrics (for graphing capacity usage); only
    # reset when the deployed DB model actually has these columns.
    if hasattr(msg, 'storage_used_bytes'):
        msg.storage_used_bytes = None
    if hasattr(msg, 'storage_capacity_bytes'):
        msg.storage_capacity_bytes = None
    if hasattr(msg, 'storage_free_bytes'):
        msg.storage_free_bytes = None
    if hasattr(msg, 'storage_free_percent'):
        msg.storage_free_percent = None
    msg.parse_error = None
    msg.parsed_at = datetime.utcnow()
    # Clear existing objects for this mail (if it has an id already)
    if msg.id is not None:
        _clear_mail_objects(msg)
    # Ordered parser chain: the first parser that claims the message wins.
    # (Refactored from a repetitive "if not handled" cascade into a loop;
    # order is unchanged and still significant.)
    parsers = (
        try_parse_3cx,
        try_parse_synology,
        try_parse_boxafe,
        try_parse_rdrive,
        try_parse_nakivo,
        try_parse_veeam,
        try_parse_syncovery,
    )
    handled = False
    result: Dict = {}
    objects: List[Dict] = []
    try:
        for parser in parsers:
            handled, result, objects = parser(msg)
            if handled:
                break
    except Exception as exc:
        # Surface parser crashes in the UI instead of failing the whole batch.
        msg.parse_result = "error"
        msg.parse_error = str(exc)[:500]
        return
    if not handled:
        # No parser recognised this message
        msg.parse_result = "no_match"
        return
    # Apply parsed result (decode HTML entities in free-text fields).
    msg.backup_software = _sanitize_text(result.get("backup_software"))
    msg.backup_type = _sanitize_text(result.get("backup_type"))
    msg.job_name = _sanitize_text(result.get("job_name"))
    msg.overall_status = result.get("overall_status")
    msg.overall_message = _sanitize_text(result.get("overall_message"))
    # Apply optional storage metrics if supported by the DB model
    if hasattr(msg, 'storage_used_bytes'):
        msg.storage_used_bytes = result.get('storage_used_bytes')
    if hasattr(msg, 'storage_capacity_bytes'):
        msg.storage_capacity_bytes = result.get('storage_capacity_bytes')
    if hasattr(msg, 'storage_free_bytes'):
        msg.storage_free_bytes = result.get('storage_free_bytes')
    if hasattr(msg, 'storage_free_percent'):
        msg.storage_free_percent = result.get('storage_free_percent')
    msg.parse_result = "ok"
    # Store parsed objects (they will be committed by the caller)
    if msg.id is not None:
        _store_mail_objects(msg, objects or [])

View File

@ -0,0 +1,109 @@
from __future__ import annotations
import re
from typing import Dict, Tuple, List, Optional
from ..models import MailMessage
# Subject format: "[<severity>][Boxafe] Notification from your device: <device>".
# Captures the severity token, the literal app name, and the device name.
_SUBJECT_RE = re.compile(
    r"^\[(?P<severity>[^\]]+)\]\[(?P<app>Boxafe)\]\s+Notification\s+from\s+your\s+device:\s*(?P<device>.+?)\s*$",
    flags=re.IGNORECASE,
)
def _normalize_html(value: str) -> str:
"""Normalize HTML content."""
if not value:
return ""
if isinstance(value, bytes):
try:
value = value.decode("utf-8", errors="ignore")
except Exception:
value = str(value)
value = value.replace("\x00", "").replace("\ufeff", "")
return value
def _strip_html_tags(value: str) -> str:
if not value:
return ""
value = re.sub(r"<\s*br\s*/?>", "\n", value, flags=re.IGNORECASE)
value = re.sub(r"</p>", "\n", value, flags=re.IGNORECASE)
value = re.sub(r"</div>", "\n", value, flags=re.IGNORECASE)
value = re.sub(r"<[^>]+>", "", value)
value = re.sub(r"[ \t\r\f\v]+", " ", value)
value = re.sub(r"\n\s*\n+", "\n", value)
return value.strip()
def _map_status(raw: str) -> Optional[str]:
if not raw:
return None
r = raw.strip().lower()
if r in {"warning", "warn"}:
return "Warning"
if r in {"success", "succeeded", "ok", "information", "info"}:
return "Success"
if r in {"failed", "failure", "error", "critical"}:
return "Failed"
# fallback: capitalize first letter
return raw.strip().capitalize() or None
def _extract_field(text: str, label: str) -> Optional[str]:
if not text:
return None
m = re.search(rf"(?im)^\s*{re.escape(label)}\s*:\s*(.+?)\s*$", text)
if not m:
return None
val = (m.group(1) or "").strip()
return val or None
def try_parse_boxafe(msg: MailMessage) -> Tuple[bool, Dict, List[Dict]]:
    """Parse Boxafe notification mails.

    Matches on the subject pattern ``[<severity>][Boxafe] Notification from
    your device: <device>``. Returns ``(handled, result, objects)``; objects
    are never produced for Boxafe (the list is always empty).
    """
    subject = (getattr(msg, "subject", None) or "").strip()
    m = _SUBJECT_RE.match(subject)
    if not m:
        return False, {}, []
    severity = (m.group("severity") or "").strip()
    device = (m.group("device") or "").strip()
    # NB: local name shadows nothing here — the html module is not imported in this file.
    html = _normalize_html(getattr(msg, "html_body", None) or "")
    text = _strip_html_tags(html)
    category = _extract_field(text, "Category")
    body_severity = _extract_field(text, "Severity")
    message = _extract_field(text, "Message")
    # Body severity wins over the subject severity when both are present.
    overall_status = _map_status(body_severity or severity)
    backup_type = category or "Shared Drives"
    # Boxafe "Domain Accounts" notifications are not job/object based.
    # Example: "[Boxafe] Backed up Email for 0 user(s)."
    # Refine the backup type from the message text where possible.
    if (category or "").strip().lower() == "domain accounts":
        if message and re.search(r"\bbacked\s+up\s+email\b", message, flags=re.IGNORECASE):
            backup_type = "Domain Accounts Email"
        elif message and re.search(r"\bbacked\s+up\s+contact\b", message, flags=re.IGNORECASE):
            backup_type = "Domain Accounts Contact"
        elif message and re.search(r"\bbacked\s+up\s+drive\b", message, flags=re.IGNORECASE):
            backup_type = "Domain Accounts Drive"
        elif message and re.search(r"\bbacked\s+up\s+calendar\b", message, flags=re.IGNORECASE):
            backup_type = "Domain Accounts Calendar"
        else:
            backup_type = "Domain Accounts"
    result: Dict[str, object] = {
        "backup_software": "Boxafe",
        "backup_type": backup_type,
        "job_name": device,
        "overall_status": overall_status,
        "overall_message": message or (body_severity or severity),
    }
    objects: List[Dict] = []
    return True, result, objects

View File

@ -0,0 +1,198 @@
from __future__ import annotations
import re
import html as htmllib
from typing import Dict, Tuple, List
from ..models import MailMessage
# Maps lower-cased NAKIVO status tokens onto the app's canonical status values.
# NOTE(review): failures map to "Error" here while e.g. the Boxafe parser maps
# them to "Failed" — presumably intentional, but worth confirming against the
# application's status conventions.
_STATUS_MAP = {
    "successful": "Success",
    "success": "Success",
    "failed": "Error",
    "failure": "Error",
    "warning": "Warning",
    "warnings": "Warning",
    "partially successful": "Warning",
    "partial": "Warning",
}
def _normalize_html(html: str) -> str:
if not html:
return ""
if isinstance(html, bytes):
try:
html = html.decode("utf-8", errors="ignore")
except Exception:
html = str(html)
html = html.replace("\x00", "")
html = html.replace("\ufeff", "")
return html
def _html_to_text(html: str) -> str:
"""Lightweight HTML to text conversion suitable for regex parsing."""
html = _normalize_html(html)
if not html:
return ""
# Remove script/style blocks
html = re.sub(r"(?is)<(script|style).*?>.*?</\1>", " ", html)
# Insert newlines / tabs for table-ish structures
html = re.sub(r"(?i)<br\s*/?>", "\n", html)
html = re.sub(r"(?i)</p\s*>", "\n", html)
html = re.sub(r"(?i)</tr\s*>", "\n", html)
html = re.sub(r"(?i)</(td|th)\s*>", "\t", html)
# Strip remaining tags
html = re.sub(r"(?s)<.*?>", " ", html)
text = htmllib.unescape(html)
# Normalize whitespace but keep tabs/newlines
text = text.replace("\r", "")
text = re.sub(r"[ ]+", " ", text)
text = re.sub(r"\t+", "\t", text)
text = re.sub(r"\n+", "\n", text)
text = re.sub(r" *\t *", "\t", text)
text = re.sub(r" *\n *", "\n", text)
return text.strip()
# Subject format: '"<job name>" job: <status>' (e.g. '"exchange01" job: Successful').
_SUBJECT_RE = re.compile(r'^"(?P<job>.+?)"\s+job:\s+(?P<status>.+?)\s*$', re.IGNORECASE)
def _is_nakivo_vmware_job(subject: str, text: str) -> bool:
subj = (subject or "").lower()
if " job:" in subj and subj.startswith('"') and "nakivo" in (text or "").lower():
return True
# Many NAKIVO mails contain the backup type in the body navigation:
t = (text or "").lower()
has_type = ("backup job for vmware" in t) or ("replication job for vmware" in t)
return has_type and ("job run report" in t or "nakivo" in t)
def _extract_backup_type(text: str) -> str:
if not text:
return "Backup job for VMware"
m = re.search(
r"\b(?P<t>(backup job for vmware|replication job for vmware))\b",
text,
re.IGNORECASE,
)
if not m:
return "Backup job for VMware"
tt = (m.group("t") or "").strip().lower()
if "replication" in tt:
return "Replication job for VMware"
return "Backup job for VMware"
def _extract_vm_objects(text: str) -> List[str]:
"""Extract VM/object names from the 'Virtual Machines' section."""
if not text:
return []
# Keep order and avoid duplicates.
objects: List[str] = []
seen = set()
# Work line-based because the HTML is heavily table driven.
lines = [ln.strip() for ln in text.split("\n")]
# Find the *last* 'Virtual Machines' section header. The report has a 'Contents' navigation
# near the top that also contains 'Virtual Machines', which must be ignored.
start_idx = None
for i, ln in enumerate(lines):
if re.fullmatch(r"virtual machines:?", ln, re.IGNORECASE):
start_idx = i + 1
if start_idx is None:
return []
stop_re = re.compile(
r"^(summary|target storage|alarms\s*&\s*notifications|last run:?|data\s*&\s*speed:?|duration:?|status:?|priority:?|started:?|finished:?|contents)$",
re.IGNORECASE,
)
name_re = re.compile(r"^[A-Za-z0-9][A-Za-z0-9._-]{1,}$")
for ln in lines[start_idx:]:
if not ln:
continue
if stop_re.match(ln):
break
# VM/object names are typically on their own line.
if name_re.match(ln) and ln.lower() not in {"virtual", "machines"}:
if ln not in seen:
seen.add(ln)
objects.append(ln)
return objects
def try_parse_nakivo(msg: MailMessage) -> Tuple[bool, Dict, List[Dict]]:
    """Parse NAKIVO VMware run reports (Backup and Replication).

    Returns ``(handled, result, objects)``. Job name and status come from the
    subject when possible, with body-based fallbacks; per-VM objects inherit
    the overall status since the report does not expose per-VM outcomes here.
    """
    subject = getattr(msg, "subject", None) or ""
    html_body = getattr(msg, "html_body", None) or ""
    text = _html_to_text(html_body)
    if not _is_nakivo_vmware_job(subject, text):
        return False, {}, []
    job_name = None
    status_raw = None
    m = _SUBJECT_RE.match(subject.strip())
    if m:
        job_name = (m.group("job") or "").strip()
        status_raw = (m.group("status") or "").strip()
    # Fallback: first occurrence of the backup type is followed by the job name in many templates.
    if not job_name:
        # Try to find a line with the backup type and then a nearby token that looks like a hostname/job name.
        # This is intentionally conservative.
        mt = re.search(
            r"(backup|replication) job for vmware\b[\s\S]{0,400}?(?P<job>[A-Za-z0-9._-]{3,})\t",
            text,
            re.IGNORECASE,
        )
        if mt:
            job_name = mt.group("job").strip()
    if not status_raw:
        # Look for a standalone status token near the top of the report
        ms = re.search(r"\b(successful|failed|warning|warnings|partially successful)\b", text, re.IGNORECASE)
        if ms:
            status_raw = ms.group(1)
    # Unmapped tokens deliberately stay "Unknown" rather than guessing.
    overall_status = "Unknown"
    if status_raw:
        overall_status = _STATUS_MAP.get(status_raw.strip().lower(), "Unknown")
    backup_type = _extract_backup_type(text)
    result: Dict = {
        "backup_software": "NAKIVO",
        "backup_type": backup_type,
        "job_name": job_name or "",
        "overall_status": overall_status,
        "overall_message": overall_status,
    }
    # Each extracted VM becomes an object carrying the overall status.
    objects: List[Dict] = []
    for obj in _extract_vm_objects(text):
        objects.append(
            {
                "name": obj,
                "status": overall_status,
                "error_message": "" if overall_status == "Success" else overall_status,
            }
        )
    # Fallback: if we couldn't extract objects, use the job name.
    if not objects and job_name:
        objects.append(
            {
                "name": job_name,
                "status": overall_status,
                "error_message": "" if overall_status == "Success" else overall_status,
            }
        )
    return True, result, objects

View File

@ -0,0 +1,115 @@
from __future__ import annotations
import re
from typing import Dict, Tuple, List
from ..models import MailMessage
def _strip_html_tags(value: str) -> str:
"""Minimal HTML-to-text conversion for parser fallback."""
if not value:
return ""
v = re.sub(r"<\s*br\s*/?>", "\n", value, flags=re.IGNORECASE)
v = re.sub(r"</p\s*>", "\n", v, flags=re.IGNORECASE)
v = re.sub(r"</tr\s*>", "\n", v, flags=re.IGNORECASE)
v = re.sub(r"</t[dh]\s*>", "\n", v, flags=re.IGNORECASE)
v = re.sub(r"<[^>]+>", "", v)
return v
def _get_body_text(msg: MailMessage) -> str:
    """Prefer the plain-text body; fall back to stripped HTML, then empty string."""
    plain = msg.text_body
    if plain:
        return plain
    markup = msg.html_body
    if markup:
        return _strip_html_tags(markup)
    return ""
def _parse_overall_status(subject: str, body: str) -> str:
s = (subject or "").lower()
b = (body or "").lower()
if "completed successfully" in s or re.search(r"\bsuccess\b", b):
return "Success"
if "completed with errors" in s or "failed" in s or re.search(r"\berror\b", b):
return "Error"
if "completed with warnings" in s or re.search(r"\bwarning\b", b):
return "Warning"
return "Unknown"
def _parse_job_name(subject: str) -> str:
"""Extract job name from subject.
Example subject:
"Process completed successfully. RDrive SRV001 Autobedrijf Eric Boom"
We take the first two tokens after the first dot as the job name.
"""
s = (subject or "").strip()
if "." in s:
s = s.split(".", 1)[1].strip()
# Take first two words, which in practice yields e.g. "RDrive SRV001".
tokens = [t for t in re.split(r"\s+", s) if t]
if len(tokens) >= 2:
return f"{tokens[0]} {tokens[1]}".strip()
return (s or "").strip() or "R-Drive Image"
def _parse_primary_object(job_name: str, subject: str, body: str) -> str:
"""Best-effort extraction of the primary machine/object name.
Prefer the second token of the parsed job name (e.g. SRV001), otherwise
attempt to find a host-like token in the subject/body.
"""
parts = [p for p in re.split(r"\s+", (job_name or "").strip()) if p]
if len(parts) >= 2:
return parts[1]
# Fallback: look for common host tokens like SRV001.
hay = "\n".join([(subject or ""), (body or "")])
m = re.search(r"\b([A-Z]{2,6}\d{1,5})\b", hay)
if m:
return m.group(1)
return ""
def try_parse_rdrive(msg: MailMessage) -> Tuple[bool, Dict, List[Dict]]:
    """Parse R-Drive Image report mails.

    Identification combines subject, body and sender heuristics. Returns
    ``(handled, result, objects)``; at most one object (the primary host) is
    produced, always with an empty error message.
    """
    subject = (msg.subject or "").strip()
    from_addr = (msg.from_address or "").lower()
    body = _get_body_text(msg)
    # Identification: subject pattern and/or body signature.
    is_rdrive = (
        "process completed" in subject.lower()
        or "r-drive image" in body.lower()
        or "rtt mailer" in body.lower()
        or "r-drive" in from_addr
    )
    if not is_rdrive:
        return False, {}, []
    overall_status = _parse_overall_status(subject, body)
    job_name = _parse_job_name(subject)
    obj_name = _parse_primary_object(job_name, subject, body)
    objects: List[Dict] = []
    if obj_name:
        objects.append({"name": obj_name, "status": overall_status, "error_message": ""})
    overall_message = ""
    # Keep overall message compact; use the first sentence of the subject if present.
    if subject:
        overall_message = subject.split(".", 1)[0].strip().rstrip(".")
    result: Dict = {
        "backup_software": "R-Drive Image",
        "backup_type": "R-Drive Image",
        "job_name": job_name,
        "overall_status": overall_status,
        "overall_message": overall_message,
    }
    return True, result, objects

View File

@ -0,0 +1,445 @@
# Static catalogue of supported mail parsers, used for documentation/UI.
# Each entry describes one parser: identity (name/software/types), the
# evaluation "order", rough match hints, and a worked example of a parsed mail.
# NOTE(review): this list appears to be descriptive metadata — the actual
# parsing lives in the try_parse_* modules; confirm nothing dispatches on it.
PARSER_DEFINITIONS = [
    {
        "name": "veeam_backup_job",
        "backup_software": "Veeam",
        "backup_types": ["Backup Job"],
        "order": 10,
        "enabled": True,
        "match": {
            "from_contains": "veeam",
            "subject_contains": "Backup Job",
        },
        "description": "Parses standard Veeam Backup Job reports.",
        "example": {
            "subject": 'Veeam Backup Job \"Daily-VM-Backup\" finished with WARNING',
            "from_address": "veeam@customer.local",
            "body_snippet": "Job 'Daily-VM-Backup' finished with warnings. See details for each VM below...",
            "parsed_result": {
                "backup_software": "Veeam",
                "backup_type": "Backup Job",
                "job_name": "Daily-VM-Backup",
                "objects": [
                    {
                        "name": "VM-APP01",
                        "status": "Success",
                        "error_message": "",
                    },
                    {
                        "name": "VM-DB01",
                        "status": "Warning",
                        "error_message": "Low disk space warning",
                    },
                    {
                        "name": "VM-FS01",
                        "status": "Failed",
                        "error_message": "VSS error 0x800423f4",
                    },
                ],
            },
        },
    },
    {
        "name": "veeam_replication_job",
        "backup_software": "Veeam",
        "backup_types": ["Replication job", "Replica Job"],
        "order": 15,
        "enabled": True,
        "match": {
            "from_contains": "veeam",
            "subject_contains": "Replication",
        },
        "description": "Parses Veeam Replication/Replica job reports.",
        "example": {
            "subject": 'Veeam Replication job "DR-Replicate" finished successfully',
            "from_address": "veeam@customer.local",
            "body_snippet": "Replication job: DR-Replicate finished successfully.",
            "parsed_result": {
                "backup_software": "Veeam",
                "backup_type": "Replication job",
                "job_name": "DR-Replicate",
                "objects": [
                    {
                        "name": "VM-APP01",
                        "status": "Success",
                        "error_message": "",
                    }
                ],
            },
        },
    },
    {
        "name": "veeam_agent_backup",
        "backup_software": "Veeam",
        "backup_types": ["Agent Backup job"],
        "order": 16,
        "enabled": True,
        "match": {
            "from_contains": "veeam",
            "subject_contains": "Agent Backup",
        },
        "description": "Parses Veeam Agent Backup job reports.",
        "example": {
            "subject": 'Veeam Agent Backup job "SERVER01" finished with WARNING',
            "from_address": "veeam@customer.local",
            "body_snippet": "Agent Backup job: SERVER01 finished with warnings.",
            "parsed_result": {
                "backup_software": "Veeam",
                "backup_type": "Agent Backup job",
                "job_name": "SERVER01",
                "objects": [
                    {
                        "name": "SERVER01",
                        "status": "Warning",
                        "error_message": "",
                    }
                ],
            },
        },
    },
    {
        "name": "veeam_m365_backup",
        "backup_software": "Veeam",
        "backup_types": ["Veeam Backup for Microsoft 365"],
        "order": 17,
        "enabled": True,
        "match": {
            "from_contains": "veeam",
            "subject_contains": "Microsoft 365",
        },
        "description": "Parses Veeam Backup for Microsoft 365 job reports.",
        "example": {
            "subject": 'Veeam Backup for Microsoft 365 "Tenant - Daily M365" finished successfully',
            "from_address": "veeam@customer.local",
            "body_snippet": "Veeam Backup for Microsoft 365 report for job 'Daily M365'...",
            "parsed_result": {
                "backup_software": "Veeam",
                "backup_type": "Veeam Backup for Microsoft 365",
                "job_name": "Daily M365",
                "objects": [
                    {
                        "name": "https://tenant.sharepoint.com",
                        "status": "Success",
                        "error_message": "",
                    }
                ],
            },
        },
    },
    {
        "name": "veeam_backup_copy",
        "backup_software": "Veeam",
        "backup_types": ["Backup Copy Job"],
        "order": 20,
        "enabled": True,
        "match": {
            "from_contains": "veeam",
            "subject_contains": "Backup Copy Job",
        },
        "description": "Parses Veeam Backup Copy Job status mails.",
        "example": {
            "subject": 'Veeam Backup Copy Job \"Offsite-Repository\" finished successfully',
            "from_address": "veeam@customer.local",
            "body_snippet": "Job 'Offsite-Repository' finished successfully.",
            "parsed_result": {
                "backup_software": "Veeam",
                "backup_type": "Backup Copy Job",
                "job_name": "Offsite-Repository",
                "objects": [
                    {
                        "name": "Backup Copy Chain",
                        "status": "Success",
                        "error_message": "",
                    }
                ],
            },
        },
    },
    # NOTE(review): this entry and "nakivo_vmware_backup_job" below both use
    # order 30 and overlapping match rules — confirm whether one is obsolete.
    {
        "name": "nakivo_vmware_backup",
        "backup_software": "NAKIVO",
        "backup_types": ["Backup job for VMware"],
        "order": 30,
        "enabled": True,
        "match": {
            "from_contains": "nakivo",
            "subject_contains": "Backup job",
        },
        "description": "Parses NAKIVO VMware backup job notifications.",
        "example": {
            "subject": "Backup job for VMware 'Daily-Infra-Backup' completed with warnings",
            "from_address": "backup@nakivo.local",
            "body_snippet": "Job 'Daily-Infra-Backup' has completed with warnings.",
            "parsed_result": {
                "backup_software": "NAKIVO",
                "backup_type": "Backup job for VMware",
                "job_name": "Daily-Infra-Backup",
                "objects": [
                    {
                        "name": "ESX-Host01",
                        "status": "Success",
                        "error_message": "",
                    },
                    {
                        "name": "VM-LOG01",
                        "status": "Warning",
                        "error_message": "Low free space on datastore",
                    },
                ],
            },
        },
    },
    {
        "name": "nakivo_vmware_replication",
        "backup_software": "NAKIVO",
        "backup_types": ["Replication job for VMware"],
        "order": 40,
        "enabled": True,
        "match": {
            "from_contains": "nakivo",
            "subject_contains": "Replication job",
        },
        "description": "Parses NAKIVO VMware replication job notifications.",
        "example": {
            "subject": "Replication job for VMware 'DR-Replicate' completed successfully",
            "from_address": "backup@nakivo.local",
            "body_snippet": "Job 'DR-Replicate' has completed successfully.",
            "parsed_result": {
                "backup_software": "NAKIVO",
                "backup_type": "Replication job for VMware",
                "job_name": "DR-Replicate",
                "objects": [
                    {
                        "name": "VM-APP01",
                        "status": "Success",
                        "error_message": "",
                    },
                ],
            },
        },
    },
    {
        "name": "panel3_failure",
        "backup_software": "Panel3",
        "backup_types": ["Backup Job"],
        "order": 50,
        "enabled": True,
        "match": {
            "from_contains": "panel3",
            "subject_contains": "Backup failed",
        },
        "description": "Parses Panel3 backup failure notifications. No mail = success.",
        "example": {
            "subject": "Panel3 backup failed on 'Webserver01'",
            "from_address": "backup@panel3.local",
            "body_snippet": "The backup on server 'Webserver01' has failed.",
            "parsed_result": {
                "backup_software": "Panel3",
                "backup_type": "Backup Job",
                "job_name": "Webserver01-backup",
                "objects": [
                    {
                        "name": "Webserver01",
                        "status": "Failed",
                        "error_message": "Backup process exited with error code 1",
                    }
                ],
            },
        },
    },
    {
        "name": "synology_active_backup",
        "backup_software": "Synology",
        "backup_types": ["Active Backup"],
        "order": 25,
        "enabled": True,
        "match": {
            "subject_contains": "Active Backup",
        },
        "description": "Parses Synology Active Backup notifications (e.g. Active Backup for Google Workspace).",
        "example": {
            "subject": "NAS - Active Backup for Google Workspace - back-uptaak [Google D-Two] is gedeeltelijk voltooid",
            "from_address": "nas@customer.local",
            "body_snippet": "Back-up [Google D-Two] is voltooid, maar van sommige items kon geen back-up worden gemaakt.\n- Mijn schijf: succes: 0; waarschuwing: 11; fout: 0\n...",
            "parsed_result": {
                "backup_software": "Synology",
                "backup_type": "Active Backup",
                "job_name": "Google D-Two",
                "overall_status": "Warning",
                "overall_message": "11 warning(s)",
                "objects": [],
            },
        },
    },
    {
        "name": "synology_hyperbackup",
        "backup_software": "Synology",
        "backup_types": ["Hyperbackup"],
        "order": 27,
        "enabled": True,
        "match": {
            "subject_contains_any": ["Hyper Backup", "Gegevensback-uptaak", "Data backup task"],
            "body_contains_any": ["Hyper Backup", "Taaknaam:", "Task name:"],
        },
        "description": "Parses Synology Hyper Backup task notifications.",
        "example": {
            "subject": "Gegevensback-uptaak op NAS01 is mislukt",
            "from_address": "nas01@yourdomain",
            "body_snippet": "Gegevensback-uptaak op NAS01 is mislukt\nTaaknaam: Data backup - NAS thuis\nGa naar Hyper Backup > Logboek voor meer informatie.",
            "parsed_result": {
                "backup_software": "Synology",
                "backup_type": "Hyperbackup",
                "job_name": "Data backup - NAS thuis",
                "overall_status": "Failed",
                "overall_message": "Failed",
                "objects": [],
            },
        },
    },
    {
        "name": "synology_rsync",
        "backup_software": "Synology",
        "backup_types": ["R-Sync"],
        "order": 28,
        "enabled": True,
        "match": {
            "subject_contains_any": ["R-Sync", "Rsync"],
            "body_contains_any": ["Back-uptaak:", "Backup task:"],
        },
        "description": "Parses Synology Network Backup / R-Sync task notifications.",
        "example": {
            "subject": "Synology NAS Network backup - R-Sync ASP-NAS02 completed",
            "from_address": "nas@customer.local",
            "body_snippet": "Uw back-uptaak R-Sync ASP-NAS02 is nu voltooid.\n\nBack-uptaak: R-Sync ASP-NAS02\n...",
            "parsed_result": {
                "backup_software": "Synology",
                "backup_type": "R-Sync",
                "job_name": "R-Sync ASP-NAS02",
                "overall_status": "Success",
                "overall_message": "Success",
                "objects": [],
            },
        },
    },
    {
        "name": "nakivo_vmware_backup_job",
        "backup_software": "NAKIVO",
        "backup_types": ["Backup job for VMware"],
        "order": 30,
        "enabled": True,
        "match": {
            "from_contains": "nakivo",
            "subject_contains": "job:",
        },
        "description": "Parses NAKIVO Backup & Replication reports for VMware backup jobs.",
        "example": {
            "subject": '"exchange01.kuiperbv.nl" job: Successful',
            "from_address": "NAKIVO Backup & Replication <administrator@customer.local>",
            "body_snippet": "Job Run Report... Backup job for VMware ... Successful",
            "parsed_result": {
                "backup_software": "NAKIVO",
                "backup_type": "Backup job for VMware",
                "job_name": "exchange01.kuiperbv.nl",
                "objects": [
                    {
                        "name": "exchange01.kuiperbv.nl",
                        "status": "Success",
                        "error_message": "",
                    }
                ],
            },
        },
    },
    {
        "name": "syncovery_backup",
        "backup_software": "Syncovery",
        "backup_types": ["Backup Job"],
        "order": 60,
        "enabled": True,
        "match": {
            "from_contains": "syncovery",
            "subject_contains": "Profile",
        },
        "description": "Parses Syncovery profile result mails.",
        "example": {
            "subject": "Syncovery Profile 'Fileserver01' completed successfully",
            "from_address": "syncovery@backup.local",
            "body_snippet": "Profile 'Fileserver01' completed successfully.",
            "parsed_result": {
                "backup_software": "Syncovery",
                "backup_type": "Backup Job",
                "job_name": "Fileserver01",
                "objects": [
                    {
                        "name": "Fileserver01",
                        "status": "Success",
                        "error_message": "",
                    }
                ],
            },
        },
    },
    {
        "name": "rdrive_image",
        "backup_software": "R-Drive Image",
        "backup_types": ["R-Drive Image"],
        "order": 65,
        "enabled": True,
        "match": {
            "from_contains": "rdrive",
            "subject_contains": "Process completed",
        },
        "description": "Parses R-Drive Image completion mails.",
        "example": {
            "subject": "Process completed successfully. RDrive SRV001 Autobedrijf Eric Boom",
            "from_address": "rdrive@customer.local",
            "body_snippet": "R-Drive Image 6.0 ... Operation completed successfully",
            "parsed_result": {
                "backup_software": "R-Drive Image",
                "backup_type": "R-Drive Image",
                "job_name": "RDrive SRV001",
                "objects": [
                    {
                        "name": "SRV001",
                        "status": "Success",
                        "error_message": "",
                    }
                ],
            },
        },
    },
    {
        "name": "syncovery_daily_summary",
        "backup_software": "Syncovery",
        "backup_types": ["Syncovery"],
        "order": 70,
        "enabled": True,
        "match": {
            "from_contains": "syncovery",
            "subject_contains": "Daily Summary",
        },
        "description": "Parses Syncovery Daily Summary reports.",
        "example": {
            "subject": "[Syncovery v10.16.15] Daily Summary - ASP-Management.asp.scalahosting.solutions",
            "from_address": "syncovery@scalahosting.solutions",
            "body_snippet": "Syncovery v10... Daily Summary ... Jobs Run Fine: 1 ...",
            "parsed_result": {
                "backup_software": "Syncovery",
                "backup_type": "Syncovery",
                "job_name": "ASP-Management.asp.scalahosting.solutions",
                "objects": [
                    {
                        "name": "Xelion SCP Backup JvG",
                        "status": "Success",
                        "error_message": "",
                    }
                ],
            },
        },
    },
]

View File

@ -0,0 +1,208 @@
from __future__ import annotations
import re
import html
from typing import Dict, Tuple, List
from ..models import MailMessage
def _strip_html_tags(value: str) -> str:
"""Minimal HTML-to-text conversion for Syncovery daily summary mails."""
if not value:
return ""
v = re.sub(r"<\s*br\s*/?>", "\n", value, flags=re.IGNORECASE)
v = re.sub(r"</p\s*>", "\n", v, flags=re.IGNORECASE)
v = re.sub(r"</tr\s*>", "\n", v, flags=re.IGNORECASE)
v = re.sub(r"</t[dh]\s*>", "\n", v, flags=re.IGNORECASE)
v = re.sub(r"<[^>]+>", "", v)
# Decode entities (&nbsp; etc.) and normalize non-breaking spaces.
v = html.unescape(v)
v = v.replace("\xa0", " ")
return v
def _get_body_text(msg: MailMessage) -> str:
    """Return the plain-text body; fall back to converted HTML, else ""."""
    if msg.text_body:
        return msg.text_body
    return _strip_html_tags(msg.html_body) if msg.html_body else ""
def _normalize_ws(value: str) -> str:
return re.sub(r"\s+", " ", (value or "").strip())
def _parse_job_name(subject: str, body: str) -> str:
    """Extract the Syncovery instance/host name.

    Prefers the text after "Daily Summary -" in the subject; falls back to
    the "Running on :" line in the body; defaults to "Syncovery".
    """
    subj = _normalize_ws(subject)
    hit = re.search(r"Daily\s+Summary\s*-\s*(.+)$", subj, flags=re.IGNORECASE)
    if hit is None:
        hit = re.search(
            r"Running\s+on\s*:\s*([^\s]+)", _normalize_ws(body), flags=re.IGNORECASE
        )
    return hit.group(1).strip() if hit else "Syncovery"
def _parse_int_stat(body: str, label: str) -> int:
b = body or ""
# e.g. "Jobs Run Fine: 1" or "Jobs Run Fine: &nbsp; 1"
m = re.search(rf"{re.escape(label)}\s*:\s*(\d+)\b", b, flags=re.IGNORECASE)
if m:
try:
return int(m.group(1))
except Exception:
return 0
# Fallback: allow label without colon when HTML-to-text conversion got messy
m = re.search(rf"{re.escape(label)}\s+(\d+)\b", b, flags=re.IGNORECASE)
if m:
try:
return int(m.group(1))
except Exception:
return 0
return 0
def _parse_objects(body: str) -> List[Dict]:
"""Extract per-job lines listed in the Daily Summary."""
objects: List[Dict] = []
lines = [l.strip() for l in (body or "").splitlines() if l.strip()]
# Syncovery daily summary has a "Jobs Run Fine" section with lines like:
# "27-12-2025 23:00:01 Xelion SCP Backup JvG 1 copied ..."
# Also a "Jobs Run With Errors" section in similar format.
current_status: str | None = None
i = 0
while i < len(lines):
line = lines[i]
low = line.lower()
if "jobs run fine" in low:
current_status = "Success"
i += 1
continue
if "jobs run with errors" in low:
current_status = "Error"
i += 1
continue
if current_status is None:
i += 1
continue
# Common HTML-to-text output splits table cells across lines:
# 1) timestamp
# 2) job name
# 3) details
ts = re.match(r"^(\d{2}[-/.]\d{2}[-/.]\d{4})\s+(\d{2}:\d{2}:\d{2})\b", line)
if ts:
job = ""
details = ""
if i + 1 < len(lines):
job = lines[i + 1].strip()
if i + 2 < len(lines):
details = lines[i + 2].strip()
if job:
objects.append(
{
"name": job,
"status": current_status,
"error_message": "" if current_status == "Success" else details,
}
)
i += 3
continue
m = re.match(
r"^(\d{2}[-/.]\d{2}[-/.]\d{4})\s+(\d{2}:\d{2}:\d{2})\s+(.+?)\s{2,}(.+)$",
line,
)
if m:
job = m.group(3).strip()
details = m.group(4).strip()
if job:
objects.append(
{
"name": job,
"status": current_status,
"error_message": "" if current_status == "Success" else details,
}
)
continue
# Looser match when spacing collapses: timestamp + job + "Duration:" marker
m = re.match(
r"^(\d{2}[-/.]\d{2}[-/.]\d{4})\s+(\d{2}:\d{2}:\d{2})\s+(.+?)\s+Duration\s*:\s*(.+)$",
line,
flags=re.IGNORECASE,
)
if m:
job = m.group(3).strip()
if job:
objects.append({"name": job, "status": current_status, "error_message": ""})
i += 1
# De-duplicate while preserving order
seen = set()
uniq: List[Dict] = []
for o in objects:
key = (o.get("name") or "").strip().lower()
if not key or key in seen:
continue
seen.add(key)
uniq.append(o)
return uniq
def try_parse_syncovery(msg: MailMessage) -> Tuple[bool, Dict, List[Dict]]:
    """Parse Syncovery Daily Summary mails.

    Returns (matched, result_dict, objects).  Matching is intentionally
    broad: any mention of "syncovery" in subject/sender/body, or a
    "daily summary" subject, is treated as a Syncovery report.
    """
    subject_raw = (msg.subject or "").strip()
    subject_low = _normalize_ws(subject_raw).lower()
    sender_low = (msg.from_address or "").lower()
    body = _get_body_text(msg)

    looks_like_syncovery = (
        "syncovery" in subject_low
        or "daily summary" in subject_low
        or "syncovery" in sender_low
        or "syncovery" in (body or "").lower()
    )
    if not looks_like_syncovery:
        return False, {}, []

    job_name = _parse_job_name(subject_raw, body)

    # Headline statistics drive the overall status:
    # errors > scheduled-not-run (warning) > fine (success) > unknown.
    fine = _parse_int_stat(body, "Jobs Run Fine")
    errors = _parse_int_stat(body, "Jobs Run With Errors")
    not_run = _parse_int_stat(body, "Scheduled Jobs Not Run")
    if errors > 0:
        overall_status = "Error"
    elif not_run > 0:
        overall_status = "Warning"
    elif fine > 0:
        overall_status = "Success"
    else:
        overall_status = "Unknown"

    result: Dict = {
        "backup_software": "Syncovery",
        "backup_type": "Syncovery",
        "job_name": job_name,
        "overall_status": overall_status,
        "overall_message": (
            f"Jobs fine: {fine}, errors: {errors}, scheduled not run: {not_run}"
        ),
    }
    return True, result, _parse_objects(body)

View File

@ -0,0 +1,417 @@
from __future__ import annotations
import re
from typing import Dict, Tuple, List, Optional
from ..models import MailMessage
# Synology notification parsers
#
# Supported:
# - Active Backup (Synology): notifications from Active Backup (e.g. Google Workspace)
# - Hyper Backup (Synology): task notifications from Hyper Backup
# - Account Protection (Synology): DSM Account Protection lockout notifications
_BR_RE = re.compile(r"<\s*br\s*/?\s*>", re.I)  # <br>, <br/>, <br /> -> line break
_TAG_RE = re.compile(r"<[^>]+>")  # any remaining HTML tag
_WS_RE = re.compile(r"[\t\r\f\v ]+")  # horizontal whitespace only (newlines survive)
# Synology DSM Account Protection (NL/EN)
# Subject often contains "account [user]" and body contains "Accountbeveiliging" (NL)
# or "Account Protection" (EN).
_ACCOUNT_PROTECTION_MARKER_RE = re.compile(r"account\s*(?:protection|beveiliging)", re.I)
_ACCOUNT_NAME_RE = re.compile(r"\baccount\s*\[(?P<name>[^\]]+)\]", re.I)
def _html_to_text(value: str) -> str:
    """Best-effort conversion of a small HTML mail body to plain text."""
    if not value:
        return ""
    text = _BR_RE.sub("\n", value)  # keep <br> as line breaks
    text = _TAG_RE.sub("", text)    # drop all other tags
    # Replace non-breaking spaces (entity and codepoint) with plain spaces.
    for nbsp in ("&nbsp;", "\u00a0"):
        text = text.replace(nbsp, " ")
    return _WS_RE.sub(" ", text).strip()
def _extract_job_name_active_backup(subject: str, text: str) -> Optional[str]:
# Prefer [Job Name] pattern in subject
m = re.search(r"\[([^\]]+)\]", subject or "")
if m:
return m.group(1).strip()
# Fallback: body sometimes contains "Back-up [Job] is ..."
m = re.search(r"Back-?up\s*\[([^\]]+)\]", text or "", re.I)
if m:
return m.group(1).strip()
return None
def _is_synology_account_protection(subject: str, text: str) -> bool:
    """Detect DSM Account Protection lockout notifications (NL/EN).

    Matches on an explicit "Account Protection"/"Accountbeveiliging"
    marker in subject or body, or on an "account [user]" subject combined
    with login-related wording in the body.
    """
    subj = subject or ""
    t = text or ""
    # Marker can appear in subject or body.
    if _ACCOUNT_PROTECTION_MARKER_RE.search(subj) or _ACCOUNT_PROTECTION_MARKER_RE.search(t):
        return True
    # Some NL subjects don't mention "Accountbeveiliging" but do include "account [user]".
    return _ACCOUNT_NAME_RE.search(subj) is not None and (
        "aanmeldingspogingen" in (t.lower())
        or "login" in (t.lower())
        or "sign-in" in (t.lower())
    )
def _parse_account_protection(subject: str, text: str) -> Tuple[bool, Dict, List[Dict]]:
    """Build a result dict for a DSM Account Protection notification.

    Returns (False, {}, []) when no "account [user]" marker can be found.
    These mails are informational (no schedule) and carry no object list;
    the overall status is always "Warning".
    """
    hit = _ACCOUNT_NAME_RE.search(subject or "") or _ACCOUNT_NAME_RE.search(text or "")
    account = (hit.group("name") or "").strip() if hit else ""
    if not account:
        return False, {}, []

    message = "Account Protection triggered"
    lowered = (text or "").lower()
    # Use a clearer message for the common "too many failed logins" case.
    if "mislukte" in lowered or "failed" in lowered:
        message = "Too many failed login attempts"

    result: Dict = {
        "backup_software": "Synology",
        "backup_type": "Account Protection",
        "job_name": account,
        "overall_status": "Warning",
        "overall_message": message,
    }
    return True, result, []
# Per-service counter line in Active Backup summaries (NL/EN).
# Example (NL):
# "- Mijn schijf: succes: 0; waarschuwing: 11; fout: 0"
# Example (EN):
# "- My Drive: success: 0; warning: 11; error: 0"
_SERVICE_LINE_RE = re.compile(
    r"^\s*[-•]\s*(?P<service>[^:]+):\s*"
    r"(?:succes|success):\s*(?P<success>\d+)\s*;\s*"
    r"(?:waarschuwing|warning):\s*(?P<warning>\d+)\s*;\s*"
    r"(?:fout|error):\s*(?P<error>\d+)\s*$",
    re.I,
)
def _extract_totals(text: str) -> Tuple[int, int, int]:
    """Sum the per-service success/warning/error counters of an
    Active Backup summary body.  Returns (success, warning, error)."""
    totals = {"success": 0, "warning": 0, "error": 0}
    for raw_line in (text or "").splitlines():
        hit = _SERVICE_LINE_RE.match(raw_line.strip())
        if hit is None:
            continue
        for key in totals:
            totals[key] += int(hit.group(key))
    return totals["success"], totals["warning"], totals["error"]
# Product-name marker for Active Backup for Business mails.
_ABB_SUBJECT_RE = re.compile(r"\bactive\s+backup\s+for\s+business\b", re.I)
# Completed-task phrasing; NL groups (job/host) and EN groups (job_en/host_en).
# Example (NL):
# "De back-uptaak vSphere-Task-1 op KANTOOR-NEW is voltooid."
# Example (EN):
# "The backup task vSphere-Task-1 on KANTOOR-NEW has completed."
_ABB_COMPLETED_RE = re.compile(
    r"\b(?:de\s+)?back-?up\s*taak\s+(?P<job>.+?)\s+op\s+(?P<host>.+?)\s+is\s+voltooid\b"
    r"|\b(?:the\s+)?back-?up\s+task\s+(?P<job_en>.+?)\s+on\s+(?P<host_en>.+?)\s+(?:is\s+)?(?:completed|finished|has\s+completed)\b",
    re.I,
)
# Failure phrasing; NOTE: no capture groups, usable only as a yes/no check.
_ABB_FAILED_RE = re.compile(
    r"\b(?:de\s+)?back-?up\s*taak\s+.+?\s+op\s+.+?\s+is\s+mislukt\b"
    r"|\b(?:the\s+)?back-?up\s+task\s+.+?\s+on\s+.+?\s+(?:has\s+)?failed\b",
    re.I,
)
# "Apparaatlijst: DC01, SQL01" / "Device list: DC01, SQL01"
_ABB_DEVICE_LIST_RE = re.compile(r"^\s*(?:Apparaatlijst|Device\s+list)\s*:\s*(?P<list>.+?)\s*$", re.I)
def _is_synology_active_backup_for_business(subject: str, text: str) -> bool:
    """True when subject or body mentions "Active Backup for Business"."""
    return bool(
        _ABB_SUBJECT_RE.search(subject or "") or _ABB_SUBJECT_RE.search(text or "")
    )
def _parse_active_backup_for_business(subject: str, text: str) -> Tuple[bool, Dict, List[Dict]]:
    """Parse Synology Active Backup for Business task notifications.

    Handles both the completed phrasing ("is voltooid" / "has completed")
    and the failed phrasing ("is mislukt" / "has failed").  Previously,
    failed-task mails were rejected outright because only the completed
    regex could extract job/host; a capture-group fallback for the failure
    phrasing fixes that.  Device names from the "Apparaatlijst"/"Device
    list" line become objects.
    """
    haystack = f"{subject}\n{text}"
    m = _ABB_COMPLETED_RE.search(haystack)
    if m:
        job_name = (m.group("job") or m.group("job_en") or "").strip()
        host = (m.group("host") or m.group("host_en") or "").strip()
    else:
        # Failure mails never contain the completed phrase, so extract
        # job/host from the failure phrasing instead (mirrors
        # _ABB_FAILED_RE but with capture groups).
        failed_m = re.search(
            r"\b(?:de\s+)?back-?up\s*taak\s+(?P<job>.+?)\s+op\s+(?P<host>.+?)\s+is\s+mislukt\b"
            r"|\b(?:the\s+)?back-?up\s+task\s+(?P<job_en>.+?)\s+on\s+(?P<host_en>.+?)\s+(?:has\s+)?failed\b",
            haystack,
            re.I,
        )
        if not failed_m:
            # Not our ABB format
            return False, {}, []
        job_name = (failed_m.group("job") or failed_m.group("job_en") or "").strip()
        host = (failed_m.group("host") or failed_m.group("host_en") or "").strip()

    overall_status = "Success"
    overall_message = "Success"
    if _ABB_FAILED_RE.search(haystack):
        overall_status = "Error"
        overall_message = "Failed"

    # Devices listed in the notification become objects, all carrying the
    # overall status (the mail has no per-device status).
    objects: List[Dict] = []
    for line in (text or "").splitlines():
        mm = _ABB_DEVICE_LIST_RE.match(line.strip())
        if not mm:
            continue
        raw_list = (mm.group("list") or "").strip()
        # "DC01, SQL01"
        for name in [p.strip() for p in raw_list.split(",")]:
            if name:
                objects.append({"name": name, "status": overall_status})

    result = {
        "backup_software": "Synology",
        "backup_type": "Active Backup for Business",
        "job_name": job_name,
        "overall_status": overall_status,
        "overall_message": overall_message,
    }
    # Provide a slightly nicer overall message when host is available
    if host and overall_message in ("Success", "Failed"):
        result["overall_message"] = f"{overall_message} ({host})"
    return True, result, objects
def _is_synology_active_backup(subject: str, text: str) -> bool:
# Keep matching conservative to avoid false positives.
subj = (subject or "").lower()
if "active backup" in subj:
return True
# Fallback for senders that don't include it in the subject
t = (text or "").lower()
return "active backup" in t and ("adminconsole" in t or "back-up" in t or "backup" in t)
def _is_synology_hyper_backup(subject: str, text: str) -> bool:
# Subject often does not mention Hyper Backup; body typically contains it.
s = (subject or "").lower()
t = (text or "").lower()
if "hyper backup" in s:
return True
# Dutch/English variants that appear in Hyper Backup task notifications.
if ("hyper backup" in t) and (
("taaknaam:" in t)
or ("task name:" in t)
or ("gegevensback-uptaak" in t)
or ("data backup task" in t)
):
return True
# Newer task notification variant (often used for cloud destinations like HiDrive)
# does not always include "Hyper Backup" in the subject/body but contains these fields.
if ("backup task:" in t) and ("backup destination:" in t):
return True
# Dutch task notification variant (e.g. "Uw back-uptaak ... is nu voltooid")
return ("back-uptaak:" in t) and ("back-updoel:" in t)
# Hyper Backup task-name field, NL/EN variants (multi-line match).
_HB_TASKNAME_RE = re.compile(
    r"^(?:Taaknaam|Task name|Back-uptaak|Backup Task|Backup task)\s*:\s*(?P<name>.+)$",
    re.I | re.M,
)
# Narrower "Backup Task:" variant, used to detect the cloud-destination layout.
_HB_BACKUP_TASK_RE = re.compile(r"^Backup Task\s*:\s*(?P<name>.+)$", re.I | re.M)
_HB_FAILED_RE = re.compile(r"\bis\s+mislukt\b|\bhas\s+failed\b|\bfailed\b", re.I)
_HB_SUCCESS_RE = re.compile(
    r"\bis\s+(?:nu\s+)?voltooid\b|\bhas\s+completed\b|\bsuccessful\b|\bgeslaagd\b",
    re.I,
)
_HB_WARNING_RE = re.compile(r"\bgedeeltelijk\s+voltooid\b|\bpartially\s+completed\b|\bwarning\b|\bwaarschuwing\b", re.I)
# Synology Network Backup / R-Sync task notifications
# Example (NL):
# "Uw back-uptaak R-Sync ASP-NAS02 is nu voltooid."
# "Back-uptaak: R-Sync ASP-NAS02"
# Example (EN):
# "Your backup task R-Sync ASP-NAS02 has completed."
# "Backup task: R-Sync ASP-NAS02"
_RSYNC_MARKER_RE = re.compile(r"\br-?sync\b", re.I)
_RSYNC_TASK_RE = re.compile(r"^(?:Back-uptaak|Backup\s+task)\s*:\s*(?P<name>.+)$", re.I | re.M)
_RSYNC_FAILED_RE = re.compile(r"\bis\s+mislukt\b|\bhas\s+failed\b|\bfailed\b", re.I)
_RSYNC_WARNING_RE = re.compile(r"\bgedeeltelijk\s+voltooid\b|\bpartially\s+completed\b|\bwarning\b|\bwaarschuwing\b", re.I)
_RSYNC_SUCCESS_RE = re.compile(r"\bis\s+(?:nu\s+)?voltooid\b|\bhas\s+completed\b|\bcompleted\b|\bsuccessful\b|\bgeslaagd\b", re.I)
def _is_synology_rsync(subject: str, text: str) -> bool:
    """Detect Synology R-Sync / Network Backup task notifications.

    Requires both an "r-sync" marker (subject or body) and a
    "Back-uptaak:"/"Backup task:" field in the body to limit false hits.
    """
    has_marker = bool(
        _RSYNC_MARKER_RE.search(subject or "") or _RSYNC_MARKER_RE.search(text or "")
    )
    return has_marker and _RSYNC_TASK_RE.search(text or "") is not None
def _parse_rsync(subject: str, text: str) -> Tuple[bool, Dict, List[Dict]]:
    """Parse an R-Sync task notification into a result dict.

    Status precedence: failed > warning > success ("Success" is also the
    default when nothing matches).  R-Sync mails carry no object list.
    """
    task = _RSYNC_TASK_RE.search(text or "")
    job_name = (task.group("name") or "").strip() if task else ""
    if not job_name:
        return False, {}, []

    haystack = f"{subject}\n{text}"
    if _RSYNC_FAILED_RE.search(haystack):
        status = "Failed"
    elif _RSYNC_WARNING_RE.search(haystack):
        status = "Warning"
    else:
        status = "Success"

    result: Dict = {
        "backup_software": "Synology",
        "backup_type": "R-Sync",
        "job_name": job_name,
        "overall_status": status,
        "overall_message": status,
    }
    # R-Sync notifications do not provide an object list.
    return True, result, []
def _parse_hyper_backup(subject: str, text: str) -> Tuple[bool, Dict, List[Dict]]:
    """Parse a Hyper Backup task notification into a result dict.

    Requires a task-name field in the body ("Taaknaam:" / "Task name:" /
    "Backup Task:" etc.).  Status precedence is failed > warning >
    success, defaulting to "Success".  Mails using the "Backup Task:"
    variant that also mention HiDrive/Strato are classified as
    "Strato HiDrive".  These mails carry no object list.
    """
    m = _HB_TASKNAME_RE.search(text or "") or _HB_BACKUP_TASK_RE.search(text or "")
    if not m:
        return False, {}, []
    job_name = (m.group("name") or "").strip()
    if not job_name:
        return False, {}, []
    overall_status = "Success"
    overall_message = "Success"
    # Determine status. Prefer explicit failure first.
    haystack = f"{subject}\n{text}"
    if _HB_FAILED_RE.search(haystack):
        overall_status = "Failed"
        overall_message = "Failed"
    elif _HB_WARNING_RE.search(haystack):
        overall_status = "Warning"
        overall_message = "Warning"
    elif _HB_SUCCESS_RE.search(haystack):
        overall_status = "Success"
        overall_message = "Success"
    backup_type = "Hyperbackup"
    # If this is the "Backup Task:" variant and it references HiDrive/Strato, classify as Strato HiDrive.
    if _HB_BACKUP_TASK_RE.search(text or "") and re.search(r"\bhidrive\b|\bstrato\b", (text or ""), re.I):
        backup_type = "Strato HiDrive"
    result: Dict = {
        "backup_software": "Synology",
        "backup_type": backup_type,
        "job_name": job_name,
        "overall_status": overall_status,
        "overall_message": overall_message,
    }
    # Hyper Backup task notifications do not include an object list.
    return True, result, []
def _parse_active_backup(subject: str, text: str) -> Tuple[bool, Dict, List[Dict]]:
    """Parse a Synology Active Backup summary notification.

    Status is derived from the summed per-service counters: any error ->
    Error, else any warning -> Warning, else Success.  These mails only
    summarize per-service counts, so no object list is produced.
    """
    job_name = _extract_job_name_active_backup(subject, text)
    if not job_name:
        # Not the Synology Active Backup format we know.
        return False, {}, []

    success, warning, error = _extract_totals(text)
    if error > 0:
        status, message = "Error", f"{error} error(s), {warning} warning(s)"
    elif warning > 0:
        status, message = "Warning", f"{warning} warning(s)"
    else:
        status, message = "Success", "Success"

    result: Dict = {
        "backup_software": "Synology",
        "backup_type": "Active Backup",
        "job_name": job_name,
        "overall_status": status,
        "overall_message": message,
    }
    return True, result, []
def try_parse_synology(msg: MailMessage) -> Tuple[bool, Dict, List[Dict]]:
    """Parse Synology notifications (Active Backup / Hyper Backup).

    Returns (matched, result_dict, objects).  Sub-parsers are tried in a
    fixed order because their detection heuristics overlap:
    Account Protection, then R-Sync, then Hyper Backup, then
    Active Backup for Business, then Active Backup.
    """
    subject = getattr(msg, "subject", None) or ""
    # Microsoft Graph stores either html_body (contentType=html) or text_body.
    # Some senders only provide text/plain, so parsers must fall back to text_body.
    html_body = getattr(msg, "html_body", None) or ""
    text_body = getattr(msg, "text_body", None) or ""
    # If html_body is empty, treat text_body as already-normalized text.
    text = _html_to_text(html_body) if html_body else (text_body or "")
    # DSM Account Protection (informational; no schedule)
    if _is_synology_account_protection(subject, text):
        ok, result, objects = _parse_account_protection(subject, text)
        if ok:
            return True, result, objects
    # R-Sync first (otherwise it may be misclassified as Hyper Backup due to shared fields)
    if _is_synology_rsync(subject, text):
        ok, result, objects = _parse_rsync(subject, text)
        if ok:
            return True, result, objects
    # Hyper Backup next (subject often doesn't include it)
    if _is_synology_hyper_backup(subject, text):
        ok, result, objects = _parse_hyper_backup(subject, text)
        if ok:
            return True, result, objects
    # Active Backup for Business (ABB)
    if _is_synology_active_backup_for_business(subject, text):
        ok, result, objects = _parse_active_backup_for_business(subject, text)
        if ok:
            return True, result, objects
    # Active Backup
    if not _is_synology_active_backup(subject, text):
        return False, {}, []
    return _parse_active_backup(subject, text)

View File

@ -0,0 +1,54 @@
from __future__ import annotations
import re
from typing import Dict, Tuple, List
from ..models import MailMessage
def try_parse_3cx(msg: MailMessage) -> Tuple[bool, Dict, List[Dict]]:
    """Parse 3CX backup notification e-mails.

    Expected:
      Subject: '3CX Notification: Backup Complete - <host>'
      Body contains: 'Backup name: <file>'

    Returns (matched, result_dict, objects); the backup file, when found,
    becomes a single successful object.
    """
    subject = (msg.subject or "").strip()
    if not subject:
        return False, {}, []
    subject_m = re.match(
        r"^3CX Notification:\s*Backup Complete\s*-\s*(.+)$", subject, flags=re.IGNORECASE
    )
    if subject_m is None:
        return False, {}, []

    # Body may live in text_body, a generic body attribute, or (for some
    # sources) as plain text stored in html_body.
    body = (
        getattr(msg, "text_body", None)
        or getattr(msg, "body", None)
        or getattr(msg, "html_body", None)
        or ""
    )

    objects: List[Dict] = []
    file_m = re.search(
        r"^\s*Backup\s+name\s*:\s*(.+?)\s*$", body, flags=re.IGNORECASE | re.MULTILINE
    )
    if file_m:
        objects.append(
            {
                "name": file_m.group(1).strip(),
                "status": "Success",
                "error_message": None,
            }
        )

    result = {
        "backup_software": "3CX",
        "backup_type": None,
        "job_name": subject_m.group(1).strip(),
        "overall_status": "Success",
        "overall_message": None,
    }
    return True, result, objects

View File

@ -0,0 +1,900 @@
from __future__ import annotations
import re
from typing import Dict, Tuple, List, Optional
from ..models import MailMessage
# Supported backup types we recognise in the subject
# NOTE(review): consumers of this list are outside this chunk — presumably
# subject/banner matching; verify before renaming entries.
VEEAM_BACKUP_TYPES = [
    "Backup Job",
    "Backup Copy Job",
    "Replica Job",
    "Replication job",
    "Configuration Backup",
    "Agent Backup job",
    "Veeam Backup for Microsoft 365",
    "Scale-out Backup Repository",
    "Health Check",
]
def _strip_html_tags(value: str) -> str:
"""Very small helper to strip HTML tags from a string."""
if not value:
return ""
value = re.sub(r"<\s*br\s*/?>", "\n", value, flags=re.IGNORECASE)
value = re.sub(r"</p>", "\n", value, flags=re.IGNORECASE)
value = re.sub(r"<[^>]+>", "", value)
return re.sub(r"\s+", " ", value).strip()
def _html_to_text_preserve_lines(html: str) -> str:
    """Convert HTML to text while keeping meaningful line breaks.

    <br>, </p>, </td>, </th> and </tr> become newlines so multi-line
    warning/error blocks stay readable and can be stored as-is for
    overrides; each resulting line is whitespace-collapsed and empty
    lines are dropped.
    """
    html = _normalize_html(html)
    if not html:
        return ""
    # Normalize common line-break-ish tags to newlines.
    text = html
    for pattern in (r"<\s*br\s*/?>", r"</p\s*>", r"</t[dh]\s*>", r"</tr\s*>"):
        text = re.sub(pattern, "\n", text, flags=re.IGNORECASE)
    # Strip remaining tags and do basic entity cleanup.
    text = re.sub(r"<[^>]+>", "", text)
    text = text.replace("&nbsp;", " ")
    # Collapse whitespace per line, keeping line boundaries.
    cleaned = [
        re.sub(r"\s+", " ", (line or "")).strip()
        for line in re.split(r"\r?\n", text)
    ]
    return "\n".join(line for line in cleaned if line).strip()
def _extract_configuration_job_overall_message(html: str) -> Optional[str]:
    """Collect Warning/Failed/Error log lines from a Veeam Configuration
    Job mail as a multi-line overall message.

    These mails often carry the real problem statement as timestamped
    lines inside a table cell (not as objects and not in
    <span class="small_label"> blocks), e.g.:
        26-12-2025 10:00:23 Warning Skipping server certificate backup ...
    Returns None when no such line exists.
    """
    body = _html_to_text_preserve_lines(html)
    if not body:
        return None
    stamped = re.compile(
        r"^\d{2}-\d{2}-\d{4}\s+\d{2}:\d{2}:\d{2}\s+(Warning|Failed|Error)\b"
    )
    picked = [line for line in body.split("\n") if stamped.match(line)]
    if not picked:
        return None
    return "\n".join(picked).strip() or None
def _normalize_html(html: str) -> str:
"""Normalize HTML content from mail bodies.
Some sources can yield HTML strings with embedded NUL characters (e.g.
after a UTF-16 conversion). Those NULs break regex-based parsing.
"""
if not html:
return ""
# Be defensive: ensure we have a string.
if isinstance(html, bytes):
try:
html = html.decode("utf-8", errors="ignore")
except Exception:
html = str(html)
# Remove NULs and BOMs that may be present in some HTML payloads.
html = html.replace("\x00", "")
html = html.replace("\ufeff", "")
return html
def _parse_storage_size_to_bytes(value: str) -> Optional[int]:
"""Parse a human-readable storage size into bytes.
Supports decimal comma (e.g. "107,6 TB") and units:
B, KB, MB, GB, TB, PB.
Returns None when parsing fails.
"""
if not value:
return None
v = value.strip()
v = v.replace("\xa0", " ")
m = re.search(r"(?i)([0-9]+(?:[\.,][0-9]+)?)\s*(B|KB|MB|GB|TB|PB)\b", v)
if not m:
return None
num_raw = (m.group(1) or "").replace(",", ".")
try:
num = float(num_raw)
except Exception:
return None
unit = (m.group(2) or "B").upper()
mult = {
"B": 1,
"KB": 1024,
"MB": 1024**2,
"GB": 1024**3,
"TB": 1024**4,
"PB": 1024**5,
}.get(unit, 1)
try:
return int(num * mult)
except Exception:
return None
def _parse_free_percent(value: str) -> Optional[float]:
if not value:
return None
m = re.search(r"(?i)([0-9]+(?:[\.,][0-9]+)?)\s*%\s*free", value)
if not m:
return None
raw = (m.group(1) or "").replace(",", ".")
try:
return float(raw)
except Exception:
return None
def _parse_sobr_capacity_metrics_from_html(html: str) -> Dict:
    """Extract repository-level capacity metrics for Veeam SOBR mails.

    Reads the "Used Space" and "Capacity" table cells, converts them to
    bytes, derives free bytes and a free percentage (taken from the
    "% free" annotation when present, else computed).  Returns {} when
    neither numeric metric could be parsed.
    """
    html = _normalize_html(html)
    if not html:
        return {}
    def _cell_value(label: str) -> Optional[str]:
        # Matches: <strong>LABEL</strong></td><td ...>VALUE</td>
        m = re.search(
            rf"(?is)<strong>\s*{re.escape(label)}\s*</strong>\s*</td>\s*<td[^>]*>(.*?)</td>",
            html,
        )
        if not m:
            return None
        return _strip_html_tags(m.group(1)).strip() or None
    used_raw = _cell_value("Used Space")
    cap_raw = _cell_value("Capacity")
    used_b = _parse_storage_size_to_bytes(used_raw or "")
    cap_b = _parse_storage_size_to_bytes(cap_raw or "")
    free_b = None
    if used_b is not None and cap_b is not None:
        # Clamp at 0 in case "used" exceeds "capacity" in the report.
        free_b = max(cap_b - used_b, 0)
    free_pct = _parse_free_percent(cap_raw or "")
    if free_pct is None and used_b is not None and cap_b:
        # Fall back to computing the percentage from the byte values.
        try:
            free_pct = float(round(((cap_b - used_b) / cap_b) * 100.0, 2))
        except Exception:
            free_pct = None
    metrics = {
        "storage_used_bytes": used_b,
        "storage_capacity_bytes": cap_b,
        "storage_free_bytes": free_b,
        "storage_free_percent": free_pct,
        "storage_used_raw": used_raw,
        "storage_capacity_raw": cap_raw,
    }
    # Only return when at least one numeric metric was parsed.
    if used_b is None and cap_b is None:
        return {}
    return metrics
def _parse_sobr_extents_from_html(html: str) -> List[Dict]:
    """Extract SOBR extents as objects.

    Table rows look like: Extent | Capacity | Used Space | Status
    (status is usually "Online").  Rows without at least one recognisable
    size value are skipped; results are de-duplicated by extent name
    while preserving order.
    """
    html = _normalize_html(html)
    if not html:
        return []
    flattened = re.sub(r"\s+", " ", html)
    row_re = re.compile(r"<tr[^>]*>(.*?)</tr>", re.IGNORECASE)
    cell_re = re.compile(r"<t[dh][^>]*>(.*?)</t[dh]>", re.IGNORECASE)

    found: List[Dict] = []
    for row in row_re.finditer(flattened):
        raw_cells = cell_re.findall(row.group(1))
        if len(raw_cells) < 3:
            continue
        cols = [_strip_html_tags(c).strip() for c in raw_cells]
        if not cols or not cols[0]:
            continue
        # Skip header rows
        if cols[0].lower() in ["extent", "type", "name", "object"]:
            continue
        cap_raw = cols[1] if len(cols) > 1 else ""
        used_raw = cols[2] if len(cols) > 2 else ""
        # Heuristic: a real extent row contains at least one size value.
        if not re.search(r"(?i)\b(TB|GB|MB|KB|PB)\b", cap_raw + " " + used_raw):
            continue
        entry: Dict = {
            "name": cols[0],
            "type": "Extent",
            "status": (cols[3] if len(cols) > 3 else "").strip() or None,
            "error_message": None,
            # Numeric values are kept in the dict (ignored by the DB today
            # but useful for the future).
            "used_bytes": _parse_storage_size_to_bytes(used_raw),
            "capacity_bytes": _parse_storage_size_to_bytes(cap_raw),
            "free_percent": _parse_free_percent(cap_raw),
        }
        found.append(entry)

    # De-duplicate by extent name while preserving order.
    seen_names = set()
    deduped: List[Dict] = []
    for entry in found:
        name = entry.get("name")
        if not name or name in seen_names:
            continue
        seen_names.add(name)
        deduped.append(entry)
    return deduped
# ---- Scale-out Backup Repository (SOBR) wrapper ----
def _parse_sobr_from_html(html: str) -> Tuple[List[Dict], Dict]:
    """Parse Veeam Scale-out Backup Repository report HTML.

    Returns (objects, metrics_dict): objects are the extents, metrics the
    repository-level capacity figures.
    """
    extents = _parse_sobr_extents_from_html(html)
    capacity = _parse_sobr_capacity_metrics_from_html(html)
    return extents, capacity
def _extract_overall_error_block(html: str) -> Optional[str]:
    """Return the detailed error text from a Veeam HTML report, if any.

    Some Veeam mails (e.g. Backup Copy jobs) carry a message block like
    <span class="small_label">Cannot connect ...</span>; returns None
    when absent or empty.
    """
    if not html:
        return None
    hit = re.search(
        r'<span[^>]*class="small_label"[^>]*>(.*?)</span>',
        html,
        flags=re.IGNORECASE | re.DOTALL,
    )
    if hit is None:
        return None
    return _strip_html_tags(hit.group(1)).strip() or None
def _extract_processing_overall_message(html: str) -> Optional[str]:
    """Return the "Processing <object>" marker some Veeam Backup Job
    templates place in the session-details header, or None.

    The marker is stored as overall_message so override rules can use it.
    """
    if not html:
        return None
    hit = re.search(
        r'(?is)<span[^>]*class="small_label"[^>]*>\s*(Processing\s+[^<\r\n]+?)\s*</span>',
        _normalize_html(html),
    )
    if hit is None:
        return None
    message = _strip_html_tags(hit.group(1) or "").strip()
    # Guard against partial matches: the text must really start that way.
    if not message.lower().startswith("processing "):
        return None
    return message or None
def _extract_m365_overall_details_message(html: str) -> Optional[str]:
    """Extract overall Details message from Veeam Backup for Microsoft 365 summary.

    Veeam Backup for Microsoft 365 emails often place an important warning/info
    in the summary header table under the "Details" column (rowspan cell).
    This message is not part of the object list and should be stored as the
    overall message for the job run.

    Selection order: permission/role warnings first, then the first
    candidate that is not the generic "Warning X of X objects processed"
    text, then the first candidate.  Returns None when nothing usable is
    found.
    """
    if not html:
        return None
    # Look for the summary "Details" cell (typically a header_td with rowspan).
    candidates = re.findall(
        r'<td[^>]*rowspan\s*=\s*["\']?\s*2\s*["\']?[^>]*>(.*?)</td>',
        html,
        flags=re.IGNORECASE | re.DOTALL,
    )
    if not candidates:
        return None
    texts: List[str] = []
    for raw in candidates:
        text = _strip_html_tags(raw)
        text = re.sub(r"\s+", " ", (text or "")).strip()
        if not text:
            continue
        # Skip obvious non-messages
        if text.lower() in {"details"}:
            continue
        # Ignore tiny fragments
        if len(text) < 20:
            continue
        texts.append(text)
    if not texts:
        return None
    # Prefer specific permission/role warnings over generic processed-count warnings.
    for t in texts:
        if "Missing application permissions" in t or "Missing application roles" in t:
            return t
    # If we have multiple candidates, avoid selecting the generic "Warning X of X objects processed"
    generic_re = re.compile(r"^Warning\s+\d+\s+of\s+\d+\s+objects\s+processed\s*$", re.IGNORECASE)
    filtered = [t for t in texts if not generic_re.match(t)]
    if filtered:
        return filtered[0]
    return texts[0]
def _parse_objects_from_html(html: str) -> Tuple[Optional[str], List[Dict]]:
    """Try to extract objects from a Veeam HTML body.

    Returns (overall_message, objects).
    Objects are returned as dicts: {name, type?, status?, error_message?}

    Rows are classified in this order: banner/header rows (skipped, may
    contribute to overall_message), the Microsoft 365 Type/Name/Status
    layout, the generic Name/Status/... job layout, and finally a loose
    name+status fallback.  The overall message is then refined from the
    "Processing <object>" marker, the M365 summary "Details" cell, or the
    small_label error block, in that order of preference.
    """
    html = _normalize_html(html)
    if not html:
        return None, []
    overall_message: Optional[str] = None
    objects: List[Dict] = []
    # Flatten whitespace so row/cell regexes can span original line breaks.
    snippet = re.sub(r"\s+", " ", html)
    row_pattern = re.compile(r"<tr[^>]*>(.*?)</tr>", re.IGNORECASE)
    cell_pattern = re.compile(r"<t[dh][^>]*>(.*?)</t[dh]>", re.IGNORECASE)
    for row_match in row_pattern.finditer(snippet):
        row_html = row_match.group(1)
        cells = cell_pattern.findall(row_html)
        if len(cells) < 2:
            continue
        # Convert cells to plain text
        plain = [_strip_html_tags(c) for c in cells]
        plain = [p.strip() for p in plain]
        # Skip header rows
        if plain and plain[0].lower() in ["type", "name", "object"]:
            continue
        # Health Check reports contain a banner row "Health Check Summary".
        # It often has a second column with the overall status (e.g. "Error")
        # and can be incorrectly treated as an object row.
        if plain and (plain[0] or "").strip().lower() == "health check summary":
            continue
        # Veeam header lines like "Backup job:", "Replication job:", "Backup Copy job:", "Agent Backup job:", etc.
        # These are overall banners and must never become an object.
        if plain:
            first = plain[0].strip()
            first_l = first.lower()
            if re.match(r"^(backup|replication|replica|backup\s+copy|configuration|agent\s+backup)\s+job\b", first_l):
                # Keep a short overall message if present (mainly useful for failures),
                # but never store it as an object.
                if len(plain) >= 2:
                    msg_parts = [p for p in plain[1:] if p]
                    if msg_parts:
                        filtered_parts: List[str] = []
                        for part in msg_parts:
                            p = (part or "").strip()
                            pl = p.lower()
                            # Ignore banner-only status markers and VM count summaries.
                            if pl in ["success", "succeeded", "warning", "failed", "error"]:
                                continue
                            if "vms processed" in pl or re.match(r"^\d+\s+of\s+\d+\s+vms\s+processed$", pl):
                                continue
                            # Microsoft 365 reports can contain a generic processed-count warning
                            # in the banner (e.g. "Warning 90 of 90 objects processed"). That
                            # is not the real issue description and must not block extraction
                            # of the summary "Details" message.
                            if "objects processed" in pl and re.match(
                                r"^warning\s+\d+\s+of\s+\d+\s+objects\s+processed$", pl
                            ):
                                continue
                            filtered_parts.append(p)
                        if filtered_parts:
                            overall_message = " ".join(filtered_parts).strip()
                continue
        # Microsoft 365 layout: first col Type, second col Name, third col Status
        # (name is not always a URL; can be e.g. "Discovery Search Mailbox").
        if len(plain) >= 3:
            t = plain[0]
            name = plain[1]
            status = plain[2]
            lowered_status = (status or "").lower()
            if t and name and any(k in lowered_status for k in ["success", "succeeded", "warning", "failed", "error"]):
                # Best-effort details: only keep details when not-success (to avoid storing
                # e.g. sizes/durations as "error_message" for successful objects).
                details_text = ""
                if not any(k in lowered_status for k in ["success", "succeeded"]):
                    if len(plain) > 3:
                        for v in reversed(plain[3:]):
                            if v and v.strip():
                                details_text = v.strip()
                                break
                objects.append(
                    {
                        "name": name,
                        "type": t,
                        "status": status or None,
                        "error_message": details_text or None,
                    }
                )
                continue
        # Generic job object layout (common for Backup Job / Backup Copy Job):
        #   Name | Status | Start time | End time | Size | Read | Transferred | Duration | Details
        # We treat column 1 as the object name and column 2 as the status.
        if len(plain) >= 2:
            name_candidate = (plain[0] or "").strip()
            status_candidate = (plain[1] or "").strip()
            lowered_status_candidate = status_candidate.lower()
            # Do not treat the Health Check Summary banner as an object.
            if name_candidate.lower() == "health check summary":
                continue
            if name_candidate and any(
                key in lowered_status_candidate for key in ["success", "succeeded", "warning", "failed", "error"]
            ):
                details_text: Optional[str] = None
                # Only keep details when the object is not successful.
                if not any(k in lowered_status_candidate for k in ["success", "succeeded"]):
                    # Prefer the last non-empty cell beyond the status column.
                    for idx in range(len(cells) - 1, 1, -1):
                        raw_cell_html = cells[idx] or ""
                        t = _html_to_text_preserve_lines(raw_cell_html)
                        if t:
                            details_text = t
                            break
                objects.append(
                    {
                        "name": name_candidate,
                        "type": None,
                        "status": status_candidate or None,
                        "error_message": details_text or None,
                    }
                )
                continue
        # Fallback layout: first cell object name, second cell status
        name_raw = plain[0] if plain else ""
        status_raw = plain[1] if len(plain) > 1 else ""
        if not name_raw:
            continue
        # Health Check reports contain a header row ("Health Check Summary") that looks
        # like an object row but must never be treated as an object.
        if name_raw.strip().lower() == "health check summary":
            continue
        lowered_status = status_raw.lower()
        if not any(key in lowered_status for key in ["success", "succeeded", "warning", "failed", "error"]):
            continue
        objects.append(
            {
                "name": name_raw,
                "type": None,
                "status": status_raw.strip() or None,
                "error_message": None,
            }
        )
    # Prefer a "Processing <object>" marker when present (used by Backup Job templates).
    processing_msg = _extract_processing_overall_message(html)
    if processing_msg:
        overall_message = processing_msg
    # Veeam Backup for Microsoft 365 often places the overall warning/info in the
    # summary header "Details" cell instead of an error span or object row.
    m365_details_msg = _extract_m365_overall_details_message(html)
    if m365_details_msg:
        # Always prefer the M365 summary "Details" message over the generic
        # processed-count warning that can appear in banner/header rows.
        generic_processed_re = re.compile(
            r"^Warning\s+\d+\s+of\s+\d+\s+objects\s+processed\s*$", re.IGNORECASE
        )
        if (not overall_message) or generic_processed_re.match(overall_message or ""):
            overall_message = m365_details_msg
        else:
            # If we already have a banner message, still override when it is a
            # generic processed-count warning.
            if "objects processed" in (overall_message or "").lower() and generic_processed_re.match(
                (overall_message or "").strip()
            ):
                overall_message = m365_details_msg
    elif not overall_message:
        overall_message = None
    # If we still did not get an overall header message, try to extract the error block
    if not overall_message:
        overall_message = _extract_overall_error_block(html)
    # Deduplicate by (type, name) while keeping order. In Veeam M365 reports the
    # same "name" can appear under multiple object types (e.g. Mailbox and OneDrive).
    seen = set()
    unique_objects: List[Dict] = []
    for obj in objects:
        n = obj.get("name")
        t = obj.get("type") or ""
        key = (t, n)
        if not n or key in seen:
            continue
        seen.add(key)
        unique_objects.append(obj)
    return overall_message, unique_objects
def _detect_backup_type_from_html(msg: MailMessage) -> Optional[str]:
    """Detect the backup product from the mail's HTML body.

    Returns "Veeam Backup for Microsoft 365" when the normalized HTML body
    contains that product name (case-insensitive); otherwise None.
    """
    raw_body = getattr(msg, "html_body", None) or ""
    normalized = _normalize_html(raw_body)
    if "veeam backup for microsoft 365" in normalized.lower():
        return "Veeam Backup for Microsoft 365"
    return None
def _detect_status_from_html(html: str) -> Optional[str]:
    """Best-effort overall status detection from Veeam HTML.

    Some Veeam templates do not include a [Success]/[Warning]/[Failed] marker
    in the subject (notably some Veeam Backup for Microsoft 365 mails). In
    those cases the HTML header usually contains a status cell with class
    reportSuccess/reportWarning/reportFailed.

    Returns one of "Success"/"Warning"/"Failed", or None when nothing
    status-like is found.
    """
    normalized = _normalize_html(html)
    if not normalized:
        return None
    # Primary signal: the CSS class on the report header cell.
    css_match = re.search(r'(?i)class="report\s*(Success|Warning|Failed)"', normalized)
    if css_match:
        return css_match.group(1).capitalize()
    # Fallback: look for a standalone status word in the header.
    word_match = re.search(r'(?i)\b(Success|Succeeded|Warning|Failed|Error)\b', normalized)
    if word_match is None:
        return None
    token = (word_match.group(1) or "").lower()
    if token in ("success", "succeeded"):
        return "Success"
    # "failed" and "error" both normalize to "Failed".
    return "Warning" if token == "warning" else "Failed"
def _extract_job_name_and_type_from_html(html: str) -> Tuple[Optional[str], Optional[str]]:
    """Extract job name and type from the Veeam HTML banner/header.

    The Veeam HTML report usually contains a colored header like:
        Backup job: <JOBNAME>
    This is more reliable than the subject which may include counters like
        (1 objects) 1 warning

    Returns (job_name, backup_type); both None when no banner is found.
    """
    normalized = _normalize_html(html)
    if not normalized:
        return None, None
    banner = re.search(
        r"(?i)\b(backup\s+copy\s+job|backup\s+job|replica\s+job|replication\s+job|agent\s+backup\s+job|configuration\s+backup|scale-out\s+backup\s+repository)\s*:\s*([^<\r\n]+)",
        normalized,
    )
    if banner is None:
        return None, None
    # Collapse internal whitespace, then trim trailing dash/colon decoration
    # from the name part.
    raw_type = re.sub(r"\s+", " ", (banner.group(1) or "")).strip()
    raw_name = re.sub(r"\s+", " ", (banner.group(2) or "")).strip()
    raw_name = raw_name.rstrip("-–—:").strip()
    # Canonical capitalization for the known type phrases.
    canonical_types = {
        "backup job": "Backup job",
        "backup copy job": "Backup Copy job",
        "replica job": "Replica job",
        "replication job": "Replication job",
        "agent backup job": "Agent Backup job",
        "configuration backup": "Configuration Backup",
        "scale-out backup repository": "Scale-out Backup Repository",
    }
    backup_type = canonical_types.get(raw_type.lower(), raw_type)
    return (raw_name or None), (backup_type or None)
def _clean_job_name_from_rest(rest: str) -> str:
"""Strip common suffixes like '(3 objects)' '(2 machines)' '(1 VMs)' and issues counter."""
cleaned = rest.strip()
# Remove trailing issues suffix like ", 0 issues"
cleaned = re.sub(r"\s*,\s*\d+\s+issues\s*$", "", cleaned, flags=re.IGNORECASE).strip()
# Remove trailing count in parentheses
cleaned = re.sub(r"\(\d+\s+(objects?|machines?|vms?)\)\s*$", "", cleaned, flags=re.IGNORECASE).strip()
return cleaned
def _strip_retry_suffix(job_name: Optional[str]) -> Optional[str]:
"""Remove a trailing "(Retry)" / "(Retry N)" suffix from the job name.
Some backup products append retry counters to the job name. We do not
want retries to create distinct jobs in the UI/database.
"""
if not job_name:
return job_name
cleaned = re.sub(r"\s*\(\s*Retry(?:\s*\d+)?\s*\)\s*$", "", job_name, flags=re.IGNORECASE).strip()
return cleaned or None
def try_parse_veeam(msg: MailMessage) -> Tuple[bool, Dict, List[Dict]]:
    """Try to parse a Veeam backup report mail.

    Determines overall status, backup type and job name from the subject
    and/or HTML body, then extracts per-object results from the HTML.

    Returns:
        (handled, result_dict, objects_list)
        handled is False (with empty dict/list) when the mail does not look
        like a Veeam report at all.
    """
    subject = (msg.subject or "").strip()
    if not subject:
        # Without a subject we cannot anchor any of the parsing heuristics.
        return False, {}, []
    # Some subjects can be prefixed (e.g. "SUPPORT EXPIRED") before the [Success]/[Warning] marker.
    # Prefer the bracketed marker, but also support subjects like:
    #   Veeam Backup Job "X" finished with WARNING
    # License / compliance mails can use [Error] instead of [Failed].
    m_status = re.search(r"(?i)\[(Success|Warning|Failed|Error)\]\s*(.*)$", subject)
    m_finished = re.search(r"(?i)\bfinished\s+with\s+(Success|Warning|Failed|Error)\b", subject)
    html_body = _normalize_html(getattr(msg, "html_body", None) or "")
    html_lower = html_body.lower()
    # Special-case: Veeam Backup for Microsoft 365 mails can come without a
    # subject marker. Detect via HTML and extract status from the banner.
    is_m365 = "veeam backup for microsoft 365" in html_lower
    # If we cannot detect a status marker and this is not an M365 report,
    # we still try to parse when the subject strongly indicates a Veeam report.
    if not m_status and not m_finished and not is_m365:
        lowered = subject.lower()
        if not any(k in lowered for k in ["veeam", "backup job", "backup copy job", "replica job", "configuration backup", "health check"]):
            return False, {}, []
    # Resolve overall status and the "rest" string used for job-name parsing.
    if m_status:
        status_word = m_status.group(1)
        rest = m_status.group(2)
    elif m_finished:
        status_word = m_finished.group(1)
        rest = subject
    else:
        # No subject marker: fall back to the HTML banner; default to Success.
        status_word = _detect_status_from_html(html_body) or "Success"
        rest = subject
    # License Key mails: these are compliance/renewal notifications and should never
    # participate in schedule inference (no Expected/Missed). They also have no objects.
    subject_lower = subject.lower()
    is_license_key = (
        'license key' in subject_lower
        or 'new license key is not available' in subject_lower
        or 'license support has expired' in html_lower
    )
    if is_license_key:
        # Normalize common variants to keep the rest of the app consistent.
        # NOTE(review): the 'failed' -> 'failed' reassignment below is a no-op,
        # and 'succeeded' is first folded into 'success' before the mapping.
        status_norm = (status_word or '').strip().lower()
        if status_norm == 'succeeded':
            status_norm = 'success'
        if status_norm == 'failed':
            status_norm = 'failed'
        if status_norm == 'error':
            status_word = 'Error'
        elif status_norm == 'warning':
            status_word = 'Warning'
        elif status_norm == 'success':
            status_word = 'Success'
        elif status_norm == 'failed':
            status_word = 'Failed'
        result = {
            'backup_software': 'Veeam',
            'backup_type': 'License Key',
            'job_name': 'License Key',
            'overall_status': status_word or 'Error',
            'overall_message': 'Current license support has expired, but new license key is not available',
        }
        # License mails carry no per-object rows.
        return True, result, []
    # Configuration Job detection (may not have object details)
    subj_lower = subject.lower()
    is_config_job = ('backup configuration job' in subj_lower) or ('configuration backup for' in html_lower)
    # Extract job name/type from the HTML banner if present (more reliable than subject).
    html_job_name, html_header_type = _extract_job_name_and_type_from_html(html_body)
    # Prefer HTML-driven backup type detection for special layouts (M365).
    html_backup_type = _detect_backup_type_from_html(msg)
    backup_type = html_backup_type or html_header_type
    # Otherwise determine backup type by looking for one of the known phrases in the subject
    if not backup_type:
        for candidate in VEEAM_BACKUP_TYPES:
            if candidate.lower() in rest.lower():
                backup_type = candidate
                break
    if not backup_type:
        lowered = rest.lower()
        if "replica" in lowered:
            backup_type = "Replication job"
        else:
            # Default assumption for unrecognized Veeam mails.
            backup_type = "Backup Job"
    if is_config_job:
        backup_type = "Configuration Job"
    # Normalize known backup type variants
    if (backup_type or '').lower() == 'scale-out backup repository':
        backup_type = 'Scale-out Backup Repository'
    # Normalize Health Check casing
    if (backup_type or '').lower() == 'health check':
        backup_type = 'Health Check'
    # Job name
    job_name: Optional[str] = None
    if not is_config_job and html_job_name:
        job_name = html_job_name
    elif is_config_job:
        # Configuration backups name the server in a "Configuration Backup for <host>" line.
        m_cfg = re.search(
            r"Configuration\s+Backup\s+for\s+([^<\n\r]+)",
            _normalize_html(getattr(msg, "html_body", None) or ""),
            flags=re.IGNORECASE,
        )
        if m_cfg:
            job_name = m_cfg.group(1).strip()
        else:
            job_name = _clean_job_name_from_rest(rest)
    else:
        # Job name in quotes
        job_match = re.search(r'\"([^\"]+)\"', rest)
        if job_match:
            job_name = job_match.group(1).strip()
        else:
            # M365 subject often: "<tenant> - <job> (N objects), X issues"
            if backup_type == "Veeam Backup for Microsoft 365" and " - " in rest:
                job_part = rest.split(" - ", 1)[1]
                job_name = _clean_job_name_from_rest(job_part)
            else:
                job_name = _clean_job_name_from_rest(rest)
    # Health Check mails should always yield a stable job name.
    if (backup_type or '').lower() == 'health check':
        job_name = 'Health Check'
    # Health Check subjects usually look like: "Health check (N objects)".
    # We normalize casing to keep job names stable.
    # NOTE(review): this check duplicates the one directly above (and a third
    # copy appears further down); the duplicates are harmless but redundant.
    if (backup_type or "").lower() == "health check":
        job_name = "Health Check"
    # SOBR subjects look like: "Scale-out Backup Repository: <Name>".
    if (backup_type or '').lower() == 'scale-out backup repository':
        m_sobr = re.search(r"(?i)scale-?out\s+backup\s+repository\s*:\s*(.+)$", rest)
        if m_sobr:
            job_name = (m_sobr.group(1) or '').strip().rstrip('-–—:').strip() or job_name
    # Do not let retry counters create distinct job names.
    job_name = _strip_retry_suffix(job_name)
    # Health Check reports should always map to a stable job name.
    if (backup_type or '').lower() == 'health check':
        job_name = 'Health Check'
    # Scale-out Backup Repository job name is the repository name after ':'
    # NOTE(review): this second SOBR extraction overwrites the earlier one and
    # (unlike it) does not strip trailing dashes/colons; when both match, this
    # later assignment wins.
    if (backup_type or '').lower() == 'scale-out backup repository':
        m_repo = re.search(r'(?i)scale-?out\s+backup\s+repository\s*:\s*(.+)$', rest)
        if m_repo:
            job_name = (m_repo.group(1) or '').strip()
    result: Dict = {
        "backup_software": "Veeam",
        "backup_type": backup_type,
        "job_name": _strip_retry_suffix(job_name),
        "overall_status": status_word,
    }
    # Per-object rows plus an optional overall banner message from the HTML.
    overall_message, objects = _parse_objects_from_html(html_body)
    # Scale-out Backup Repository parsing: extents + storage metrics
    if (backup_type or '').lower() == 'scale-out backup repository':
        sobr_objects, sobr_metrics = _parse_sobr_from_html(html_body)
        if sobr_objects:
            objects = sobr_objects
        # Merge storage metrics (e.g. capacity/free) into the result dict.
        for k, v in (sobr_metrics or {}).items():
            result[k] = v
    if is_config_job:
        # Configuration jobs never have per-object rows.
        objects = []
        # Configuration Job mails usually store the meaningful warning/error
        # text in a table cell rather than in an error span or object list.
        # Extract and store it so it becomes visible in details and can be
        # used for overrides.
        if status_word != "Success" and not overall_message:
            overall_message = _extract_configuration_job_overall_message(html_body)
    # Keep detailed overall message for non-success states, and always keep
    # the "Processing <object>" marker when present (used for overrides/rules).
    if overall_message:
        if status_word != "Success" or overall_message.lower().startswith("processing "):
            result["overall_message"] = overall_message
    return True, result, objects

View File

@ -0,0 +1,16 @@
/* Global layout constraints
   - Consistent content width across all pages
   - Optimized for 1080p while preventing further widening on higher resolutions
*/
/* Default pages: use more horizontal space on 1920x1080.
   min(96vw, 1840px) keeps a small gutter on narrow screens while the
   max-width caps growth on wider monitors. */
main.content-container {
  width: min(96vw, 1840px);
  max-width: 1840px;
}
/* Dashboard: keep the original (narrower) width so its cards stay readable. */
main.dashboard-container {
  width: min(90vw, 1728px);
  max-width: 1728px;
}

View File

@ -0,0 +1,40 @@
/* Status text coloring (only the text inside the status column/cell) */
.status-text { font-weight: 600; }
/* Core statuses — colors come from the Bootstrap theme variables so they
   automatically follow light/dark mode. */
.status-success { color: var(--bs-success); }
.status-warning { color: var(--bs-warning); }
/* "Error" and "Failed" are rendered identically (both use the danger color). */
.status-error { color: var(--bs-danger); }
.status-failed { color: var(--bs-danger); }
/* Missed: neutral */
.status-missed { color: var(--bs-secondary); }
/* Expected: neutral */
.status-expected { color: var(--bs-secondary); }
/* Override applied indicator */
.status-override { color: var(--bs-primary); }
/* Status icons: same shape, only color differs */
.status-dot {
  display: inline-block;
  width: 0.7rem;
  height: 0.7rem;
  border-radius: 999px; /* large radius -> always a circle */
  vertical-align: middle;
}
.dot-success { background: var(--bs-success); }
.dot-warning { background: var(--bs-warning); }
.dot-failed { background: var(--bs-danger); }
.dot-missed { background: var(--bs-secondary); }
/* "Expected" is a hollow dot: white fill with a grey ring; border-box keeps
   the outer size identical to the filled dots. */
.dot-expected {
  background: #fff;
  border: 2px solid var(--bs-secondary);
  box-sizing: border-box;
}
.dot-override { background: var(--bs-primary); }
/* Optional: make empty status look normal */
.status-text:empty { font-weight: inherit; }

Binary file not shown.

After

Width:  |  Height:  |  Size: 67 KiB

View File

@ -0,0 +1,43 @@
{% extends "layout/base.html" %}
{% block content %}
{# Initial admin setup form: shown once, before any user exists.
   Posts username/password/confirm_password back to the same URL. #}
<div class="row justify-content-center">
  <div class="col-md-5">
    <h2 class="mb-3">Initial admin setup</h2>
    <p>Create the first admin user for this Backupchecks installation.</p>
    <form method="post">
      <div class="mb-3">
        {# Fix: the label's `for` must match the input id ("username");
           it previously pointed at a non-existent id "email", which broke
           label-click focus and screen-reader association. #}
        <label for="username" class="form-label">Admin username</label>
        <input
          type="text"
          class="form-control"
          id="username"
          name="username"
          value="{{ email or '' }}"
          required
        />
      </div>
      <div class="mb-3">
        <label for="password" class="form-label">Password</label>
        <input
          type="password"
          class="form-control"
          id="password"
          name="password"
          required
        />
      </div>
      <div class="mb-3">
        <label for="confirm_password" class="form-label">Confirm password</label>
        <input
          type="password"
          class="form-control"
          id="confirm_password"
          name="confirm_password"
          required
        />
      </div>
      <button type="submit" class="btn btn-primary w-100">Create admin user</button>
    </form>
  </div>
</div>
{% endblock %}

View File

@ -0,0 +1,44 @@
{% extends "layout/base.html" %}
{% block content %}
{# Login form: username + password + arithmetic captcha.
   Posts back to the same URL; errors are re-rendered via flash messages. #}
<div class="row justify-content-center">
  <div class="col-md-4">
    <h2 class="mb-3">Login</h2>
    <form method="post">
      <div class="mb-3">
        {# Fix: the label's `for` must match the input id ("username");
           it previously pointed at a non-existent id "email". #}
        <label for="username" class="form-label">Username</label>
        <input
          type="text"
          class="form-control"
          id="username"
          name="username"
          value="{{ email or '' }}"
          required
        />
      </div>
      <div class="mb-3">
        <label for="password" class="form-label">Password</label>
        <input
          type="password"
          class="form-control"
          id="password"
          name="password"
          required
        />
      </div>
      <div class="mb-3">
        {# Fix: give the captcha input an id and associate the label with it
           so clicking the label focuses the field. #}
        <label for="captcha" class="form-label">Captcha: {{ captcha_question }}</label>
        <input
          type="text"
          class="form-control"
          id="captcha"
          name="captcha"
          required
        />
      </div>
      <button type="submit" class="btn btn-primary w-100">Login</button>
      <div class="mt-3 text-center">
        <a class="btn btn-link" href="{{ url_for('auth.password_reset_request') }}">Forgot password?</a>
      </div>
    </form>
  </div>
</div>
{% endblock %}

View File

@ -0,0 +1,31 @@
{% extends "layout/base.html" %}
{% block content %}
{# Password reset request form (placeholder flow): email + captcha. #}
<div class="row justify-content-center">
  <div class="col-md-5">
    <h2 class="mb-3">Password reset</h2>
    <p>This is a placeholder page. Password reset is not fully implemented yet.</p>
    <form method="post">
      <div class="mb-3">
        <label for="email" class="form-label">Email</label>
        <input
          type="email"
          class="form-control"
          id="email"
          name="email"
          required
        />
      </div>
      <div class="mb-3">
        {# Fix: give the captcha input an id and associate the label with it
           so clicking the label focuses the field. #}
        <label for="captcha" class="form-label">Captcha: {{ captcha_question }}</label>
        <input
          type="text"
          class="form-control"
          id="captcha"
          name="captcha"
          required
        />
      </div>
      <button type="submit" class="btn btn-primary w-100">Request reset</button>
    </form>
  </div>
</div>
{% endblock %}

View File

@ -0,0 +1,179 @@
<!doctype html>
{# Base layout: navbar + flash messages + content block.
   Child templates override `content` (and optionally `main_class`). #}
{% set _theme_pref = (current_user.theme_preference if current_user.is_authenticated else 'auto') %}
<html lang="en" data-theme-preference="{{ _theme_pref }}">
  <head>
    <meta charset="utf-8" />
    <title>Backupchecks</title>
    <meta name="viewport" content="width=device-width, initial-scale=1" />
    <link
      href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.3/dist/css/bootstrap.min.css"
      rel="stylesheet"
    />
    <link rel="stylesheet" href="{{ url_for('static', filename='css/layout.css') }}" />
    <link rel="stylesheet" href="{{ url_for('static', filename='css/status-text.css') }}" />
    <link rel="icon" type="image/x-icon" href="{{ url_for('static', filename='favicon.ico') }}" />
    <script>
      // Theme bootstrap: runs inline (before first paint) to avoid a flash of
      // the wrong theme. Maps the stored preference ('light'/'dark'/'auto')
      // onto Bootstrap's data-bs-theme attribute; 'auto' follows the OS via
      // the prefers-color-scheme media query.
      (function () {
        try {
          var root = document.documentElement;
          var pref = root.getAttribute('data-theme-preference') || 'auto';
          var mq = window.matchMedia ? window.matchMedia('(prefers-color-scheme: dark)') : null;
          function applyTheme() {
            var theme = pref;
            if (pref === 'auto') {
              theme = (mq && mq.matches) ? 'dark' : 'light';
            }
            root.setAttribute('data-bs-theme', theme);
          }
          applyTheme();
          // Re-apply when the OS theme changes, but only while in 'auto' mode.
          if (mq && typeof mq.addEventListener === 'function') {
            mq.addEventListener('change', function () {
              if ((root.getAttribute('data-theme-preference') || 'auto') === 'auto') {
                applyTheme();
              }
            });
          } else if (mq && typeof mq.addListener === 'function') {
            // Safari fallback
            mq.addListener(function () {
              if ((root.getAttribute('data-theme-preference') || 'auto') === 'auto') {
                applyTheme();
              }
            });
          }
        } catch (e) {
          // no-op
        }
      })();
    </script>
  </head>
  <body>
    {# Fixed-top navbar; menu entries are gated by authentication and by the
       active role (admin/operator). #}
    <nav class="navbar navbar-expand-lg fixed-top bg-body-tertiary border-bottom">
      <div class="container-fluid">
        <a class="navbar-brand" href="{{ url_for('main.dashboard') }}">Backupchecks</a>
        <button
          class="navbar-toggler"
          type="button"
          data-bs-toggle="collapse"
          data-bs-target="#navbarNav"
          aria-controls="navbarNav"
          aria-expanded="false"
          aria-label="Toggle navigation"
        >
          <span class="navbar-toggler-icon"></span>
        </button>
        <div class="collapse navbar-collapse" id="navbarNav">
          {% if current_user.is_authenticated %}
          <ul class="navbar-nav me-auto mb-2 mb-lg-0">
            <li class="nav-item">
              <a class="nav-link" href="{{ url_for('main.inbox') }}">Inbox</a>
            </li>
            {% if active_role == 'admin' %}
            <li class="nav-item">
              <a class="nav-link" href="{{ url_for('main.inbox_deleted_mails') }}">Deleted mails</a>
            </li>
            {% endif %}
            <li class="nav-item">
              <a class="nav-link" href="{{ url_for('main.customers') }}">Customers</a>
            </li>
            <li class="nav-item">
              <a class="nav-link" href="{{ url_for('main.jobs') }}">Jobs</a>
            </li>
            <li class="nav-item">
              <a class="nav-link" href="{{ url_for('main.daily_jobs') }}">Daily Jobs</a>
            </li>
            {% if active_role in ('admin', 'operator') %}
            <li class="nav-item">
              <a class="nav-link" href="{{ url_for('main.run_checks_page') }}">Run Checks</a>
            </li>
            {% endif %}
            <li class="nav-item">
              <a class="nav-link" href="{{ url_for('main.tickets_page') }}">Tickets</a>
            </li>
            <li class="nav-item">
              <a class="nav-link" href="{{ url_for('main.overrides') }}">Overrides</a>
            </li>
            <li class="nav-item">
              <a class="nav-link" href="{{ url_for('main.reports') }}">Reports</a>
            </li>
            {% if active_role == 'admin' %}
            <li class="nav-item">
              <a class="nav-link" href="{{ url_for('main.settings') }}">Settings</a>
            </li>
            <li class="nav-item">
              <a class="nav-link" href="{{ url_for('main.logging_page') }}">Logging</a>
            </li>
            <li class="nav-item">
              <a class="nav-link" href="{{ url_for('main.parsers_overview') }}">Parsers</a>
            </li>
            {% endif %}
            <li class="nav-item">
              <a class="nav-link" href='{{ url_for("main.changelog_page") }}'>Changelog</a>
            </li>
            <li class="nav-item">
              <a class="nav-link" href="{{ url_for('main.feedback_page') }}">Feedback</a>
            </li>
          </ul>
          {# Current user + active role; links to per-user settings. #}
          <span class="navbar-text me-3">
            <a class="text-decoration-none" href="{{ url_for('main.user_settings') }}">
              {{ current_user.username }} ({{ active_role }})
            </a>
          </span>
          {# Role switcher: only shown when the user has more than one role;
             submits immediately on change. #}
          {% if current_user.is_authenticated and user_roles|length > 1 %}
          <form method="post" action="{{ url_for('main.set_active_role_route') }}" class="me-2">
            <select
              class="form-select form-select-sm"
              name="active_role"
              aria-label="Role"
              onchange="this.form.submit()"
              style="min-width: 10rem; width: auto;"
            >
              {% for r in user_roles %}
              <option value="{{ r }}" {% if r == active_role %}selected{% endif %}>{{ r|capitalize }}</option>
              {% endfor %}
            </select>
          </form>
          {% endif %}
          {# Theme switcher: persists the preference server-side on change. #}
          <form method="post" action="{{ url_for('main.set_theme_preference') }}" class="me-2">
            <select
              class="form-select form-select-sm"
              name="theme"
              aria-label="Theme"
              onchange="this.form.submit()"
              style="width: auto;"
            >
              <option value="light" {% if _theme_pref == 'light' %}selected{% endif %}>Light</option>
              <option value="dark" {% if _theme_pref == 'dark' %}selected{% endif %}>Dark</option>
              <option value="auto" {% if _theme_pref == 'auto' %}selected{% endif %}>Auto</option>
            </select>
          </form>
          <a class="btn btn-outline-secondary" href="{{ url_for('auth.logout') }}">Logout</a>
          {% endif %}
        </div>
      </div>
    </nav>
    {# padding-top keeps content clear of the fixed navbar. #}
    <main class="{% block main_class %}container content-container{% endblock %}" style="padding-top: 80px;">
      {# Bootstrap-dismissible flash messages; category maps to alert color. #}
      {% with messages = get_flashed_messages(with_categories=true) %}
        {% if messages %}
        <div class="mb-3">
          {% for category, message in messages %}
          <div class="alert alert-{{ category }} alert-dismissible fade show" role="alert">
            {{ message }}
            <button type="button" class="btn-close" data-bs-dismiss="alert" aria-label="Close"></button>
          </div>
          {% endfor %}
        </div>
        {% endif %}
      {% endwith %}
      {% block content %}{% endblock %}
    </main>
    <script src="https://cdn.jsdelivr.net/npm/bootstrap@5.3.3/dist/js/bootstrap.bundle.min.js"></script>
  </body>
</html>

View File

@ -0,0 +1,78 @@
{% extends 'layout/base.html' %}
{% block content %}
{# Changelog page: renders `changelog.completed_summary` as an accordion,
   one panel per released version. Each item may carry an `overview`
   (paragraphs), `categories` (grouped items with details), or flat
   `highlights`; the template falls back through them in that order. #}
<div class="d-flex align-items-center justify-content-between mb-3">
  <div>
    <h1 class="h3 mb-1">Changelog</h1>
    <div class="text-body-secondary">Product versions and changes.</div>
  </div>
</div>
{# Completed (summary) #}
<div class="card mb-4">
  <div class="card-header d-flex align-items-center justify-content-between">
    <div class="fw-semibold">Completed</div>
    <span class="badge text-bg-primary">History</span>
  </div>
  <div class="card-body">
    {% if changelog.completed_summary and changelog.completed_summary|length > 0 %}
    <div class="accordion" id="changelogCompletedAccordion">
      {% for item in changelog.completed_summary %}
      <div class="accordion-item">
        {# Only the first (latest) version starts expanded. #}
        <h2 class="accordion-header" id="completedHeading{{ loop.index }}">
          <button class="accordion-button {% if not loop.first %}collapsed{% endif %}" type="button" data-bs-toggle="collapse" data-bs-target="#completedCollapse{{ loop.index }}" aria-expanded="{% if loop.first %}true{% else %}false{% endif %}" aria-controls="completedCollapse{{ loop.index }}">
            <span class="fw-semibold">v{{ item.version }}</span>
          </button>
        </h2>
        <div id="completedCollapse{{ loop.index }}" class="accordion-collapse collapse {% if loop.first %}show{% endif %}" aria-labelledby="completedHeading{{ loop.index }}" data-bs-parent="#changelogCompletedAccordion">
          <div class="accordion-body">
            {% if item.overview and item.overview|length > 0 %}
              {% for p in item.overview %}
              <p class="mb-2">{{ p }}</p>
              {% endfor %}
            {% endif %}
            {% if item.categories and item.categories|length > 0 %}
              {% for cat in item.categories %}
              <div class="fw-semibold mb-2">{{ cat.category }}</div>
              {# NOTE: 'items' is a dict key; use bracket notation to avoid calling dict.items() #}
              {% if cat['items'] and cat['items']|length > 0 %}
                {% for it in cat['items'] %}
                <div class="mb-3">
                  {% if it.title %}
                  <div class="fw-semibold">{{ it.title }}</div>
                  {% endif %}
                  {% if it.details and it.details|length > 0 %}
                  <ul class="mb-0">
                    {% for d in it.details %}
                    <li>{{ d }}</li>
                    {% endfor %}
                  </ul>
                  {% endif %}
                </div>
                {% endfor %}
              {% else %}
                <div class="text-body-secondary mb-3">No items in this section.</div>
              {% endif %}
              {% endfor %}
            {% elif item.highlights and item.highlights|length > 0 %}
              <ul class="mb-0">
                {% for h in item.highlights %}
                <li>{{ h }}</li>
                {% endfor %}
              </ul>
            {% else %}
              <div class="text-body-secondary">No details.</div>
            {% endif %}
          </div>
        </div>
      </div>
      {% endfor %}
    </div>
    {% else %}
    <div class="text-body-secondary">No completed items.</div>
    {% endif %}
  </div>
</div>
{% endblock %}

View File

@ -0,0 +1,174 @@
{% extends "layout/base.html" %}
{% block content %}
{# Customers page: list + create form + CSV import/export. Edit uses a shared
   Bootstrap modal whose form action is rewritten per row by the script below.
   Management controls are gated by `can_manage`. #}
<h2 class="mb-3">Customers</h2>
{% if can_manage %}
{# The data-1p-ignore / data-lpignore attributes ask password managers not to
   autofill the customer-name field. #}
<div class="d-flex align-items-center gap-2 mb-3">
  <form method="post" action="{{ url_for('main.customers_create') }}" class="d-flex align-items-center gap-2 mb-0" autocomplete="off" data-1p-ignore="true" data-lpignore="true">
    <input type="text" name="name" class="form-control form-control-sm" placeholder="New customer name" required style="max-width: 320px;" autocomplete="off" autocorrect="off" autocapitalize="off" spellcheck="false" data-1p-ignore="true" data-lpignore="true" data-form-type="other" />
    <div class="form-check form-check-inline mb-0">
      <input class="form-check-input" type="checkbox" name="active" id="newCustomerActive" checked />
      <label class="form-check-label small" for="newCustomerActive">Active</label>
    </div>
    <button type="submit" class="btn btn-primary btn-sm" style="white-space: nowrap;">Add</button>
  </form>
  <form method="post" action="{{ url_for('main.customers_import') }}" enctype="multipart/form-data" class="d-flex align-items-center gap-2 mb-0">
    <input type="file" name="file" accept=".csv,text/csv" class="form-control form-control-sm" required style="max-width: 420px;" />
    <button type="submit" class="btn btn-outline-secondary btn-sm" style="white-space: nowrap;">Import CSV</button>
  </form>
  <a class="btn btn-outline-secondary btn-sm" href="{{ url_for('main.customers_export') }}">Export CSV</a>
</div>
{% endif %}
<div class="table-responsive">
  <table class="table table-sm table-hover align-middle">
    <thead class="table-light">
      <tr>
        <th scope="col">Customer</th>
        <th scope="col">Active</th>
        <th scope="col">Number of jobs</th>
        {% if can_manage %}
        <th scope="col">Actions</th>
        {% endif %}
      </tr>
    </thead>
    <tbody>
      {% if customers %}
      {% for c in customers %}
      <tr>
        <td>{{ c.name }}</td>
        <td>
          {% if c.active %}
          <span class="badge bg-success">Active</span>
          {% else %}
          <span class="badge bg-secondary">Inactive</span>
          {% endif %}
        </td>
        <td>
          {# Highlight customers without any jobs (red bold zero). #}
          {% if c.job_count > 0 %}
          {{ c.job_count }}
          {% else %}
          <span class="text-danger fw-bold">0</span>
          {% endif %}
        </td>
        {% if can_manage %}
        <td>
          {# data-* attributes feed the shared edit modal (see script below). #}
          <button
            type="button"
            class="btn btn-sm btn-outline-primary me-1 customer-edit-btn"
            data-bs-toggle="modal"
            data-bs-target="#editCustomerModal"
            data-id="{{ c.id }}"
            data-name="{{ c.name }}"
            data-active="{{ '1' if c.active else '0' }}"
          >
            Edit
          </button>
          <form
            method="post"
            action="{{ url_for('main.customers_delete', customer_id=c.id) }}"
            class="d-inline"
            onsubmit="return confirm('Are you sure you want to delete this customer? All related jobs and mails will be removed.');"
          >
            <button type="submit" class="btn btn-sm btn-outline-danger">
              Delete
            </button>
          </form>
        </td>
        {% endif %}
      </tr>
      {% endfor %}
      {% else %}
      <tr>
        <td colspan="{% if can_manage %}4{% else %}3{% endif %}" class="text-center text-muted py-3">
          No customers found.
        </td>
      </tr>
      {% endif %}
    </tbody>
  </table>
</div>
{% if can_manage %}
<!-- Edit customer modal -->
<div class="modal fade" id="editCustomerModal" tabindex="-1" aria-labelledby="editCustomerModalLabel" aria-hidden="true">
  <div class="modal-dialog">
    <div class="modal-content">
      <form method="post" id="editCustomerForm" autocomplete="off" data-1p-ignore="true" data-lpignore="true">
        <div class="modal-header">
          <h5 class="modal-title" id="editCustomerModalLabel">Edit customer</h5>
          <button type="button" class="btn-close" data-bs-dismiss="modal" aria-label="Close"></button>
        </div>
        <div class="modal-body">
          <div class="mb-3">
            <label for="edit_customer_name" class="form-label">Customer name</label>
            <input
              type="text"
              class="form-control"
              id="edit_customer_name"
              name="name"
              autocomplete="off"
              autocorrect="off"
              autocapitalize="off"
              spellcheck="false"
              data-1p-ignore="true"
              data-lpignore="true"
              data-form-type="other"
              required
            />
          </div>
          <div class="form-check">
            <input
              class="form-check-input"
              type="checkbox"
              id="edit_customer_active"
              name="active"
            />
            <label class="form-check-label" for="edit_customer_active">
              Active
            </label>
          </div>
        </div>
        <div class="modal-footer">
          <button type="button" class="btn btn-secondary" data-bs-dismiss="modal">Cancel</button>
          <button type="submit" class="btn btn-primary">Save changes</button>
        </div>
      </form>
    </div>
  </div>
</div>
<script>
  // Populate the shared edit modal from the clicked row's data-* attributes
  // and rewrite the form action to target that customer's edit endpoint.
  (function () {
    document.addEventListener("DOMContentLoaded", function () {
      var editModalEl = document.getElementById("editCustomerModal");
      if (!editModalEl) {
        return;
      }
      var editForm = document.getElementById("editCustomerForm");
      var nameInput = document.getElementById("edit_customer_name");
      var activeInput = document.getElementById("edit_customer_active");
      var editButtons = document.querySelectorAll(".customer-edit-btn");
      editButtons.forEach(function (btn) {
        btn.addEventListener("click", function () {
          var id = btn.getAttribute("data-id");
          var name = btn.getAttribute("data-name") || "";
          var active = btn.getAttribute("data-active") === "1";
          nameInput.value = name;
          activeInput.checked = active;
          if (id) {
            // url_for needs a concrete id at render time; generate the URL
            // with a placeholder 0 and substitute the real id client-side.
            editForm.action = "{{ url_for('main.customers_edit', customer_id=0) }}".replace("0", id);
          }
        });
      });
    });
  })();
</script>
{% endif %}
{% endblock %}

View File

@ -0,0 +1,819 @@
{% extends "layout/base.html" %}
{% block content %}
<h2 class="mb-3">Daily Jobs</h2>
<form method="get" class="row g-3 mb-3">
<div class="col-auto">
<label for="dj_date" class="form-label">Date</label>
<input
type="date"
class="form-control"
id="dj_date"
name="date"
value="{{ target_date_str }}"
/>
</div>
<div class="col-auto align-self-end">
<button type="submit" class="btn btn-primary">Show</button>
</div>
</form>
<div class="table-responsive">
<table class="table table-sm table-hover align-middle" id="dailyJobsTable">
<thead class="table-light">
<tr>
<th scope="col">Customer</th>
<th scope="col">Backup</th>
<th scope="col">Type</th>
<th scope="col">Job name</th>
<th scope="col">Time</th>
<th scope="col">Last result</th>
</tr>
</thead>
<tbody>
{% if rows %}
{% for row in rows %}
<tr
class="daily-job-row"
data-job-id="{{ row.job_id }}"
data-date="{{ target_date_str }}"
style="cursor: pointer;"
>
<td>{{ row.customer_name or "-" }}</td>
<td>{{ row.backup_software or "-" }}</td>
<td>{{ row.backup_type or "-" }}</td>
<td>{{ row.job_name or "-" }}</td>
<td>{{ row.expected_time }}</td>
<td>
{% set _s = (row.last_status or "")|lower %}
{% set _dot = "" %}
{% if row.last_override_applied %}{% set _dot = "dot-override" %}
{% elif _s == "success" %}{% set _dot = "dot-success" %}
{% elif _s == "warning" %}{% set _dot = "dot-warning" %}
{% elif _s == "error" %}{% set _dot = "dot-failed" %}
{% elif _s == "failed" %}{% set _dot = "dot-failed" %}
{% elif _s == "missed" %}{% set _dot = "dot-missed" %}
{% elif _s == "expected" %}{% set _dot = "dot-expected" %}
{% endif %}
{% if _dot %}<span class="status-dot {{ _dot }} me-2" title="{{ row.last_status }}" aria-label="{{ row.last_status }}" role="img"></span>{% endif %}
{% if row.has_active_ticket or row.has_active_remark %}
<span class="ms-2" style="white-space:nowrap;">
{% if row.has_active_ticket %}<span class="dj-alert-icon dj-alert-ticket" title="Active ticket(s)">🎫</span>{% endif %}
{% if row.has_active_remark %}<span class="dj-alert-icon dj-alert-remark" title="Active remark(s)">💬</span>{% endif %}
</span>
{% endif %}
{% if row.run_count > 1 %}
<span class="badge bg-info ms-1">{{ row.run_count }} runs</span>
{% elif row.run_count == 1 %}
<span class="badge bg-secondary ms-1">1 run</span>
{% endif %}
</td>
</tr>
{% endfor %}
{% else %}
<tr>
<td colspan="6" class="text-center text-muted py-3">
No jobs are scheduled for this date.
</td>
</tr>
{% endif %}
</tbody>
</table>
</div>
<!-- Modal for daily job runs and mail details -->
<style>
/* Daily Jobs popup: match Inbox/Jobs width + keep scrolling INSIDE the popup (not the page). */
.modal-xxl { max-width: 98vw; }
@media (min-width: 1400px) { .modal-xxl { max-width: 1400px; } }
/* Constrain the modal to the viewport so the page behind it never needs to scroll. */
#dailyJobModal .modal-content {
height: 90vh;
display: flex;
flex-direction: column;
}
/* Keep modal-body itself from scrolling; scroll inside the mail/object panels. */
#dailyJobModal .modal-body {
overflow: hidden;
flex: 1 1 auto;
min-height: 0;
}
#dailyJobModal #dj_content {
height: 100%;
}
#dailyJobModal .dj-main-row {
height: 100%;
}
#dailyJobModal .dj-main-row > .col-md-3 {
display: flex;
flex-direction: column;
height: 100%;
min-height: 0;
}
#dailyJobModal .dj-detail-col {
display: flex;
flex-direction: column;
height: 100%;
min-height: 0;
}
#dailyJobModal .dj-mail-panel {
flex: 1 1 auto;
min-height: 0;
}
#dailyJobModal .dj-objects-panel {
flex: 0 0 auto;
}
/* Left runs list scrolls inside the popup. */
#dailyJobModal #dj_runs_list {
flex: 1 1 auto;
min-height: 0;
overflow: auto;
}
/* Mail + objects each have their own scroll areas, like Inbox/Jobs. */
#dailyJobModal #dj_body_iframe { height: 100%; }
#dailyJobModal .dj-objects-scroll { max-height: 25vh; overflow: auto; }
</style>
<div class="modal fade" id="dailyJobModal" tabindex="-1" aria-labelledby="dailyJobModalLabel" aria-hidden="true">
<div class="modal-dialog modal-xl modal-dialog-scrollable modal-xxl">
<div class="modal-content">
<div class="modal-header">
<h5 class="modal-title" id="dailyJobModalLabel">Job details</h5>
<button type="button" class="btn-close" data-bs-dismiss="modal" aria-label="Close"></button>
</div>
<div class="modal-body">
<div id="dj_loading" class="text-muted">Loading runs...</div>
<div id="dj_content" style="display: none;">
<div class="row mb-3">
<div class="col-12">
<div class="d-flex flex-wrap gap-3 small text-muted">
<div><strong>Customer:</strong> <span id="dj_customer"></span></div>
<div><strong>Backup:</strong> <span id="dj_backup"></span></div>
<div><strong>Type:</strong> <span id="dj_type"></span></div>
<div><strong>Job:</strong> <span id="dj_job"></span></div>
</div>
</div>
</div>
<div class="row g-3 dj-main-row">
<div class="col-md-3">
<h6 class="mb-2">Runs</h6>
<div id="dj_runs_list" class="list-group"></div>
</div>
<div class="col-md-9 dj-detail-col">
<dl class="row mb-3">
<dt class="col-3">From</dt>
<dd class="col-9" id="dj_from"></dd>
<dt class="col-3">Subject</dt>
<dd class="col-9" id="dj_subject"></dd>
<dt class="col-3">Received</dt>
<dd class="col-9" id="dj_received"></dd>
<dt class="col-3">Status</dt>
<dd class="col-9" id="dj_status"></dd>
<dt class="col-3">Remark</dt>
<dd class="col-9" id="dj_remark" style="white-space: pre-wrap;"></dd>
<dt class="col-3">Alerts</dt>
<dd class="col-9">
<div id="dj_alerts" class="small"></div>
<div class="mt-2">
<div class="row g-2 align-items-start">
<div class="col-12 col-lg-6">
<div class="border rounded p-2">
<div class="d-flex align-items-center justify-content-between">
<div class="fw-semibold">New ticket</div>
<button type="button" class="btn btn-sm btn-outline-primary" id="dj_ticket_save">Add</button>
</div>
<div class="mt-2">
<textarea class="form-control form-control-sm" id="dj_ticket_description" rows="2" placeholder="Description (optional)"></textarea>
</div>
<div class="mt-2 small text-muted" id="dj_ticket_status"></div>
</div>
</div>
<div class="col-12 col-lg-6">
<div class="border rounded p-2">
<div class="d-flex align-items-center justify-content-between">
<div class="fw-semibold">New remark</div>
<button type="button" class="btn btn-sm btn-outline-secondary" id="dj_remark_save">Add</button>
</div>
<div class="mt-2">
<textarea class="form-control form-control-sm" id="dj_remark_body" rows="2" placeholder="Body (required)"></textarea>
</div>
<div class="mt-2 small text-muted" id="dj_remark_status"></div>
</div>
</div>
</div>
</div>
</dd>
</dl>
<div class="mb-3 dj-mail-panel">
<h6>Mail</h6>
<iframe
id="dj_body_iframe"
class="border rounded"
style="width:100%;"
sandbox="allow-popups allow-popups-to-escape-sandbox allow-same-origin"
referrerpolicy="no-referrer"
></iframe>
</div>
<div class="dj-objects-panel">
<h6>Objects</h6>
<div class="table-responsive dj-objects-scroll">
<table class="table table-sm table-bordered" id="dj_objects_table">
<thead class="table-light" style="position: sticky; top: 0; z-index: 1;">
<tr>
<th scope="col">Object</th>
<th scope="col">Type</th>
<th scope="col">Status</th>
<th scope="col">Error</th>
</tr>
</thead>
<tbody></tbody>
</table>
</div>
</div>
</div>
</div>
</div>
<div id="dj_no_runs" class="text-muted" style="display: none;">
No runs found for this job on the selected date.
</div>
</div>
<div class="modal-footer">
<a id="dj_eml_btn" class="btn btn-outline-primary" href="#" style="display:none;" rel="nofollow">Download EML</a>
<button type="button" class="btn btn-secondary" data-bs-dismiss="modal">Close</button>
</div>
</div>
</div>
</div>
<script>
(function () {
function statusClass(status) {
  // Map a status string to its text-color CSS class ("" when unknown).
  var normalized = (status || "").toString().toLowerCase();
  // Derived display strings such as "Success (override)" take precedence.
  if (normalized.indexOf("override") !== -1) {
    return "status-override";
  }
  var mapping = {
    success: "status-success",
    warning: "status-warning",
    error: "status-error",
    failed: "status-failed",
    missed: "status-missed",
    expected: "status-expected"
  };
  // hasOwnProperty guard: never resolve inherited Object keys by accident.
  return Object.prototype.hasOwnProperty.call(mapping, normalized)
    ? mapping[normalized]
    : "";
}
function statusDotClass(status) {
  // Map a status string to its colored-dot CSS class ("" when unknown).
  // Note: "error" and "failed" intentionally share the same dot style.
  var normalized = (status || "").toString().toLowerCase();
  // Derived display strings such as "Success (override)" take precedence.
  if (normalized.indexOf("override") !== -1) {
    return "dot-override";
  }
  var mapping = {
    success: "dot-success",
    warning: "dot-warning",
    error: "dot-failed",
    failed: "dot-failed",
    missed: "dot-missed",
    expected: "dot-expected"
  };
  // hasOwnProperty guard: never resolve inherited Object keys by accident.
  return Object.prototype.hasOwnProperty.call(mapping, normalized)
    ? mapping[normalized]
    : "";
}
function wrapMailHtml(html) {
  // Wrap raw mail HTML in a minimal standalone document for the srcdoc
  // iframe, so the mail renders with its own CSS, isolated from site styling.
  // <base target="_blank"> makes every mail link open in a new tab.
  var body = html || "";
  return [
    '<!doctype html><html><head><meta charset="utf-8">',
    '<base target="_blank">',
    '</head><body style="margin:0; padding:8px;">',
    body,
    "</body></html>"
  ].join("");
}
// Modal-wide selection state, shared by the helpers below:
// currentJobId is set when a daily-job row is clicked; currentRunId tracks
// the run selected in the runs list (null until renderRun picks one).
var currentJobId = null;
var currentRunId = null;
function escapeHtml(s) {
  // HTML-escape a value for safe interpolation into innerHTML strings.
  // Fix: the previous `(s || "")` coercion silently dropped falsy non-null
  // values such as 0 and false; only null/undefined should map to "".
  // All existing callers pass strings (often pre-guarded with `|| ''`),
  // so this change is backward-compatible for them.
  if (s === null || s === undefined) {
    return "";
  }
  return String(s)
    .replace(/&/g, "&amp;")
    .replace(/</g, "&lt;")
    .replace(/>/g, "&gt;")
    .replace(/"/g, "&quot;")
    .replace(/'/g, "&#39;");
}
function apiJson(url, opts) {
  // fetch() wrapper for the JSON API: forces a JSON Content-Type, then
  // resolves with the parsed payload only when the HTTP status is OK AND
  // the payload itself reports status === "ok". Any other outcome rejects
  // with the server-provided message (or a generic one with the HTTP code).
  opts = opts || {};
  opts.headers = opts.headers || {};
  opts.headers["Content-Type"] = "application/json";
  return fetch(url, opts).then(function (r) {
    // Parse the body first so server error messages can be surfaced.
    return r.json().then(function (j) {
      if (!r.ok || !j || j.status !== "ok") {
        var msg = (j && j.message) ? j.message : ("Request failed (" + r.status + ")");
        throw new Error(msg);
      }
      return j;
    });
  });
}
function renderAlerts(payload) {
  // Render the "Meldingen" panel for the selected run: the run's tickets and
  // remarks, each with inline Edit / Resolve controls wired via event
  // delegation on data-action buttons. payload is the JSON from
  // GET /api/job-runs/<id>/alerts (see loadAlerts).
  var box = document.getElementById("dj_alerts");
  if (!box) return;
  var tickets = (payload && payload.tickets) || [];
  var remarks = (payload && payload.remarks) || [];
  if (!tickets.length && !remarks.length) {
    box.innerHTML = '<span class="text-muted">No tickets or remarks linked to this run.</span>';
    return;
  }
  var html = '';
  if (tickets.length) {
    // Ticket cards: header row (code + Active/Resolved badge + buttons)
    // plus a hidden inline edit form toggled by data-action buttons below.
    // NOTE(review): t.id is interpolated unescaped into HTML attributes —
    // presumably a numeric DB id; confirm against the API.
    html += '<div class="mb-2"><strong>Tickets</strong><div class="mt-1">';
    tickets.forEach(function (t) {
      var status = t.resolved_at ? 'Resolved' : 'Active';
      html += '<div class="mb-2 border rounded p-2" data-alert-type="ticket" data-id="' + t.id + '">' +
        '<div class="d-flex align-items-start justify-content-between gap-2">' +
        '<div class="flex-grow-1 min-w-0">' +
        '<div class="text-truncate">' +
        '<span class="me-1" title="Ticket">🎫</span>' +
        '<span class="fw-semibold">' + escapeHtml(t.ticket_code || '') + '</span>' +
        '<span class="ms-2 badge ' + (t.resolved_at ? 'bg-secondary' : 'bg-warning text-dark') + '">' + status + '</span>' +
        '</div>' +
        (t.description ? ('<div class="small text-muted mt-1">' + escapeHtml(t.description) + '</div>') : '') +
        '</div>' +
        '<div class="d-flex gap-1 flex-shrink-0">' +
        '<button type="button" class="btn btn-sm btn-outline-secondary" data-action="toggle-edit-ticket" data-id="' + t.id + '" ' + (t.resolved_at ? 'disabled' : '') + '>Edit</button>' +
        '<button type="button" class="btn btn-sm btn-outline-success" data-action="resolve-ticket" data-id="' + t.id + '" ' + (t.resolved_at ? 'disabled' : '') + '>Resolve</button>' +
        '</div>' +
        '</div>' +
        '<div class="mt-2" data-edit="ticket" style="display:none;">' +
        '<div class="row g-2">' +
        '<div class="col-12">' +
        '<textarea class="form-control form-control-sm" data-field="description" rows="2" placeholder="Description (optional)">' + escapeHtml(t.description || '') + '</textarea>' +
        '</div>' +
        '<div class="col-12 d-flex gap-2">' +
        '<button type="button" class="btn btn-sm btn-primary" data-action="save-ticket" data-id="' + t.id + '">Save</button>' +
        '<button type="button" class="btn btn-sm btn-outline-secondary" data-action="cancel-edit" data-id="' + t.id + '">Cancel</button>' +
        '<div class="small text-muted align-self-center" data-field="status"></div>' +
        '</div>' +
        '</div>' +
        '</div>' +
        '</div>';
    });
    html += '</div></div>';
  }
  if (remarks.length) {
    // Remark cards: same card structure as tickets, with a required "body"
    // field instead of the optional ticket description.
    html += '<div class="mb-2"><strong>Remarks</strong><div class="mt-1">';
    remarks.forEach(function (r) {
      var status = r.resolved_at ? 'Resolved' : 'Active';
      html += '<div class="mb-2 border rounded p-2" data-alert-type="remark" data-id="' + r.id + '">' +
        '<div class="d-flex align-items-start justify-content-between gap-2">' +
        '<div class="flex-grow-1 min-w-0">' +
        '<div class="text-truncate">' +
        '<span class="me-1" title="Remark">💬</span>' +
        '<span class="fw-semibold">Remark</span>' +
        '<span class="ms-2 badge ' + (r.resolved_at ? 'bg-secondary' : 'bg-warning text-dark') + '">' + status + '</span>' +
        '</div>' +
        (r.body ? ('<div class="small text-muted mt-1">' + escapeHtml(r.body) + '</div>') : '') +
        '</div>' +
        '<div class="d-flex gap-1 flex-shrink-0">' +
        '<button type="button" class="btn btn-sm btn-outline-secondary" data-action="toggle-edit-remark" data-id="' + r.id + '" ' + (r.resolved_at ? 'disabled' : '') + '>Edit</button>' +
        '<button type="button" class="btn btn-sm btn-outline-success" data-action="resolve-remark" data-id="' + r.id + '" ' + (r.resolved_at ? 'disabled' : '') + '>Resolve</button>' +
        '</div>' +
        '</div>' +
        '<div class="mt-2" data-edit="remark" style="display:none;">' +
        '<div class="row g-2">' +
        '<div class="col-12">' +
        '<textarea class="form-control form-control-sm" data-field="body" rows="2" placeholder="Body (required)">' + escapeHtml(r.body || '') + '</textarea>' +
        '</div>' +
        '<div class="col-12 d-flex gap-2">' +
        '<button type="button" class="btn btn-sm btn-primary" data-action="save-remark" data-id="' + r.id + '">Save</button>' +
        '<button type="button" class="btn btn-sm btn-outline-secondary" data-action="cancel-edit" data-id="' + r.id + '">Cancel</button>' +
        '<div class="small text-muted align-self-center" data-field="status"></div>' +
        '</div>' +
        '</div>' +
        '</div>' +
        '</div>';
    });
    html += '</div></div>';
  }
  box.innerHTML = html;
  // Wire up all action buttons. Handlers are re-attached on every render
  // because innerHTML replacement above discards previous listeners.
  // Every successful mutation reloads the panel via loadAlerts(currentRunId).
  Array.prototype.forEach.call(box.querySelectorAll('button[data-action]'), function (btn) {
    btn.addEventListener('click', function (ev) {
      ev.preventDefault();
      var action = btn.getAttribute('data-action');
      var id = btn.getAttribute('data-id');
      if (!action || !id) return;
      // The enclosing alert card (carries data-alert-type + data-id).
      var wrapper = btn.closest('[data-alert-type]');
      if (action === 'resolve-ticket') {
        if (!confirm('Mark ticket as resolved?')) return;
        apiJson('/api/tickets/' + encodeURIComponent(id) + '/resolve', {method: 'POST', body: '{}'})
          .then(function () { loadAlerts(currentRunId); })
          .catch(function (e) { alert(e.message || 'Failed.'); });
      } else if (action === 'resolve-remark') {
        if (!confirm('Mark remark as resolved?')) return;
        apiJson('/api/remarks/' + encodeURIComponent(id) + '/resolve', {method: 'POST', body: '{}'})
          .then(function () { loadAlerts(currentRunId); })
          .catch(function (e) { alert(e.message || 'Failed.'); });
      } else if (action === 'toggle-edit-ticket') {
        if (!wrapper) return;
        var edit = wrapper.querySelector('[data-edit="ticket"]');
        if (!edit) return;
        edit.style.display = (edit.style.display === 'none' || !edit.style.display) ? '' : 'none';
      } else if (action === 'toggle-edit-remark') {
        if (!wrapper) return;
        var edit2 = wrapper.querySelector('[data-edit="remark"]');
        if (!edit2) return;
        edit2.style.display = (edit2.style.display === 'none' || !edit2.style.display) ? '' : 'none';
      } else if (action === 'cancel-edit') {
        if (!wrapper) return;
        var editAny = wrapper.querySelector('[data-edit]');
        if (editAny) editAny.style.display = 'none';
      } else if (action === 'save-ticket') {
        if (!wrapper) return;
        var editT = wrapper.querySelector('[data-edit="ticket"]');
        if (!editT) return;
        var descEl = editT.querySelector('[data-field="description"]');
        var statusEl = editT.querySelector('[data-field="status"]');
        var descVal = descEl ? descEl.value : '';
        if (statusEl) statusEl.textContent = 'Saving...';
        apiJson('/api/tickets/' + encodeURIComponent(id), {
          method: 'PATCH',
          body: JSON.stringify({description: descVal})
        })
          .then(function () { loadAlerts(currentRunId); })
          .catch(function (e) {
            if (statusEl) statusEl.textContent = e.message || 'Failed.';
            else alert(e.message || 'Failed.');
          });
      } else if (action === 'save-remark') {
        if (!wrapper) return;
        var editR = wrapper.querySelector('[data-edit="remark"]');
        if (!editR) return;
        var bodyEl2 = editR.querySelector('[data-field="body"]');
        var statusEl2 = editR.querySelector('[data-field="status"]');
        var bodyVal2 = bodyEl2 ? bodyEl2.value : '';
        // Remark body is required; validate client-side before the PATCH.
        if (!bodyVal2 || !bodyVal2.trim()) {
          if (statusEl2) statusEl2.textContent = 'Body is required.';
          else alert('Body is required.');
          return;
        }
        if (statusEl2) statusEl2.textContent = 'Saving...';
        apiJson('/api/remarks/' + encodeURIComponent(id), {
          method: 'PATCH',
          body: JSON.stringify({body: bodyVal2})
        })
          .then(function () { loadAlerts(currentRunId); })
          .catch(function (e) {
            if (statusEl2) statusEl2.textContent = e.message || 'Failed.';
            else alert(e.message || 'Failed.');
          });
      }
    });
  });
}
function loadAlerts(runId) {
  // Fetch tickets/remarks for one run and hand them to renderAlerts().
  // With a falsy runId the panel shows a hint and the inline create forms
  // are reset/disabled instead.
  var box = document.getElementById('dj_alerts');
  if (!box) return;
  if (!runId) {
    box.innerHTML = '<span class="text-muted">Select a run to manage tickets / remarks.</span>';
    // Hooks installed by bindInlineCreateForms(); guarded because this may
    // run before those forms have been bound.
    if (window.__djClearCreateStatus) {
      window.__djClearCreateStatus();
    }
    if (window.__djSetCreateDisabled) {
      window.__djSetCreateDisabled(true);
    }
    return;
  }
  box.innerHTML = '<span class="text-muted">Loading alerts...</span>';
  // Plain fetch (not apiJson): a GET needs no JSON Content-Type header.
  fetch('/api/job-runs/' + encodeURIComponent(runId) + '/alerts')
    .then(function (r) { return r.json(); })
    .then(function (j) {
      if (!j || j.status !== 'ok') {
        var msg = (j && j.message) ? j.message : 'Failed to load alerts.';
        throw new Error(msg);
      }
      renderAlerts(j);
    })
    .catch(function (e) {
      // Error text is escaped before being injected into innerHTML.
      box.innerHTML = '<span class="text-danger">' + escapeHtml(e.message || 'Failed') + '</span>';
    });
}
function bindInlineCreateForms() {
  // One-time wiring (on DOMContentLoaded) of the "New ticket" / "New remark"
  // inline create forms in the modal. Publishes two window-level hooks —
  // __djSetCreateDisabled and __djClearCreateStatus — so loadAlerts() and
  // renderRun() can reset/disable the forms when the selected run changes.
  var btnTicket = document.getElementById('dj_ticket_save');
  var btnRemark = document.getElementById('dj_remark_save');
  var tDesc = document.getElementById('dj_ticket_description');
  var tStatus = document.getElementById('dj_ticket_status');
  var rBody = document.getElementById('dj_remark_body');
  var rStatus = document.getElementById('dj_remark_status');
  // Clear both inline status lines.
  function clearStatus() {
    if (tStatus) tStatus.textContent = '';
    if (rStatus) rStatus.textContent = '';
  }
  // Enable/disable both create forms (buttons + textareas) at once.
  function setDisabled(disabled) {
    if (btnTicket) btnTicket.disabled = disabled;
    if (btnRemark) btnRemark.disabled = disabled;
    if (tDesc) tDesc.disabled = disabled;
    if (rBody) rBody.disabled = disabled;
  }
  window.__djSetCreateDisabled = setDisabled;
  window.__djClearCreateStatus = clearStatus;
  if (btnTicket) {
    btnTicket.addEventListener('click', function () {
      // Tickets need a run to attach to; description is optional.
      if (!currentRunId) { alert('Select a run first.'); return; }
      clearStatus();
      var description = tDesc ? tDesc.value : '';
      if (tStatus) tStatus.textContent = 'Saving...';
      apiJson('/api/tickets', {
        method: 'POST',
        body: JSON.stringify({job_run_id: currentRunId, description: description})
      })
        .then(function () {
          // Reset the form and refresh the alerts panel on success.
          if (tDesc) tDesc.value = '';
          if (tStatus) tStatus.textContent = '';
          loadAlerts(currentRunId);
        })
        .catch(function (e) {
          if (tStatus) tStatus.textContent = e.message || 'Failed.';
          else alert(e.message || 'Failed.');
        });
    });
  }
  if (btnRemark) {
    btnRemark.addEventListener('click', function () {
      // Remarks need a run AND a non-blank body (validated client-side).
      if (!currentRunId) { alert('Select a run first.'); return; }
      clearStatus();
      var body = rBody ? rBody.value : '';
      if (!body || !body.trim()) {
        if (rStatus) rStatus.textContent = 'Body is required.';
        else alert('Body is required.');
        return;
      }
      if (rStatus) rStatus.textContent = 'Saving...';
      apiJson('/api/remarks', {
        method: 'POST',
        body: JSON.stringify({job_run_id: currentRunId, body: body})
      })
        .then(function () {
          if (rBody) rBody.value = '';
          if (rStatus) rStatus.textContent = '';
          loadAlerts(currentRunId);
        })
        .catch(function (e) {
          if (rStatus) rStatus.textContent = e.message || 'Failed.';
          else alert(e.message || 'Failed.');
        });
    });
  }
  // Disabled until renderRun() selects a run and re-enables the forms.
  setDisabled(true);
}
function renderRun(data, runIndex) {
  // Populate the modal detail pane for runs[runIndex]: mail headers, status,
  // remark, alerts panel, EML download button, mail body iframe, and the
  // objects table. data is the payload from daily_jobs_details; the runs
  // array is ordered newest-first, so index 0 is the latest run.
  var runs = data.runs || [];
  if (!runs.length) {
    document.getElementById("dj_content").style.display = "none";
    var nr = document.getElementById("dj_no_runs");
    nr.textContent = (data && data.message) ? data.message : "No runs found.";
    nr.style.display = "block";
    return;
  }
  // Clamp out-of-range indices to the newest run.
  if (runIndex < 0 || runIndex >= runs.length) {
    runIndex = 0; // default to newest run
  }
  var run = runs[runIndex] || {};
  // Ensure overrides are consistently visible in Daily Jobs.
  var runStatusDisplay = run.status || "";
  if (run.override_applied && runStatusDisplay.toString().toLowerCase().indexOf("override") === -1) {
    runStatusDisplay = (runStatusDisplay ? (runStatusDisplay + " (override)") : "Success (override)");
  }
  var emlBtn = document.getElementById("dj_eml_btn");
  if (emlBtn) {
    if (run.has_eml && run.mail_message_id) {
      // url_for is rendered server-side with a placeholder id of 0, then the
      // real message id is substituted client-side.
      // NOTE(review): .replace("0", ...) swaps only the FIRST "0" in the
      // URL — this assumes the route path contains no other "0"; verify.
      emlBtn.href = "{{ url_for('main.message_eml', message_id=0) }}".replace("0", String(run.mail_message_id));
      emlBtn.style.display = "inline-block";
    } else {
      emlBtn.href = "#";
      emlBtn.style.display = "none";
    }
  }
  // Highlight active run in the list
  var list = document.getElementById("dj_runs_list");
  if (list) {
    Array.prototype.forEach.call(list.querySelectorAll(".list-group-item"), function (el) {
      if (String(el.getAttribute("data-run-idx")) === String(runIndex)) {
        el.classList.add("active");
      } else {
        el.classList.remove("active");
      }
    });
  }
  // Status line: colored dot (when mapped) + escaped status text.
  var stEl = document.getElementById("dj_status");
  if (stEl) {
    var d = statusDotClass(runStatusDisplay);
    stEl.innerHTML = (d ? ('<span class="status-dot ' + d + ' me-2" aria-hidden="true"></span>') : '') + escapeHtml(runStatusDisplay || '');
  }
  document.getElementById("dj_remark").textContent = run.remark || "";
  // Update shared selection state, then reset the inline create forms and
  // reload the alerts panel for this run.
  currentRunId = run.id || null;
  if (window.__djClearCreateStatus) {
    window.__djClearCreateStatus();
  }
  if (window.__djSetCreateDisabled) {
    window.__djSetCreateDisabled(!currentRunId);
  }
  loadAlerts(currentRunId);
  var mail = run.mail || null;
  if (mail) {
    document.getElementById("dj_from").textContent = mail.from_address || "";
    document.getElementById("dj_subject").textContent = mail.subject || "";
    document.getElementById("dj_received").textContent = mail.received_at || "";
  } else {
    document.getElementById("dj_from").textContent = "";
    document.getElementById("dj_subject").textContent = "";
    document.getElementById("dj_received").textContent = "";
  }
  // Mail body goes into a sandboxed srcdoc iframe (see wrapMailHtml) so the
  // mail's own markup/CSS cannot affect the page.
  var bodyFrame = document.getElementById("dj_body_iframe");
  if (bodyFrame) {
    bodyFrame.srcdoc = wrapMailHtml(run.body_html || "");
  }
  // Rebuild the objects table; textContent assignments keep values inert.
  var tbody = document.querySelector("#dj_objects_table tbody");
  tbody.innerHTML = "";
  (run.objects || []).forEach(function (obj) {
    var tr = document.createElement("tr");
    var tdName = document.createElement("td");
    tdName.textContent = obj.name || "";
    tr.appendChild(tdName);
    var tdType = document.createElement("td");
    tdType.textContent = obj.type || "";
    tr.appendChild(tdType);
    var tdStatus = document.createElement("td");
    tdStatus.className = "status-text " + statusClass(obj.status);
    var d = statusDotClass(obj.status);
    tdStatus.innerHTML = (d ? ('<span class="status-dot ' + d + ' me-2" aria-hidden="true"></span>') : '') + escapeHtml(obj.status || '');
    tr.appendChild(tdStatus);
    var tdError = document.createElement("td");
    tdError.textContent = obj.error_message || "";
    tr.appendChild(tdError);
    tbody.appendChild(tr);
  });
}
function attachDailyJobsHandlers() {
  // Attach click handlers to every .daily-job-row in the table: clicking a
  // row opens the modal, fetches the job's runs for the row's date, builds
  // the runs list, and renders the newest run.
  var rows = document.querySelectorAll(".daily-job-row");
  if (!rows.length) {
    return;
  }
  rows.forEach(function (row) {
    row.addEventListener("click", function () {
      var jobId = row.getAttribute("data-job-id");
      var dateStr = row.getAttribute("data-date");
      if (!jobId || !dateStr) {
        return;
      }
      currentJobId = jobId;
      // Show the modal immediately with the loading state while fetching.
      var modalEl = document.getElementById("dailyJobModal");
      var modal = bootstrap.Modal.getOrCreateInstance(modalEl);
      modal.show();
      document.getElementById("dj_loading").style.display = "block";
      document.getElementById("dj_content").style.display = "none";
      document.getElementById("dj_no_runs").style.display = "none";
      fetch(
        "{{ url_for('main.daily_jobs_details') }}" +
        "?job_id=" +
        encodeURIComponent(jobId) +
        "&date=" +
        encodeURIComponent(dateStr)
      )
        .then(function (response) {
          // Guard against HTML error pages (e.g. login redirects) being
          // parsed as JSON.
          var ct = (response.headers && response.headers.get("content-type")) || "";
          if (!response.ok) {
            throw new Error("Failed to load runs (" + response.status + ").");
          }
          if (ct.indexOf("application/json") === -1) {
            throw new Error("Unexpected response while loading runs.");
          }
          return response.json();
        })
        .then(function (data) {
          document.getElementById("dj_loading").style.display = "none";
          if (!data || data.status !== "ok") {
            var nr = document.getElementById("dj_no_runs");
            nr.textContent = (data && data.message) ? data.message : "No runs found.";
            nr.style.display = "block";
            return;
          }
          var job = data.job || {};
          var runs = data.runs || [];
          // Job header fields + modal title.
          document.getElementById("dj_customer").textContent = job.customer_name || "";
          document.getElementById("dj_backup").textContent = job.backup_software || "";
          document.getElementById("dj_type").textContent = job.backup_type || "";
          document.getElementById("dj_job").textContent = job.job_name || "";
          document.getElementById("dailyJobModalLabel").textContent =
            (job.job_name || "Job") + " - " + (job.customer_name || "");
          // Rebuild the runs list; each entry re-renders the detail pane.
          var listEl = document.getElementById("dj_runs_list");
          listEl.innerHTML = "";
          runs.forEach(function (run, idx) {
            var a = document.createElement("button");
            a.type = "button";
            a.className = "list-group-item list-group-item-action";
            a.setAttribute("data-run-idx", String(idx));
            // Same override-labelling rule as renderRun().
            var rs = run.status || "";
            if (run.override_applied && rs.toString().toLowerCase().indexOf("override") === -1) {
              rs = (rs ? (rs + " (override)") : "Success (override)");
            }
            var label = (run.run_at || "Run") + " - " + (rs || "");
            a.textContent = label;
            a.addEventListener("click", function (ev) {
              ev.preventDefault();
              renderRun(data, idx);
            });
            listEl.appendChild(a);
          });
          if (runs.length) {
            document.getElementById("dj_content").style.display = "block";
            document.getElementById("dj_no_runs").style.display = "none";
            // runs are returned newest-first
            renderRun(data, 0);
          } else {
            document.getElementById("dj_content").style.display = "none";
            var nr = document.getElementById("dj_no_runs");
            nr.textContent = (data && data.message) ? data.message : "No runs found.";
            nr.style.display = "block";
          }
        })
        .catch(function (err) {
          console.error(err);
          // Re-show the modal defensively (it may have been dismissed while
          // the request was in flight); swallow any Bootstrap error.
          try { modal.show(); } catch (e) {}
          document.getElementById("dj_loading").style.display = "none";
          document.getElementById("dj_content").style.display = "none";
          var nr = document.getElementById("dj_no_runs");
          nr.textContent = (err && err.message) ? err.message : "No runs found.";
          nr.style.display = "block";
        });
    });
  });
}
// Entry point: wire the inline create forms and the row click handlers once
// the DOM is ready.
document.addEventListener("DOMContentLoaded", function () {
  bindInlineCreateForms();
  attachDailyJobsHandlers();
});
})();
</script>
{% endblock %}

View File

@ -0,0 +1,173 @@
{% extends "layout/base.html" %}
{% block main_class %}container dashboard-container{% endblock %}
{% block content %}
<h2 class="mb-4">Dashboard</h2>
<div class="row g-3 mb-4">
<div class="col-12 col-md-3">
<div class="card h-100">
<div class="card-body">
<div class="text-muted">Inbox</div>
<div class="display-6 mb-0">{{ inbox_count }}</div>
<div class="text-muted small mt-2">Open items</div>
</div>
</div>
</div>
<div class="col-12 col-md-9">
<div class="row g-3">
<div class="col-6 col-lg-2">
<div class="card h-100">
<div class="card-body">
<div class="text-muted"><span class="status-dot dot-success me-2" aria-hidden="true"></span>Success</div>
<div class="display-6 mb-0">{{ jobs_success_count }}</div>
</div>
</div>
</div>
<div class="col-6 col-lg-2">
<div class="card h-100">
<div class="card-body">
<div class="text-muted"><span class="status-dot dot-override me-2" aria-hidden="true"></span><span class="text-nowrap">Success (override)</span></div>
<div class="display-6 mb-0">{{ jobs_success_override_count }}</div>
</div>
</div>
</div>
<div class="col-6 col-lg-2">
<div class="card h-100">
<div class="card-body">
<div class="text-muted"><span class="status-dot dot-expected me-2" aria-hidden="true"></span>Expected</div>
<div class="display-6 mb-0">{{ jobs_expected_count }}</div>
</div>
</div>
</div>
<div class="col-6 col-lg-2">
<div class="card h-100">
<div class="card-body">
<div class="text-muted"><span class="status-dot dot-warning me-2" aria-hidden="true"></span>Warning</div>
<div class="display-6 mb-0">{{ jobs_warning_count }}</div>
</div>
</div>
</div>
<div class="col-6 col-lg-2">
<div class="card h-100">
<div class="card-body">
<div class="text-muted"><span class="status-dot dot-failed me-2" aria-hidden="true"></span>Failed</div>
<div class="display-6 mb-0">{{ jobs_error_count }}</div>
</div>
</div>
</div>
<div class="col-6 col-lg-2">
<div class="card h-100">
<div class="card-body">
<div class="text-muted"><span class="status-dot dot-missed me-2" aria-hidden="true"></span>Missed</div>
<div class="display-6 mb-0">{{ jobs_missed_count }}</div>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
<div class="card mb-4">
<div class="card-header">Legend</div>
<div class="card-body">
<div class="d-flex flex-column gap-2 small">
<div><span class="status-dot dot-success me-2" aria-hidden="true"></span><strong>Success</strong> — job run completed successfully</div>
<div><span class="status-dot dot-failed me-2" aria-hidden="true"></span><strong>Failed</strong> — job run failed, action required</div>
<div><span class="status-dot dot-warning me-2" aria-hidden="true"></span><strong>Warning</strong> — job run completed with a warning</div>
<div><span class="status-dot dot-missed me-2" aria-hidden="true"></span><strong>Missed</strong> — job run expected but did not execute</div>
<div><span class="status-dot dot-expected me-2" aria-hidden="true"></span><strong>Expected</strong> — job run not yet due</div>
<div><span class="status-dot dot-override me-2" aria-hidden="true"></span><strong>Success (override)</strong> — marked as successful via override</div>
</div>
</div>
</div>
{% if news_items %}
<div class="card mb-4">
<div class="card-header d-flex align-items-center justify-content-between">
<span>News</span>
{% if active_role == 'admin' %}
<a class="btn btn-sm btn-outline-secondary" href="{{ url_for('main.settings', section='news') }}">Manage</a>
{% endif %}
</div>
<div class="card-body">
<div class="d-flex flex-column gap-3">
{% for item in news_items %}
<div class="border rounded p-3">
<div class="d-flex flex-wrap align-items-center justify-content-between gap-2 mb-2">
<div class="fw-semibold">{{ item.title }}</div>
<div class="d-flex align-items-center gap-2">
{% if item.pinned %}
<span class="badge text-bg-secondary">Pinned</span>
{% endif %}
{% if item.severity == 'warning' %}
<span class="badge text-bg-warning">Warning</span>
{% else %}
<span class="badge text-bg-info">Info</span>
{% endif %}
</div>
</div>
<div class="small text-muted mb-2" style="white-space: pre-wrap;">{{ item.body }}</div>
<div class="d-flex flex-wrap gap-2">
{% if item.link_url %}
<a class="btn btn-sm btn-outline-primary" href="{{ item.link_url }}" target="_blank" rel="noopener">Open link</a>
{% endif %}
<form method="post" action="{{ url_for('main.news_mark_read', news_id=item.id) }}">
<button type="submit" class="btn btn-sm btn-outline-success">Mark as read</button>
</form>
</div>
</div>
{% endfor %}
</div>
</div>
</div>
{% endif %}
<div class="mt-3 small text-muted">
<p>Backupchecks provides a centralized and consistent overview of the health and reliability of all backups within your environment. The platform collects backup results from multiple backup solutions and normalizes them into a single, clear and consistent status model. This enables teams to monitor backup quality across different vendors and environments in a predictable and uniform way.</p>
<p>Backup results are imported and evaluated automatically. Each backup run is analyzed and assigned a status such as Success, Warning, Failed, or Success (override). These statuses are determined by interpreting exit codes, detected error messages, log content, and configured rules, ensuring that the reported outcome reflects the real operational impact rather than raw technical output alone.</p>
<p>The dashboard provides an at-a-glance overview of the current backup situation:</p>
<ul>
<li>A consolidated summary of all monitored backup jobs and their latest known results.</li>
<li>Clear counters for successful, warning, failed, and overridden runs.</li>
<li>Immediate visibility into environments that require attention or follow-up.</li>
</ul>
<p>The Daily Jobs view shows the most recent run per backup job, grouped by customer, backup software, and backup type. This view is intended for high-level monitoring and trend awareness. It reflects the latest state of each job, but it does not replace the daily operational review process.</p>
<p>Daily operational validation is performed from the Run Checks page. This page acts as the primary workspace for reviewing backup runs. All runs that require attention are listed here, allowing operators to systematically review results and decide on the appropriate next step. The main objective of this process is to actively review backup runs and keep the Run Checks page clear.</p>
<p>When reviewing a run, operators assess whether the result is acceptable, requires follow-up, or can be treated as successful. A run can be marked as reviewed once it has been checked, even if additional actions are required. Marking a run as reviewed confirms that the result has been acknowledged and assessed, and prevents it from repeatedly appearing as unprocessed.</p>
<p>If a backup run requires further investigation or corrective action, operators can add a remark or reference an external ticket number. After adding this information, the run can still be marked as reviewed, ensuring that it no longer blocks daily checks.</p>
<p>Reviewed runs that require follow-up retain their status until they are explicitly marked as resolved. The reviewed state remains in place to indicate that the run has been handled operationally, while the resolved state confirms that the underlying issue has been fully addressed.</p>
<p>Overrides can be applied during this process when a warning or error is known, accepted, or considered non-critical. Overrides allow such runs to be treated as successful for reporting and dashboard purposes, while preserving the original messages and maintaining a full audit trail.</p>
<p>The ultimate goal of the Run Checks workflow is to maintain an empty or near-empty Run Checks page.</p>
<p>Backupchecks is designed as a monitoring, validation, and control platform. It does not replace your backup software, but enhances it by adding structured review workflows, consistent reporting, and operational clarity across all backup solutions.</p>
</div>
{% if active_role == 'admin' %}
<div class="card mb-4">
<div class="card-header">
System status
</div>
<div class="card-body">
<div class="row mb-2">
<div class="col-md-6">
<strong>Database size:</strong> {{ db_size_human }}
</div>
<div class="col-md-6">
<strong>Free disk space:</strong>
{% if free_disk_warning %}
<span class="text-danger fw-bold">{{ free_disk_human }}</span>
<span class="text-danger">(mail import will be blocked below 2 GB)</span>
{% else %}
{{ free_disk_human }}
{% endif %}
</div>
</div>
</div>
</div>
{% endif %}
{% endblock %}

View File

@ -0,0 +1,98 @@
{% extends "layout/base.html" %}
{# Feedback list: searchable, filterable, sortable overview of bug reports and
   feature requests, with a one-click vote button per row.
   Context vars: q, item_type, status, sort, items (each item exposes id,
   title, item_type, component, status, vote_count, user_voted, created_by,
   created_at, updated_at). #}
{% block content %}
<div class="d-flex justify-content-between align-items-center mb-3">
<h1 class="h4 mb-0">Feedback</h1>
<a class="btn btn-primary" href="{{ url_for('main.feedback_new') }}">New</a>
</div>
{# Filter bar: GET form, so the active filters are kept in the URL. #}
<form method="get" class="row g-2 mb-3">
<div class="col-12 col-md-3">
<input class="form-control" type="text" name="q" value="{{ q }}" placeholder="Search" />
</div>
<div class="col-6 col-md-2">
<select class="form-select" name="type">
<option value="" {% if item_type == '' %}selected{% endif %}>All types</option>
<option value="bug" {% if item_type == 'bug' %}selected{% endif %}>Bug</option>
<option value="feature" {% if item_type == 'feature' %}selected{% endif %}>Feature</option>
</select>
</div>
<div class="col-6 col-md-2">
<select class="form-select" name="status">
<option value="all" {% if status == 'all' %}selected{% endif %}>All</option>
<option value="open" {% if status == 'open' %}selected{% endif %}>Open</option>
<option value="resolved" {% if status == 'resolved' %}selected{% endif %}>Resolved</option>
</select>
</div>
<div class="col-6 col-md-2">
<select class="form-select" name="sort">
<option value="votes" {% if sort == 'votes' %}selected{% endif %}>Most voted</option>
<option value="newest" {% if sort == 'newest' %}selected{% endif %}>Newest</option>
<option value="updated" {% if sort == 'updated' %}selected{% endif %}>Updated</option>
</select>
</div>
<div class="col-6 col-md-3">
<button class="btn btn-outline-secondary" type="submit">Apply</button>
</div>
</form>
<div class="table-responsive">
<table class="table table-sm align-middle">
<thead>
<tr>
<th style="width: 90px;">Votes</th>
<th>Title</th>
<th style="width: 120px;">Type</th>
<th style="width: 160px;">Component</th>
<th style="width: 120px;">Status</th>
<th style="width: 170px;">Created</th>
</tr>
</thead>
<tbody>
{% if not items %}
<tr>
<td colspan="6" class="text-muted">No items found.</td>
</tr>
{% endif %}
{% for i in items %}
<tr>
<td>
{# Vote button posts to feedback_vote; ref=list is presumably used by the
   endpoint to redirect back to this page — confirm in the route. #}
<form method="post" action="{{ url_for('main.feedback_vote', item_id=i.id) }}">
<input type="hidden" name="ref" value="list" />
<button type="submit" class="btn btn-sm {% if i.user_voted %}btn-success{% else %}btn-outline-secondary{% endif %}">
+ {{ i.vote_count }}
</button>
</form>
</td>
<td>
<a href="{{ url_for('main.feedback_detail', item_id=i.id) }}">{{ i.title }}</a>
{% if i.created_by %}
<div class="text-muted" style="font-size: 0.85rem;">by {{ i.created_by }}</div>
{% endif %}
</td>
<td>
{% if i.item_type == 'bug' %}
<span class="badge text-bg-danger">Bug</span>
{% else %}
<span class="badge text-bg-primary">Feature</span>
{% endif %}
</td>
<td>{{ i.component or '-' }}</td>
<td>
{% if i.status == 'resolved' %}
<span class="badge text-bg-success">Resolved</span>
{% else %}
<span class="badge text-bg-warning">Open</span>
{% endif %}
</td>
<td>
<div>{{ i.created_at }}</div>
<div class="text-muted" style="font-size: 0.85rem;">Updated {{ i.updated_at }}</div>
</td>
</tr>
{% endfor %}
</tbody>
</table>
</div>
{% endblock %}

View File

@ -0,0 +1,80 @@
{% extends "layout/base.html" %}
{# Feedback detail: shows one bug/feature item with vote button and, for
   admins, resolve/reopen/delete actions.
   Context vars: item, created_by_name, resolved_by_name, vote_count,
   user_voted, active_role. #}
{% block content %}
<div class="d-flex justify-content-between align-items-center mb-3">
<div>
<h1 class="h4 mb-1">{{ item.title }}</h1>
<div class="text-muted" style="font-size: 0.9rem;">
{% if item.item_type == 'bug' %}
<span class="badge text-bg-danger">Bug</span>
{% else %}
<span class="badge text-bg-primary">Feature</span>
{% endif %}
{% if item.status == 'resolved' %}
<span class="badge text-bg-success">Resolved</span>
{% else %}
<span class="badge text-bg-warning">Open</span>
{% endif %}
<span class="ms-2">by {{ created_by_name }}</span>
</div>
</div>
<a class="btn btn-outline-secondary" href="{{ url_for('main.feedback_page') }}">Back</a>
</div>
<div class="row g-3">
<div class="col-12 col-lg-8">
<div class="card">
<div class="card-body">
{% if item.component %}
<div class="mb-2"><strong>Component:</strong> {{ item.component }}</div>
{% endif %}
{# pre-wrap preserves the user's line breaks in the free-text description. #}
<div style="white-space: pre-wrap;">{{ item.description }}</div>
</div>
<div class="card-footer d-flex justify-content-between align-items-center">
<div class="text-muted" style="font-size: 0.9rem;">
Created {{ item.created_at }}
{# NOTE(review): these empty mx-1 spacer spans look like they once held a
   separator glyph (e.g. a middot) that was lost in an encoding pass —
   confirm against the original template. #}
<span class="mx-1"></span>
Updated {{ item.updated_at }}
{% if item.status == 'resolved' and item.resolved_at %}
<span class="mx-1"></span>
Resolved {{ item.resolved_at }}{% if resolved_by_name %} by {{ resolved_by_name }}{% endif %}
{% endif %}
</div>
<form method="post" action="{{ url_for('main.feedback_vote', item_id=item.id) }}">
<button type="submit" class="btn btn-sm {% if user_voted %}btn-success{% else %}btn-outline-secondary{% endif %}">
+ {{ vote_count }}
</button>
</form>
</div>
</div>
</div>
<div class="col-12 col-lg-4">
<div class="card">
<div class="card-body">
<h2 class="h6">Actions</h2>
{% if active_role == 'admin' %}
{% if item.status == 'resolved' %}
<form method="post" action="{{ url_for('main.feedback_resolve', item_id=item.id) }}" class="mb-2">
<input type="hidden" name="action" value="reopen" />
<button type="submit" class="btn btn-outline-secondary w-100">Reopen</button>
</form>
{% else %}
<form method="post" action="{{ url_for('main.feedback_resolve', item_id=item.id) }}" class="mb-2">
<input type="hidden" name="action" value="resolve" />
<button type="submit" class="btn btn-success w-100">Mark as resolved</button>
</form>
{% endif %}
{# Destructive action guarded by a JS confirm dialog. #}
<form method="post" action="{{ url_for('main.feedback_delete', item_id=item.id) }}" onsubmit="return confirm('Delete this item?');">
<button type="submit" class="btn btn-danger w-100">Delete</button>
</form>
{% else %}
<div class="text-muted">Only administrators can resolve or delete items.</div>
{% endif %}
</div>
</div>
</div>
</div>
{% endblock %}

View File

@ -0,0 +1,37 @@
{% extends "layout/base.html" %}
{# Feedback creation form: POSTs item_type, title, description and an
   optional component back to the same route. Required fields are enforced
   client-side via the HTML 'required' attribute; server-side validation is
   presumably done in the route — confirm. #}
{% block content %}
<div class="d-flex justify-content-between align-items-center mb-3">
<h1 class="h4 mb-0">New Feedback</h1>
<a class="btn btn-outline-secondary" href="{{ url_for('main.feedback_page') }}">Back</a>
</div>
<form method="post" class="card">
<div class="card-body">
<div class="row g-3">
<div class="col-12 col-md-3">
<label class="form-label">Type</label>
<select name="item_type" class="form-select" required>
<option value="bug">Bug</option>
<option value="feature">Feature</option>
</select>
</div>
<div class="col-12 col-md-9">
<label class="form-label">Title</label>
<input type="text" name="title" class="form-control" required />
</div>
<div class="col-12">
<label class="form-label">Description</label>
<textarea name="description" class="form-control" rows="8" required></textarea>
</div>
<div class="col-12 col-md-6">
<label class="form-label">Component (optional)</label>
<input type="text" name="component" class="form-control" />
</div>
</div>
</div>
<div class="card-footer d-flex justify-content-end">
<button type="submit" class="btn btn-primary">Create</button>
</div>
</form>
{% endblock %}

View File

@ -0,0 +1,331 @@
{% extends "layout/base.html" %}
{# NOTE(review): in Jinja2 a child template discards any top-level output that
   is not inside a {% block %}. This <style> element is therefore never
   rendered when this template extends base.html — the modal sizing rules
   below have no effect. Move the <style> inside the content block (or into a
   dedicated head block of the base layout) to make it apply. The job-detail
   template places the equivalent <style> inside its content block. #}
<style>
/* Inbox popup: wider + internal scroll areas */
.modal-xxl { max-width: 98vw; }
@media (min-width: 1400px) { .modal-xxl { max-width: 1400px; } }
#msg_body_container_iframe { height: 55vh; }
#msg_objects_container { max-height: 25vh; overflow: auto; }
</style>
{# Pager macro must be defined before it is used #}
{# Renders Previous/Next links, a "Re-parse all" action (admin/operator only),
   and a jump-to-page form. 'position' ("top"/"bottom") only disambiguates the
   page input's id so the label's 'for' attribute stays unique; because the
   macro is rendered twice, the Re-parse all button also appears twice. #}
{% macro pager(position, page, total_pages, has_prev, has_next) -%}
<div class="d-flex justify-content-between align-items-center my-2">
<div>
{% if has_prev %}
<a class="btn btn-outline-secondary btn-sm" href="{{ url_for('main.inbox', page=page-1) }}">Previous</a>
{% else %}
<button class="btn btn-outline-secondary btn-sm" disabled>Previous</button>
{% endif %}
{% if has_next %}
<a class="btn btn-outline-secondary btn-sm ms-2" href="{{ url_for('main.inbox', page=page+1) }}">Next</a>
{% else %}
<button class="btn btn-outline-secondary btn-sm ms-2" disabled>Next</button>
{% endif %}
</div>
{% if current_user.is_authenticated and active_role in ["admin", "operator"] %}
<form method="POST" action="{{ url_for('main.inbox_reparse_all') }}" class="me-3 mb-0">
<button type="submit" class="btn btn-outline-secondary btn-sm">Re-parse all</button>
</form>
{% endif %}
<div class="d-flex align-items-center">
<span class="me-2">Page {{ page }} of {{ total_pages }}</span>
{# GET form submits only 'page'; other query params (if any) are dropped. #}
<form method="get" class="d-flex align-items-center mb-0">
<label for="page_{{ position }}" class="form-label me-1 mb-0">Go to:</label>
<input
type="number"
min="1"
max="{{ total_pages }}"
class="form-control form-control-sm me-1"
id="page_{{ position }}"
name="page"
value="{{ page }}"
style="width: 5rem;"
/>
<button type="submit" class="btn btn-primary btn-sm">Go</button>
</form>
</div>
</div>
{%- endmacro %}
{% block content %}
<h2 class="mb-3">Inbox</h2>
{# Message list: each row opens the detail modal (wired up in the script at
   the bottom of this template). #}
{{ pager("top", page, total_pages, has_prev, has_next) }}
<div class="table-responsive">
<table class="table table-sm table-hover align-middle">
<thead class="table-light">
<tr>
<th scope="col">From</th>
<th scope="col">Subject</th>
<th scope="col">Date / time</th>
<th scope="col">Backup</th>
<th scope="col">Type</th>
<th scope="col">Job name</th>
<th scope="col">Overall</th>
<th scope="col">Parsed</th>
<th scope="col">EML</th>
</tr>
</thead>
<tbody>
{% if rows %}
{% for row in rows %}
{# Whole row is clickable; the EML link calls stopPropagation in JS so a
   download click does not also open the modal. #}
<tr class="inbox-row" data-message-id="{{ row.id }}" style="cursor: pointer;">
<td>{{ row.from_address }}</td>
<td>{{ row.subject }}</td>
<td>{{ row.received_at }}</td>
<td>{{ row.backup_software }}</td>
<td>{{ row.backup_type }}</td>
<td>{{ row.job_name }}</td>
<td>{{ row.overall_status }}</td>
<td>{{ row.parsed_at }}</td>
<td>
{% if row.has_eml %}
<a class="eml-download" href="{{ url_for('main.inbox_message_eml', message_id=row.id) }}">EML</a>
{% endif %}
</td>
</tr>
{% endfor %}
{% else %}
<tr>
{# Fixed: colspan was "10" but the table has 9 columns. #}
<td colspan="9" class="text-center text-muted py-3">
No messages found.
</td>
</tr>
{% endif %}
</tbody>
</table>
</div>
{{ pager("bottom", page, total_pages, has_prev, has_next) }}
<!-- Inline popup modal for message details -->
{# Bootstrap modal shell; all msg_* placeholders are filled client-side from
   the inbox_message_detail JSON endpoint. #}
<div class="modal fade" id="inboxMessageModal" tabindex="-1" aria-labelledby="inboxMessageModalLabel" aria-hidden="true">
<div class="modal-dialog modal-xl modal-dialog-scrollable modal-xxl">
<div class="modal-content">
<div class="modal-header">
<h5 class="modal-title" id="inboxMessageModalLabel">Message details</h5>
<button type="button" class="btn-close" data-bs-dismiss="modal" aria-label="Close"></button>
</div>
<div class="modal-body">
<div class="row">
<div class="col-md-3">
<dl class="row mb-0">
<dt class="col-4">From</dt>
<dd class="col-8" id="msg_from"></dd>
<dt class="col-4">Backup</dt>
<dd class="col-8" id="msg_backup"></dd>
<dt class="col-4">Type</dt>
<dd class="col-8" id="msg_type"></dd>
<dt class="col-4">Job</dt>
<dd class="col-8" id="msg_job"></dd>
<dt class="col-4">Overall</dt>
<dd class="col-8" id="msg_overall"></dd>
<dt class="col-4">Customer</dt>
<dd class="col-8">
{% if current_user.is_authenticated and active_role in ["admin", "operator"] %}
{# datalist gives type-ahead over known customer names; the JS maps the
   chosen name back to a customer id before the approve form submits. #}
<input id="msg_customer_input" class="form-control form-control-sm" list="customerList" placeholder="Select customer" autocomplete="off" />
<datalist id="customerList">
{% for c in customers %}
<option value="{{ c.name }}"></option>
{% endfor %}
</datalist>
{% else %}
<span id="msg_customer_display"></span>
{% endif %}
</dd>
<dt class="col-4">Received</dt>
<dd class="col-8" id="msg_received"></dd>
<dt class="col-4">Parsed</dt>
<dd class="col-8" id="msg_parsed"></dd>
<dt class="col-4">Details</dt>
<dd class="col-8" id="msg_overall_message" style="white-space: pre-wrap;"></dd>
</dl>
</div>
<div class="col-md-9">
{# NOTE(review): "p-2 p-0" sets conflicting padding utilities — keep one. #}
<div class="border rounded p-2 p-0" style="overflow:hidden;">
{# Sandboxed iframe isolates the mail's own HTML/CSS from the site. #}
<iframe id="msg_body_container_iframe" class="w-100" style="height:55vh; border:0; background:transparent;" sandbox="allow-popups allow-popups-to-escape-sandbox allow-top-navigation-by-user-activation"></iframe>
</div>
<div class="mt-3">
<h6>Objects</h6>
<div id="msg_objects_container">
<!-- Parsed objects will be rendered here -->
</div>
</div>
</div>
</div>
</div>
<div class="modal-footer">
{% if current_user.is_authenticated and active_role in ["admin", "operator"] %}
{# Form actions are assigned per-message by the JS when a row is opened. #}
<form id="inboxApproveForm" method="POST" action="" class="me-auto mb-0">
<input type="hidden" id="msg_customer_id" name="customer_id" value="" />
<button type="submit" class="btn btn-primary">Approve job</button>
</form>
<form id="inboxDeleteForm" method="POST" action="" class="mb-0">
<button type="submit" class="btn btn-outline-danger" onclick="return confirm('Delete this message from the Inbox?');">Delete</button>
</form>
{% endif %}
<button type="button" class="btn btn-secondary" data-bs-dismiss="modal">Close</button>
</div>
</div>
</div>
</div>
<script>
// Inbox page behaviour: clicking a row fetches the message JSON and fills
// the modal; the IIFE keeps all helpers out of the global scope.
(function () {
// Customer list injected server-side; used to map typed names -> ids.
var customers = {{ customers|tojson|safe }};
function wrapMailHtml(html) {
  // Build a minimal standalone document around the mail body so its own
  // CSS renders isolated from the site styling inside the sandboxed iframe.
  var body = html || "";
  var pieces = [
    "<!doctype html><html><head><meta charset=\"utf-8\">",
    "<base target=\"_blank\">",
    "</head><body style=\"margin:0; padding:8px;\">",
    body,
    "</body></html>"
  ];
  return pieces.join("");
}
function findCustomerIdByName(name) {
  // Resolve a customer display name to its id using the server-injected
  // `customers` list; null when the name is empty or unknown.
  if (!name) return null;
  var hit = customers.filter(function (c) { return c.name === name; })[0];
  return hit ? hit.id : null;
}
function renderObjects(objects) {
  // Render the parsed backup objects as a small table inside the modal.
  var container = document.getElementById("msg_objects_container");
  if (!container) return;
  if (!objects || !objects.length) {
    container.innerHTML = "<p class=\"text-muted mb-0\">No objects parsed for this message.</p>";
    return;
  }
  // SECURITY FIX: object names / error messages originate from parsed mail
  // content (untrusted input) and were previously spliced into innerHTML
  // unescaped, allowing HTML/script injection. Escape every field.
  function esc(s) {
    return (s || "").toString()
      .replace(/&/g, "&amp;")
      .replace(/</g, "&lt;")
      .replace(/>/g, "&gt;")
      .replace(/"/g, "&quot;")
      .replace(/'/g, "&#39;");
  }
  var html = "<div class=\"table-responsive\"><table class=\"table table-sm table-bordered mb-0\">";
  html += "<thead><tr><th>Object</th><th>Type</th><th>Status</th><th>Error</th></tr></thead><tbody>";
  for (var i = 0; i < objects.length; i++) {
    var o = objects[i] || {};
    html += "<tr>";
    html += "<td>" + esc(o.name) + "</td>";
    html += "<td>" + esc(o.type) + "</td>";
    html += "<td>" + esc(o.status) + "</td>";
    html += "<td>" + esc(o.error_message) + "</td>";
    html += "</tr>";
  }
  html += "</tbody></table></div>";
  container.innerHTML = html;
}
// Wire up row-click -> modal, EML download links, and (for admins/operators)
// the approve/delete forms. Runs once on DOMContentLoaded.
function attachHandlers() {
// Let EML links download without also triggering the row's modal click.
var emlLinks = document.querySelectorAll("a.eml-download");
emlLinks.forEach(function (a) {
a.addEventListener("click", function (ev) {
ev.stopPropagation();
});
});
var rows = document.querySelectorAll("tr.inbox-row");
var modalEl = document.getElementById("inboxMessageModal");
if (!modalEl) return;
var modal = new bootstrap.Modal(modalEl);
rows.forEach(function (row) {
row.addEventListener("click", function () {
var id = row.getAttribute("data-message-id");
if (!id) return;
// NOTE(review): .replace("0", id) substitutes the FIRST "0" in the URL;
// this breaks if the route prefix ever contains a "0". Safer: build the
// URL with a placeholder token, or use url_for with a data attribute.
fetch("{{ url_for('main.inbox_message_detail', message_id=0) }}".replace("0", id))
.then(function (resp) {
if (!resp.ok) throw new Error("Failed to load message details");
return resp.json();
})
.then(function (data) {
if (data.status !== "ok") throw new Error("Unexpected response");
var meta = data.meta || {};
// textContent assignments are XSS-safe for the metadata fields.
document.getElementById("inboxMessageModalLabel").textContent = meta.subject || "Message details";
document.getElementById("msg_from").textContent = meta.from_address || "";
document.getElementById("msg_backup").textContent = meta.backup_software || "";
document.getElementById("msg_type").textContent = meta.backup_type || "";
document.getElementById("msg_job").textContent = meta.job_name || "";
document.getElementById("msg_overall").textContent = meta.overall_status || "";
document.getElementById("msg_overall_message").textContent = meta.overall_message || "";
document.getElementById("msg_received").textContent = meta.received_at || "";
document.getElementById("msg_parsed").textContent = meta.parsed_at || "";
var bodyFrame = document.getElementById("msg_body_container_iframe");
if (bodyFrame) bodyFrame.srcdoc = wrapMailHtml(data.body_html || "");
renderObjects(data.objects || []);
var customerName = meta.customer_name || "";
var approveForm = document.getElementById("inboxApproveForm");
{% if current_user.is_authenticated and active_role in ["admin", "operator"] %}
// Pre-fill the customer picker and keep the hidden id field in sync;
// submission is blocked unless the typed name matches a known customer.
var customerInput = document.getElementById("msg_customer_input");
var customerIdField = document.getElementById("msg_customer_id");
if (customerInput) customerInput.value = customerName;
if (customerIdField) {
var existingId = findCustomerIdByName(customerName);
customerIdField.value = existingId !== null ? String(existingId) : "";
}
if (approveForm) {
approveForm.action = "{{ url_for('main.inbox_message_approve', message_id=0) }}".replace("0", id);
approveForm.onsubmit = function (ev) {
if (!customerInput || !customerIdField) return;
var cid = findCustomerIdByName(customerInput.value);
if (!cid) {
ev.preventDefault();
alert("Please select an existing customer name from the list.");
return false;
}
customerIdField.value = String(cid);
};
}
var deleteForm = document.getElementById("inboxDeleteForm");
if (deleteForm) {
deleteForm.action = "{{ url_for('main.inbox_message_delete', message_id=0) }}".replace("0", id);
}
{% else %}
// Read-only roles: show the customer name, hide the action forms.
var customerDisplay = document.getElementById("msg_customer_display");
if (customerDisplay) customerDisplay.textContent = customerName || "";
if (approveForm) approveForm.style.display = "none";
var deleteForm = document.getElementById("inboxDeleteForm");
if (deleteForm) deleteForm.style.display = "none";
{% endif %}
modal.show();
})
.catch(function (err) {
console.error(err);
});
});
});
}
document.addEventListener("DOMContentLoaded", attachHandlers);
})();
</script>
{% endblock %}

View File

@ -0,0 +1,200 @@
{% extends "layout/base.html" %}
{# Pager macro must be defined before it is used #}
{# Same pager as the Inbox template but pointing at inbox_deleted_mails and
   without the "Re-parse all" action. 'position' only uniquifies the input id. #}
{% macro pager(position, page, total_pages, has_prev, has_next) -%}
<div class="d-flex justify-content-between align-items-center my-2">
<div>
{% if has_prev %}
<a class="btn btn-outline-secondary btn-sm" href="{{ url_for('main.inbox_deleted_mails', page=page-1) }}">Previous</a>
{% else %}
<button class="btn btn-outline-secondary btn-sm" disabled>Previous</button>
{% endif %}
{% if has_next %}
<a class="btn btn-outline-secondary btn-sm ms-2" href="{{ url_for('main.inbox_deleted_mails', page=page+1) }}">Next</a>
{% else %}
<button class="btn btn-outline-secondary btn-sm ms-2" disabled>Next</button>
{% endif %}
</div>
<div class="d-flex align-items-center">
<span class="me-2">Page {{ page }} of {{ total_pages }}</span>
<form method="get" class="d-flex align-items-center mb-0">
<label for="page_{{ position }}" class="form-label me-1 mb-0">Go to:</label>
<input
type="number"
min="1"
max="{{ total_pages }}"
class="form-control form-control-sm me-1"
id="page_{{ position }}"
name="page"
value="{{ page }}"
style="width: 5rem;"
/>
<button type="submit" class="btn btn-primary btn-sm">Go</button>
</form>
</div>
</div>
{%- endmacro %}
{% block content %}
<h2 class="mb-3">Deleted mails</h2>
{# Soft-deleted messages; each row opens a detail modal, and the Restore
   action moves the mail back to the Inbox. #}
{{ pager("top", page, total_pages, has_prev, has_next) }}
<div class="table-responsive">
<table class="table table-sm table-hover align-middle">
<thead class="table-light">
<tr>
<th scope="col">From</th>
<th scope="col">Subject</th>
<th scope="col">Received</th>
<th scope="col">Deleted by</th>
<th scope="col">Deleted at</th>
<th scope="col">EML</th>
<th scope="col">Actions</th>
</tr>
</thead>
<tbody>
{% if rows %}
{% for row in rows %}
<tr class="deleted-mail-row" data-message-id="{{ row.id }}" style="cursor: pointer;">
<td>{{ row.from_address }}</td>
<td>{{ row.subject }}</td>
<td>{{ row.received_at }}</td>
<td>{{ row.deleted_by }}</td>
<td>{{ row.deleted_at }}</td>
<td>
{% if row.has_eml %}
<a class="eml-download" href="{{ url_for('main.inbox_message_eml', message_id=row.id) }}">EML</a>
{% endif %}
</td>
<td>
{# .restore-form gets a stopPropagation handler in JS so clicking
   Restore does not also open the modal. #}
<form method="POST" action="{{ url_for('main.inbox_deleted_restore', message_id=row.id) }}" class="mb-0 restore-form" onsubmit="return confirm('Restore this message to the Inbox?');">
<button type="submit" class="btn btn-sm btn-outline-primary">Restore</button>
</form>
</td>
</tr>
{% endfor %}
{% else %}
<tr>
<td colspan="7" class="text-center text-muted py-3">
No deleted messages found.
</td>
</tr>
{% endif %}
</tbody>
</table>
</div>
<!-- Inline popup modal for deleted message details -->
<div class="modal fade" id="deletedMailModal" tabindex="-1" aria-labelledby="deletedMailModalLabel" aria-hidden="true">
<div class="modal-dialog modal-xl modal-dialog-scrollable modal-xxl">
<div class="modal-content">
<div class="modal-header">
<h5 class="modal-title" id="deletedMailModalLabel">Message details</h5>
<button type="button" class="btn-close" data-bs-dismiss="modal" aria-label="Close"></button>
</div>
<div class="modal-body">
<div class="row">
<div class="col-md-3">
<dl class="row mb-0">
<dt class="col-4">From</dt>
<dd class="col-8" id="dmsg_from"></dd>
<dt class="col-4">Received</dt>
<dd class="col-8" id="dmsg_received"></dd>
<dt class="col-4">Deleted by</dt>
<dd class="col-8" id="dmsg_deleted_by"></dd>
<dt class="col-4">Deleted at</dt>
<dd class="col-8" id="dmsg_deleted_at"></dd>
</dl>
</div>
<div class="col-md-9">
<div class="border rounded p-2 p-0" style="overflow:hidden;">
<iframe id="dmsg_body_iframe" class="w-100" style="height:60vh; border:0; background:transparent;" sandbox="allow-popups allow-popups-to-escape-sandbox allow-top-navigation-by-user-activation"></iframe>
</div>
</div>
</div>
</div>
<div class="modal-footer">
<button type="button" class="btn btn-secondary" data-bs-dismiss="modal">Close</button>
</div>
</div>
</div>
</div>
<script>
(function () {
function wrapMailHtml(html) {
  // Wrap the mail body in a minimal standalone document so it renders with
  // its own CSS, isolated from the site styling inside the sandboxed iframe.
  var body = html || "";
  var pieces = [
    "<!doctype html><html><head><meta charset=\"utf-8\">",
    "<base target=\"_blank\">",
    "</head><body style=\"margin:0; padding:8px;\">",
    body,
    "</body></html>"
  ];
  return pieces.join("");
}
// Wire up row-click -> modal; EML links and Restore forms must not bubble
// their clicks into the row handler. Runs once on DOMContentLoaded.
function attachHandlers() {
var emlLinks = document.querySelectorAll("a.eml-download");
emlLinks.forEach(function (a) {
a.addEventListener("click", function (ev) {
ev.stopPropagation();
});
});
// Clicks anywhere in a restore form (including its button) stay local.
var restoreForms = document.querySelectorAll("form.restore-form");
restoreForms.forEach(function (f) {
f.addEventListener("click", function (ev) {
ev.stopPropagation();
});
});
var rows = document.querySelectorAll("tr.deleted-mail-row");
var modalEl = document.getElementById("deletedMailModal");
if (!modalEl) return;
var modal = new bootstrap.Modal(modalEl);
rows.forEach(function (row) {
row.addEventListener("click", function () {
var id = row.getAttribute("data-message-id");
if (!id) return;
// NOTE(review): .replace("0", id) substitutes the FIRST "0" in the URL;
// fragile if the route prefix ever contains a "0".
fetch("{{ url_for('main.inbox_message_detail', message_id=0) }}".replace("0", id))
.then(function (resp) {
if (!resp.ok) throw new Error("Failed to load message details");
return resp.json();
})
.then(function (data) {
if (data.status !== "ok") throw new Error("Unexpected response");
var meta = data.meta || {};
document.getElementById("deletedMailModalLabel").textContent = meta.subject || "Message details";
document.getElementById("dmsg_from").textContent = meta.from_address || "";
document.getElementById("dmsg_received").textContent = meta.received_at || "";
document.getElementById("dmsg_deleted_by").textContent = meta.deleted_by || "";
document.getElementById("dmsg_deleted_at").textContent = meta.deleted_at || "";
var bodyFrame = document.getElementById("dmsg_body_iframe");
if (bodyFrame) bodyFrame.srcdoc = wrapMailHtml(data.body_html || "");
modal.show();
})
.catch(function (err) {
console.error(err);
});
});
});
}
document.addEventListener("DOMContentLoaded", attachHandlers);
})();
</script>
{% endblock %}

View File

@ -0,0 +1,386 @@
{% extends "layout/base.html" %}
{# Job detail: metadata card, inferred schedule, delete action (managers
   only), and the paginated run history further below. #}
{% block content %}
<h2 class="mb-3">Job details</h2>
<div class="card mb-3">
<div class="card-body">
<dl class="row mb-0">
<dt class="col-sm-3">Customer</dt>
<dd class="col-sm-9">{{ job.customer.name if job.customer else "" }}</dd>
<dt class="col-sm-3">Backup</dt>
<dd class="col-sm-9">{{ job.backup_software }}</dd>
<dt class="col-sm-3">Type</dt>
<dd class="col-sm-9">{{ job.backup_type }}</dd>
<dt class="col-sm-3">Job name</dt>
<dd class="col-sm-9">{{ job.job_name }}</dd>
<dt class="col-sm-3">Tickets</dt>
<dd class="col-sm-9">{{ ticket_open_count }} open / {{ ticket_total_count }} total</dd>
<dt class="col-sm-3">Remarks</dt>
<dd class="col-sm-9">{{ remark_open_count }} open / {{ remark_total_count }} total</dd>
{% if schedule_map %}
{# schedule_map: index 0-6 = Mon-Sun, each a list of time-block strings.
   Note: ', '.join(...) works in Jinja, but the |join(', ') filter is the
   more idiomatic spelling. #}
<dt class="col-sm-3">Schedule (inferred)</dt>
<dd class="col-sm-9">
<div class="table-responsive">
<table class="table table-sm table-bordered mb-0">
<thead class="table-light">
<tr><th style="width: 120px;">Day</th><th>Times (15 min blocks)</th></tr>
</thead>
<tbody>
<tr><td>Mon</td><td>{{ ', '.join(schedule_map[0]) if schedule_map[0] else '—' }}</td></tr>
<tr><td>Tue</td><td>{{ ', '.join(schedule_map[1]) if schedule_map[1] else '—' }}</td></tr>
<tr><td>Wed</td><td>{{ ', '.join(schedule_map[2]) if schedule_map[2] else '—' }}</td></tr>
<tr><td>Thu</td><td>{{ ', '.join(schedule_map[3]) if schedule_map[3] else '—' }}</td></tr>
<tr><td>Fri</td><td>{{ ', '.join(schedule_map[4]) if schedule_map[4] else '—' }}</td></tr>
<tr><td>Sat</td><td>{{ ', '.join(schedule_map[5]) if schedule_map[5] else '—' }}</td></tr>
<tr><td>Sun</td><td>{{ ', '.join(schedule_map[6]) if schedule_map[6] else '—' }}</td></tr>
</tbody>
</table>
</div>
</dd>
{% endif %}
</dl>
</div>
</div>
{% if can_manage_jobs %}
{# Destructive action: related mails are returned to the Inbox on delete. #}
<form method="post" action="{{ url_for('main.job_delete', job_id=job.id) }}" class="mb-3" onsubmit="return confirm('Are you sure you want to delete this job? Related mails will be returned to the Inbox.');">
<button type="submit" class="btn btn-outline-danger">Delete job</button>
</form>
{% endif %}
<h3 class="mt-4 mb-3">Job history</h3>
{# Run history: rows with a linked mail are clickable and open the run modal;
   ticket codes and remarks are passed via escaped JSON data attributes. #}
<div class="table-responsive">
<table class="table table-sm table-hover align-middle">
<thead class="table-light">
<tr>
<th scope="col">Day run</th>
<th scope="col">Run time</th>
<th scope="col">Status</th>
<th scope="col">Tickets</th>
<th scope="col">Remarks</th>
<th scope="col">Override</th>
{% if active_role == 'admin' %}
<th scope="col">Reviewed by</th>
<th scope="col">Reviewed at</th>
{% endif %}
</tr>
</thead>
<tbody>
{% if history_rows %}
{% for r in history_rows %}
{# tojson|forceescape makes the JSON safe inside an HTML attribute. #}
<tr{% if r.mail_message_id %} class="jobrun-row" data-message-id="{{ r.mail_message_id }}" data-ticket-codes="{{ (r.ticket_codes or [])|tojson|forceescape }}" data-remark-items="{{ (r.remark_items or [])|tojson|forceescape }}" style="cursor: pointer;"{% endif %}>
<td>{{ r.run_day }}</td>
<td>{{ r.run_at }}</td>
{# Map the (lowercased) status to a dot/colour class; an applied override
   wins over the raw status. #}
{% set _s = (r.status or "")|lower %}
{% set _is_override = (r.override_applied is defined and r.override_applied) or ('override' in _s) %}
{% set _dot = '' %}
{% if _is_override %}{% set _dot = 'dot-override' %}
{% elif _s == 'success' %}{% set _dot = 'dot-success' %}
{% elif _s == 'warning' %}{% set _dot = 'dot-warning' %}
{% elif _s == 'error' %}{% set _dot = 'dot-failed' %}
{% elif _s == 'failed' %}{% set _dot = 'dot-failed' %}
{% elif _s == 'missed' %}{% set _dot = 'dot-missed' %}
{% endif %}
<td class="status-text {% if _is_override %}status-override{% elif _s == 'success' %}status-success{% elif _s == 'warning' %}status-warning{% elif _s == 'error' %}status-error{% elif _s == 'failed' %}status-failed{% elif _s == 'missed' %}status-missed{% endif %}">{% if _dot %}<span class="status-dot {{ _dot }} me-2" aria-hidden="true"></span>{% endif %}{{ r.status }}</td>
<td>{% if r.ticket_codes %}🎫 {{ r.ticket_codes|join(", ") }}{% endif %}</td>
<td>{% if r.has_remark %}💬{% endif %}</td>
<td class="status-text {% if r.override_applied %}status-override{% endif %}">{% if r.override_applied %}<span class="status-dot dot-override me-2" aria-hidden="true"></span>Override{% endif %}</td>
{% if active_role == 'admin' %}
<td>{{ r.reviewed_by }}</td>
<td>{{ r.reviewed_at }}</td>
{% endif %}
</tr>
{% endfor %}
{% else %}
<tr>
{# colspan matches the role-dependent column count (8 admin / 6 other). #}
<td colspan="{% if active_role == 'admin' %}8{% else %}6{% endif %}" class="text-center text-muted py-3">
No runs found.
</td>
</tr>
{% endif %}
</tbody>
</table>
</div>
{# Inline pagination (no macro here, unlike the Inbox templates). #}
{% if total_pages > 1 %}
<div class="d-flex justify-content-between align-items-center my-2">
<div>
{% if has_prev %}
<a class="btn btn-outline-secondary btn-sm" href="{{ url_for('main.job_detail', job_id=job.id, page=page-1) }}">Previous</a>
{% else %}
<button class="btn btn-outline-secondary btn-sm" disabled>Previous</button>
{% endif %}
{% if has_next %}
<a class="btn btn-outline-secondary btn-sm ms-2" href="{{ url_for('main.job_detail', job_id=job.id, page=page+1) }}">Next</a>
{% else %}
<button class="btn btn-outline-secondary btn-sm ms-2" disabled>Next</button>
{% endif %}
</div>
<div class="d-flex align-items-center">
<span class="me-2">Page {{ page }} of {{ total_pages }}</span>
<form method="get" class="d-flex align-items-center mb-0">
<label for="page_input" class="form-label me-1 mb-0">Go to:</label>
<input
type="number"
min="1"
max="{{ total_pages }}"
class="form-control form-control-sm me-1"
id="page_input"
name="page"
value="{{ page }}"
style="width: 5rem;"
/>
<button type="submit" class="btn btn-primary btn-sm">Go</button>
</form>
</div>
</div>
{% endif %}
{# This <style> lives inside the content block, so (unlike the Inbox
   template's copy) it is actually rendered. #}
<style>
/* Job run popup: wide + internal scroll areas */
.modal-xxl { max-width: 98vw; }
@media (min-width: 1400px) { .modal-xxl { max-width: 1400px; } }
#run_msg_body_container_iframe { height: 55vh; }
#run_msg_objects_container { max-height: 25vh; overflow: auto; }
</style>
<!-- Inline popup modal for run message details -->
{# run_msg_* placeholders are filled client-side from inbox_message_detail. #}
<div class="modal fade" id="jobRunMessageModal" tabindex="-1" aria-labelledby="jobRunMessageModalLabel" aria-hidden="true">
<div class="modal-dialog modal-xl modal-dialog-scrollable modal-xxl">
<div class="modal-content">
<div class="modal-header">
<h5 class="modal-title" id="jobRunMessageModalLabel">Run details</h5>
<button type="button" class="btn-close" data-bs-dismiss="modal" aria-label="Close"></button>
</div>
<div class="modal-body">
<div class="row">
<div class="col-md-3">
<dl class="row mb-0">
<dt class="col-4">From</dt>
<dd class="col-8" id="run_msg_from"></dd>
<dt class="col-4">Backup</dt>
<dd class="col-8" id="run_msg_backup"></dd>
<dt class="col-4">Type</dt>
<dd class="col-8" id="run_msg_type"></dd>
<dt class="col-4">Ticket</dt>
<dd class="col-8" id="run_msg_ticket"></dd>
<dt class="col-4">Remark</dt>
<dd class="col-8" id="run_msg_remark"></dd>
<dt class="col-4">Job</dt>
<dd class="col-8" id="run_msg_job"></dd>
<dt class="col-4">Overall</dt>
<dd class="col-8" id="run_msg_overall"></dd>
<dt class="col-4">Message</dt>
<dd class="col-8" id="run_msg_overall_message" style="white-space: pre-wrap;"></dd>
<dt class="col-4">Customer</dt>
<dd class="col-8" id="run_msg_customer"></dd>
<dt class="col-4">Received</dt>
<dd class="col-8" id="run_msg_received"></dd>
<dt class="col-4">Parsed</dt>
<dd class="col-8" id="run_msg_parsed"></dd>
</dl>
</div>
<div class="col-md-9">
{# NOTE(review): "p-2 p-0" sets conflicting padding utilities — keep one. #}
<div class="border rounded p-2 p-0" style="overflow:hidden;">
{# Sandboxed iframe isolates the mail's own HTML/CSS from the site. #}
<iframe id="run_msg_body_container_iframe" class="w-100" style="height:55vh; border:0; background:transparent;" sandbox="allow-popups allow-popups-to-escape-sandbox allow-top-navigation-by-user-activation"></iframe>
</div>
<div class="mt-3">
<h6>Objects</h6>
<div id="run_msg_objects_container">
<!-- Parsed objects will be rendered here -->
</div>
</div>
</div>
</div>
</div>
<div class="modal-footer">
{# Hidden until the JS fills in a concrete EML download URL. #}
<a id="run_msg_eml_btn" class="btn btn-outline-primary" href="#" style="display:none;" rel="nofollow">Download EML</a>
<button type="button" class="btn btn-secondary" data-bs-dismiss="modal">Close</button>
</div>
</div>
</div>
</div>
<script>
function statusClass(status) {
  // Map a run status (case-insensitive) to its text colour CSS class;
  // unknown or empty statuses get no class.
  var key = (status || "").toString().toLowerCase();
  var classes = {
    success: "status-success",
    warning: "status-warning",
    error: "status-error",
    failed: "status-failed",
    missed: "status-missed",
    expected: "status-expected"
  };
  return Object.prototype.hasOwnProperty.call(classes, key) ? classes[key] : "";
}
function statusDotClass(status) {
  // Map a run status (case-insensitive) to its status-dot CSS class.
  // Note: "error" and "failed" deliberately share the same dot class.
  var key = (status || "").toString().toLowerCase();
  var dots = {
    success: "dot-success",
    warning: "dot-warning",
    error: "dot-failed",
    failed: "dot-failed",
    missed: "dot-missed",
    expected: "dot-expected"
  };
  return Object.prototype.hasOwnProperty.call(dots, key) ? dots[key] : "";
}
// Escape a value for safe interpolation into HTML markup.
// Fix: the old `(s || "")` guard collapsed meaningful falsy values
// (0, false) to ""; only null/undefined are now treated as empty.
// Ampersand is replaced first so already-escaped entities are not produced.
function escapeHtml(s) {
if (s === null || s === undefined) return "";
return String(s)
.replace(/&/g, "&amp;")
.replace(/</g, "&lt;")
.replace(/>/g, "&gt;")
.replace(/"/g, "&quot;")
.replace(/'/g, "&#39;");
}
(function () {
// Build a minimal standalone HTML document around the mail body so the
// iframe renders it isolated from the site's styling. <base target="_blank">
// makes links inside the mail open in a new tab.
function wrapMailHtml(html) {
var parts = [
"<!doctype html><html><head><meta charset=\"utf-8\">",
"<base target=\"_blank\">",
"</head><body style=\"margin:0; padding:8px;\">",
html || "",
"</body></html>"
];
return parts.join("");
}
// Render the parsed job objects as a small table inside the modal.
// Fixes:
// 1. The status-dot span used `\\\"` inside a single-quoted string, which
//    emitted literal backslashes into the markup (class=\"status-dot ...\")
//    and broke the attribute; it now emits plain double quotes, matching
//    the identical dot markup used for run_msg_overall above.
// 2. o.name / o.type / o.error_message come from parsed mail content and
//    are now HTML-escaped before being placed in innerHTML.
function renderObjects(objects) {
var container = document.getElementById("run_msg_objects_container");
if (!container) return;
if (!objects || !objects.length) {
container.innerHTML = "<p class=\"text-muted mb-0\">No objects stored for this run.</p>";
return;
}
var html = "<div class=\"table-responsive\"><table class=\"table table-sm table-bordered mb-0\">";
html += "<thead><tr><th>Object</th><th>Type</th><th>Status</th><th>Error</th></tr></thead><tbody>";
for (var i = 0; i < objects.length; i++) {
var o = objects[i] || {};
html += "<tr>";
html += "<td>" + escapeHtml(o.name || "") + "</td>";
html += "<td>" + escapeHtml(o.type || "") + "</td>";
var d = statusDotClass(o.status);
html += "<td class=\"status-text " + statusClass(o.status) + "\">" + (d ? ('<span class="status-dot ' + d + ' me-2" aria-hidden="true"></span>') : '') + escapeHtml(o.status || "") + "</td>";
html += "<td>" + escapeHtml(o.error_message || "") + "</td>";
html += "</tr>";
}
html += "</tbody></table></div>";
container.innerHTML = html;
}
// Wire click handlers onto every job-run table row: clicking a row fetches
// the parsed message details as JSON and fills + shows the Bootstrap modal.
// NOTE(review): rows are expected to carry data-message-id, and optionally
// data-ticket-codes / data-remark-items as JSON arrays — confirm against the
// template that renders tr.jobrun-row.
function attachHandlers() {
var rows = document.querySelectorAll("tr.jobrun-row");
var modalEl = document.getElementById("jobRunMessageModal");
if (!modalEl) return;
var modal = new bootstrap.Modal(modalEl);
rows.forEach(function (row) {
row.addEventListener("click", function () {
var messageId = row.getAttribute("data-message-id");
if (!messageId) return;
// NOTE(review): .replace("0", messageId) swaps the FIRST "0" anywhere in
// the Jinja-rendered URL; this breaks if the route prefix ever contains a
// zero — confirm the route shape or anchor the replacement.
fetch("{{ url_for('main.inbox_message_detail', message_id=0) }}".replace("0", messageId))
.then(function (resp) {
if (!resp.ok) throw new Error("Failed to load message details");
return resp.json();
})
.then(function (data) {
if (data.status !== "ok") throw new Error("Unexpected response");
var meta = data.meta || {};
// Ticket codes and remark items ride on the row's data attributes, not
// in the fetched payload; malformed JSON silently falls back to [].
var ticketCodes = [];
var remarkItems = [];
try { ticketCodes = JSON.parse(row.getAttribute("data-ticket-codes") || "[]"); } catch (e) { ticketCodes = []; }
try { remarkItems = JSON.parse(row.getAttribute("data-remark-items") || "[]"); } catch (e) { remarkItems = []; }
var ticketEl = document.getElementById("run_msg_ticket");
if (ticketEl) ticketEl.textContent = (ticketCodes && ticketCodes.length) ? ticketCodes.join(", ") : "";
var remarkEl = document.getElementById("run_msg_remark");
if (remarkEl) {
if (remarkItems && remarkItems.length) {
// Render each remark as "title: body", falling back to whichever
// half is present; items with neither are dropped.
var parts = [];
for (var i = 0; i < remarkItems.length; i++) {
var it = remarkItems[i] || {};
var t = (it.title || "").toString().trim();
var b = (it.body || "").toString().trim();
if (t && b) parts.push(t + ": " + b);
else if (b) parts.push(b);
else if (t) parts.push(t);
}
remarkEl.textContent = parts.join(" | ");
} else {
remarkEl.textContent = "";
}
}
// Plain-text metadata fields use textContent (no escaping needed).
document.getElementById("jobRunMessageModalLabel").textContent = meta.subject || "Run details";
document.getElementById("run_msg_from").textContent = meta.from_address || "";
document.getElementById("run_msg_backup").textContent = meta.backup_software || "";
document.getElementById("run_msg_type").textContent = meta.backup_type || "";
document.getElementById("run_msg_job").textContent = meta.job_name || "";
var overallEl = document.getElementById("run_msg_overall");
if (overallEl) {
// Overall status gets a coloured dot + escaped text via innerHTML.
var d = statusDotClass(meta.overall_status);
overallEl.innerHTML = (d ? ('<span class="status-dot ' + d + ' me-2" aria-hidden="true"></span>') : '') + escapeHtml(meta.overall_status || "");
}
document.getElementById("run_msg_overall_message").textContent = meta.overall_message || "";
document.getElementById("run_msg_customer").textContent = meta.customer_name || "";
document.getElementById("run_msg_received").textContent = meta.received_at || "";
document.getElementById("run_msg_parsed").textContent = meta.parsed_at || "";
// EML download button is only shown when the backend says an .eml exists.
var emlBtn = document.getElementById("run_msg_eml_btn");
if (emlBtn) {
if (meta.has_eml) {
emlBtn.href = "{{ url_for('main.message_eml', message_id=0) }}".replace("0", messageId);
emlBtn.style.display = "inline-block";
} else {
emlBtn.href = "#";
emlBtn.style.display = "none";
}
}
// Mail body is rendered in a sandboxed iframe, isolated from site CSS.
var bodyFrame = document.getElementById("run_msg_body_container_iframe");
if (bodyFrame) bodyFrame.srcdoc = wrapMailHtml(data.body_html || "");
renderObjects(data.objects || []);
modal.show();
})
.catch(function (err) {
// Best-effort: failures only hit the console; the modal stays closed.
console.error(err);
});
});
});
}
document.addEventListener("DOMContentLoaded", attachHandlers);
})();
</script>
{% endblock %}

View File

@ -0,0 +1,35 @@
{% extends "layout/base.html" %}
{% block content %}
<h2 class="mb-3">Jobs</h2>
<div class="table-responsive">
<table class="table table-sm table-hover align-middle">
<thead class="table-light">
<tr>
<th scope="col">Customer</th>
<th scope="col">Backup</th>
<th scope="col">Type</th>
<th scope="col">Job name</th>
</tr>
</thead>
<tbody>
{% if jobs %}
{% for j in jobs %}
<tr style="cursor: pointer;" onclick="window.location='{{ url_for('main.job_detail', job_id=j.id) }}'">
<td>{{ j.customer_name }}</td>
<td>{{ j.backup_software }}</td>
<td>{{ j.backup_type }}</td>
<td>{{ j.job_name }}</td>
</tr>
{% endfor %}
{% else %}
<tr>
<td colspan="4" class="text-center text-muted py-3">
No jobs found.
</td>
</tr>
{% endif %}
</tbody>
</table>
</div>
{% endblock %}

View File

@ -0,0 +1,90 @@
{% extends "layout/base.html" %}
{% block content %}
<h2 class="mb-4">Logging</h2>
<style>
/* Keep logging table columns fixed to prevent shifting based on content */
table.logging-table-fixed { table-layout: fixed; width: 100%; }
table.logging-table-fixed th, table.logging-table-fixed td { overflow: hidden; }
table.logging-table-fixed th.col-time, table.logging-table-fixed td.col-time,
table.logging-table-fixed th.col-user, table.logging-table-fixed td.col-user,
table.logging-table-fixed th.col-action, table.logging-table-fixed td.col-action,
table.logging-table-fixed th.col-entity, table.logging-table-fixed td.col-entity {
white-space: nowrap;
text-overflow: ellipsis;
}
table.logging-table-fixed th.col-details, table.logging-table-fixed td.col-details {
white-space: normal;
word-break: break-word;
}
</style>
<div class="card">
<div class="card-header">
Admin activity (last 7 days)
</div>
<div class="card-body">
<div class="table-responsive">
<table class="table table-sm table-hover align-middle logging-table-fixed w-100">
<colgroup>
<col style="width: 160px;">
<col style="width: 140px;">
<col style="width: 170px;">
<col style="width: 240px;">
<col style="width: auto;">
</colgroup>
<thead class="table-light">
<tr>
<th scope="col" class="col-time">Time</th>
<th scope="col" class="col-user">User</th>
<th scope="col" class="col-action">Event</th>
<th scope="col" class="col-entity">Message</th>
<th scope="col" class="col-details">Details</th>
</tr>
</thead>
<tbody>
{% if logs %}
{% for log in logs %}
<tr>
<td>{{ log.created_at }}</td>
<td>{{ log.user or "-" }}</td>
<td>{{ log.event_type }}</td>
<td>{{ log.message }}</td>
<td class="text-muted col-details">{{ log.details }}</td>
</tr>
{% endfor %}
{% else %}
<tr>
<td colspan="5" class="text-center text-muted py-3">
No log entries available.
</td>
</tr>
{% endif %}
</tbody>
</table>
</div>
{% if total_pages and total_pages > 1 %}
<nav aria-label="Logging pagination" class="mt-3">
<ul class="pagination pagination-sm mb-0">
<li class="page-item {% if not has_prev %}disabled{% endif %}">
<a class="page-link" href="{% if has_prev %}{{ url_for('main.logging_page', page=page-1) }}{% else %}#{% endif %}" aria-label="Previous">
&laquo;
</a>
</li>
<li class="page-item disabled">
<span class="page-link">Page {{ page }} of {{ total_pages }}</span>
</li>
<li class="page-item {% if not has_next %}disabled{% endif %}">
<a class="page-link" href="{% if has_next %}{{ url_for('main.logging_page', page=page+1) }}{% else %}#{% endif %}" aria-label="Next">
&raquo;
</a>
</li>
</ul>
</nav>
{% endif %}
</div>
</div>
{% endblock %}

View File

@ -0,0 +1,249 @@
{% extends "layout/base.html" %}
{% block content %}
<h2 class="mb-3">Overrides</h2>
{% if can_manage %}
<div class="card mb-4">
<div class="card-header">
Add override
</div>
<div class="card-body">
<form method="post" action="{{ url_for('main.overrides_create') }}" id="overrideForm" class="row g-3">
<input type="hidden" name="override_id" id="ov_override_id" value="">
<div class="col-md-3">
<label for="ov_level" class="form-label">Level</label>
<select class="form-select" id="ov_level" name="level" required>
<option value="global">Global</option>
<option value="object">Object</option>
</select>
</div>
<div class="col-md-3">
<label for="ov_backup_software" class="form-label">Backup software (global)</label>
<select class="form-select" id="ov_backup_software" name="backup_software">
<option value="">Global</option>
{% for bs in backup_software_options %}
<option value="{{ bs }}">{{ bs }}</option>
{% endfor %}
</select>
</div>
<div class="col-md-3">
<label for="ov_backup_type" class="form-label">Backup type (global)</label>
<select class="form-select" id="ov_backup_type" name="backup_type">
<option value="">Global</option>
{% for bt in backup_type_options %}
<option value="{{ bt }}">{{ bt }}</option>
{% endfor %}
</select>
</div>
<div class="col-md-3">
<label for="ov_job_id" class="form-label">Job (object level)</label>
<select class="form-select" id="ov_job_id" name="job_id">
<option value="">-- none --</option>
{% for job in jobs_for_select %}
<option value="{{ job.id }}">
{{ job.customer.name if job.customer else "-" }} / {{ job.backup_software or "-" }} / {{ job.backup_type or "-" }} / {{ job.job_name or "-" }}
</option>
{% endfor %}
</select>
</div>
<div class="col-md-3">
<label for="ov_object_name" class="form-label">Object name (object level)</label>
<input type="text" class="form-control" id="ov_object_name" name="object_name" placeholder="Exact object name">
</div>
<div class="col-md-3">
<label for="ov_match_status" class="form-label">Status</label>
<select class="form-select" id="ov_match_status" name="match_status">
<option value="">Any</option>
<option value="success">Success</option>
<option value="warning">Warning</option>
<option value="failed">Failed</option>
</select>
</div>
<div class="col-md-3">
<label for="ov_match_error_contains" class="form-label">Error contains</label>
<input type="text" class="form-control" id="ov_match_error_contains" name="match_error_contains" placeholder="Text to match in error message">
</div>
<div class="col-md-3">
<label for="ov_start_at" class="form-label">From</label>
<input type="datetime-local" class="form-control" id="ov_start_at" name="start_at">
</div>
<div class="col-md-3">
<label for="ov_end_at" class="form-label">Until</label>
<input type="datetime-local" class="form-control" id="ov_end_at" name="end_at">
</div>
<div class="col-md-3">
<label class="form-label d-block">&nbsp;</label>
<div class="form-check">
<input class="form-check-input" type="checkbox" id="ov_treat_success" name="treat_as_success" checked>
<label class="form-check-label" for="ov_treat_success">
Treat as success
</label>
</div>
</div>
<div class="col-12">
<label for="ov_comment" class="form-label">Comment</label>
<textarea class="form-control" id="ov_comment" name="comment" rows="2" placeholder="Ticket number / reason for override"></textarea>
</div>
<div class="col-12">
<button type="submit" class="btn btn-primary" id="ov_submit_btn">Save override</button>
<button type="button" class="btn btn-outline-secondary ms-2 d-none" id="ov_cancel_edit_btn">Cancel edit</button>
</div>
</form>
</div>
</div>
{% endif %}
<div class="card">
<div class="card-header">
Existing overrides
</div>
<div class="card-body">
<div class="table-responsive">
<table class="table table-sm table-hover align-middle">
<thead class="table-light">
<tr>
<th scope="col">Level</th>
<th scope="col">Scope</th>
<th scope="col">From</th>
<th scope="col">Until</th>
<th scope="col">Active</th>
<th scope="col">Comment</th>
{% if can_manage %}
<th scope="col">Actions</th>
{% endif %}
</tr>
</thead>
<tbody>
{% if overrides %}
{% for ov in overrides %}
<tr>
<td>{{ ov.level }}</td>
<td>{{ ov.scope }}</td>
<td>{{ ov.start_at }}</td>
<td>{{ ov.end_at or "-" }}</td>
<td>
{% if ov.active %}
<span class="badge bg-success">Active</span>
{% else %}
<span class="badge bg-secondary">Inactive</span>
{% endif %}
</td>
<td>{{ ov.comment }}</td>
{% if can_manage %}
<td>
<button type="button" class="btn btn-sm btn-outline-primary me-1 ov-edit-btn"
data-ov-id="{{ ov.id }}"
data-ov-level="{{ ov.level }}"
data-ov-backup-software="{{ ov.backup_software or '' }}"
data-ov-backup-type="{{ ov.backup_type or '' }}"
data-ov-job-id="{{ ov.job_id or '' }}"
data-ov-object-name="{{ ov.object_name or '' }}"
data-ov-match-status="{{ ov.match_status or '' }}"
data-ov-match-error-contains="{{ ov.match_error_contains or '' }}"
data-ov-treat-as-success="{{ 1 if ov.treat_as_success else 0 }}"
data-ov-comment="{{ ov.comment or '' }}"
data-ov-start-at="{{ ov.start_at_raw or '' }}"
data-ov-end-at="{{ ov.end_at_raw or '' }}">Edit</button>
<form method="post" action="{{ url_for('main.overrides_toggle', override_id=ov.id) }}" class="d-inline">
<button type="submit" class="btn btn-sm btn-outline-secondary">
{% if ov.active %}Disable{% else %}Enable{% endif %}
</button>
</form>
{% if can_delete %}
<form method="post" action="{{ url_for('main.overrides_delete', override_id=ov.id) }}" class="d-inline ms-1"
onsubmit="return confirm('Delete this override?');">
<button type="submit" class="btn btn-sm btn-outline-danger">Delete</button>
</form>
{% endif %}
</td>
{% endif %}
</tr>
{% endfor %}
{% else %}
<tr>
<td colspan="{% if can_manage %}7{% else %}6{% endif %}" class="text-center text-muted py-3">
No overrides defined.
</td>
</tr>
{% endif %}
</tbody>
</table>
</div>
</div>
</div>
<script>
// Override form edit-mode wiring: clicking an Edit button copies that
// override's data-* attributes into the create form and switches the form's
// action to the update endpoint; Cancel resets it back to create mode.
(function () {
const form = document.getElementById('overrideForm');
if (!form) return;
const submitBtn = document.getElementById('ov_submit_btn');
const cancelBtn = document.getElementById('ov_cancel_edit_btn');
const idField = document.getElementById('ov_override_id');
const levelField = document.getElementById('ov_level');
const backupSoftwareField = document.getElementById('ov_backup_software');
const backupTypeField = document.getElementById('ov_backup_type');
const jobField = document.getElementById('ov_job_id');
const objectNameField = document.getElementById('ov_object_name');
const matchStatusField = document.getElementById('ov_match_status');
const matchErrorContainsField = document.getElementById('ov_match_error_contains');
const treatAsSuccessField = document.getElementById('ov_treat_success');
const commentField = document.getElementById('ov_comment');
const startAtField = document.getElementById('ov_start_at');
const endAtField = document.getElementById('ov_end_at');
// createAction is the form's original (create) action; the update action is
// derived from a Jinja-rendered URL built with override_id=0.
const createAction = form.getAttribute('action');
const updateActionTemplate = "{{ url_for('main.overrides_update', override_id=0) }}";
// Switch the form into edit mode for the given override id.
// NOTE(review): replace('/0', ...) swaps the FIRST '/0' in the rendered URL;
// fragile if the app is ever mounted under a prefix containing '/0' — confirm.
function setFormModeEdit(ovId) {
idField.value = ovId;
form.setAttribute('action', updateActionTemplate.replace('/0', '/' + ovId));
cancelBtn.classList.remove('d-none');
if (submitBtn) submitBtn.textContent = 'Update override';
window.scrollTo({ top: 0, behavior: 'smooth' });
}
// Restore the form to create mode (original action, default button label).
function resetFormModeCreate() {
idField.value = '';
form.setAttribute('action', createAction);
cancelBtn.classList.add('d-none');
if (submitBtn) submitBtn.textContent = 'Save override';
}
// Assign a value to a field and fire 'change' so any dependent listeners
// (e.g. level-based show/hide elsewhere) react to the programmatic update.
function setValue(field, val) {
if (!field) return;
field.value = (val === null || val === undefined) ? '' : String(val);
field.dispatchEvent(new Event('change'));
}
document.querySelectorAll('.ov-edit-btn').forEach(btn => {
btn.addEventListener('click', () => {
setValue(levelField, btn.dataset.ovLevel || '');
setValue(backupSoftwareField, btn.dataset.ovBackupSoftware || '');
setValue(backupTypeField, btn.dataset.ovBackupType || '');
setValue(jobField, btn.dataset.ovJobId || '');
setValue(objectNameField, btn.dataset.ovObjectName || '');
setValue(matchStatusField, btn.dataset.ovMatchStatus || '');
setValue(matchErrorContainsField, btn.dataset.ovMatchErrorContains || '');
// treat_as_success travels as '1'/'0' in the data attribute.
if (treatAsSuccessField) treatAsSuccessField.checked = (btn.dataset.ovTreatAsSuccess === '1');
setValue(commentField, btn.dataset.ovComment || '');
setValue(startAtField, btn.dataset.ovStartAt || '');
setValue(endAtField, btn.dataset.ovEndAt || '');
setFormModeEdit(btn.dataset.ovId);
});
});
if (cancelBtn) {
cancelBtn.addEventListener('click', () => {
form.reset();
resetFormModeCreate();
});
}
})();
</script>
{% endblock %}

View File

@ -0,0 +1,107 @@
{% extends "layout/base.html" %}
{% block content %}
<h2 class="mb-3">Edit override</h2>
<div class="mb-3">
<a class="btn btn-outline-secondary" href="{{ url_for('main.overrides') }}">Back to overrides</a>
</div>
<div class="card">
<div class="card-header">
Override #{{ ov.id }}
</div>
<div class="card-body">
<form method="post" action="{{ url_for('main.overrides_update', override_id=ov.id) }}" class="row g-3">
<div class="col-md-3">
<label for="ov_level" class="form-label">Level</label>
<select class="form-select" id="ov_level" name="level" required>
<option value="global" {% if (ov.level or '').lower() == 'global' %}selected{% endif %}>Global</option>
<option value="object" {% if (ov.level or '').lower() == 'object' %}selected{% endif %}>Object</option>
</select>
</div>
<div class="col-md-3">
<label for="ov_backup_software" class="form-label">Backup software (global)</label>
<select class="form-select" id="ov_backup_software" name="backup_software">
<option value="">Global</option>
{% for bs in backup_software_options %}
<option value="{{ bs }}" {% if ov.backup_software == bs %}selected{% endif %}>{{ bs }}</option>
{% endfor %}
</select>
</div>
<div class="col-md-3">
<label for="ov_backup_type" class="form-label">Backup type (global)</label>
<select class="form-select" id="ov_backup_type" name="backup_type">
<option value="">Global</option>
{% for bt in backup_type_options %}
<option value="{{ bt }}" {% if ov.backup_type == bt %}selected{% endif %}>{{ bt }}</option>
{% endfor %}
</select>
</div>
<div class="col-md-3">
<label for="ov_job_id" class="form-label">Job (object level)</label>
<select class="form-select" id="ov_job_id" name="job_id">
<option value="">-- none --</option>
{% for job in jobs_for_select %}
<option value="{{ job.id }}" {% if ov.job_id == job.id %}selected{% endif %}>
{{ job.customer.name if job.customer else "-" }} / {{ job.backup_software or "-" }} / {{ job.backup_type or "-" }} / {{ job.job_name or "-" }}
</option>
{% endfor %}
</select>
</div>
<div class="col-md-3">
<label for="ov_object_name" class="form-label">Object name (object level)</label>
<input type="text" class="form-control" id="ov_object_name" name="object_name" placeholder="Exact object name" value="{{ ov.object_name or '' }}">
</div>
<div class="col-md-3">
<label for="ov_match_status" class="form-label">Status</label>
<select class="form-select" id="ov_match_status" name="match_status">
<option value="" {% if not ov.match_status %}selected{% endif %}>Any</option>
<option value="success" {% if ov.match_status == 'success' %}selected{% endif %}>Success</option>
<option value="warning" {% if ov.match_status == 'warning' %}selected{% endif %}>Warning</option>
<option value="failed" {% if ov.match_status == 'failed' %}selected{% endif %}>Failed</option>
</select>
</div>
<div class="col-md-3">
<label for="ov_match_error_contains" class="form-label">Error contains</label>
<input type="text" class="form-control" id="ov_match_error_contains" name="match_error_contains" placeholder="Text to match in error message" value="{{ ov.match_error_contains or '' }}">
</div>
<div class="col-md-3">
<label for="ov_start_at" class="form-label">From</label>
<input type="datetime-local" class="form-control" id="ov_start_at" name="start_at" value="{{ ov.start_at.strftime('%Y-%m-%dT%H:%M') if ov.start_at else '' }}">
</div>
<div class="col-md-3">
<label for="ov_end_at" class="form-label">Until</label>
<input type="datetime-local" class="form-control" id="ov_end_at" name="end_at" value="{{ ov.end_at.strftime('%Y-%m-%dT%H:%M') if ov.end_at else '' }}">
</div>
<div class="col-md-3">
<label class="form-label d-block">&nbsp;</label>
<div class="form-check">
<input class="form-check-input" type="checkbox" id="ov_treat_success" name="treat_as_success" {% if ov.treat_as_success %}checked{% endif %}>
<label class="form-check-label" for="ov_treat_success">
Treat as success
</label>
</div>
</div>
<div class="col-md-3">
<label class="form-label d-block">&nbsp;</label>
<div class="form-check">
<input class="form-check-input" type="checkbox" id="ov_active" disabled {% if ov.active %}checked{% endif %}>
<label class="form-check-label" for="ov_active">
Active (toggle on Overrides page)
</label>
</div>
</div>
<div class="col-12">
<label for="ov_comment" class="form-label">Comment</label>
<textarea class="form-control" id="ov_comment" name="comment" rows="2" placeholder="Ticket number / reason for override">{{ ov.comment or '' }}</textarea>
</div>
<div class="col-12">
<button type="submit" class="btn btn-primary">Save changes</button>
<a class="btn btn-outline-secondary" href="{{ url_for('main.overrides') }}">Cancel</a>
</div>
</form>
</div>
</div>
{% endblock %}

View File

@ -0,0 +1,358 @@
{% extends "layout/base.html" %}
{% block content %}
<h2 class="mb-3">Parsers</h2>
<div class="card mb-4">
<div class="card-header">
Mail processing flow
</div>
<div class="card-body">
<ol class="mb-0">
<li>
<strong>Retrieval</strong><br />
Mail is retrieved from Microsoft Graph (manual or automatic import) and stored in the database as a <code>MailMessage</code> record
including sender, subject, received time and message body (HTML and plain text).
</li>
<li class="mt-2">
<strong>Preprocessing</strong><br />
The message body is normalized (line endings, character set) so that the parsers can work with a consistent format.
</li>
<li class="mt-2">
<strong>Parser selection</strong><br />
All active parsers are evaluated in a fixed order. For each parser, match criteria like sender address, subject text and body
snippets are checked.
</li>
<li class="mt-2">
<strong>Parsing</strong><br />
As soon as a parser matches, it extracts:
<ul>
<li>Backup software (for example: Veeam, NAKIVO, Panel3, Syncovery)</li>
<li>Backup type (for example: Backup Job, Backup Copy Job, Replication job for VMware)</li>
<li>Job name</li>
<li>Objects within the job (for example: VMs, servers, repositories) including their status and any error message</li>
</ul>
</li>
<li class="mt-2">
<strong>Storage and linkage</strong><br />
The parsed result is stored in the database:
<ul>
<li><code>Job</code> one record per backup job</li>
<li><code>JobRun</code> one record per run of a job</li>
<li><code>JobObject</code> one record per object inside a run</li>
</ul>
The mail itself remains linked to the run via <code>mail_message_id</code>.
</li>
<li class="mt-2">
<strong>Inbox and approval</strong><br />
If a job for this mail has not been approved yet, the mail appears in the Inbox with the parsed result. After approval the run
is shown on the Jobs and Daily Jobs pages.
</li>
</ol>
</div>
</div>
<div class="card mb-4">
<div class="card-header">
Available parsers
</div>
<div class="card-body">
<div class="table-responsive">
<table class="table table-sm table-hover align-middle">
<thead class="table-light">
<tr>
<th scope="col">Name</th>
<th scope="col">Backup software</th>
<th scope="col">Backup type(s)</th>
<th scope="col">Match criteria</th>
<th scope="col">Order</th>
<th scope="col">Enabled</th>
</tr>
</thead>
<tbody>
{% if parsers %}
{% for p in parsers %}
<tr>
<td>{{ p.name }}</td>
<td>{{ p.backup_software }}</td>
<td>
{% if p.backup_types %}
{{ p.backup_types | join(", ") }}
{% else %}
-
{% endif %}
</td>
<td>
{% set parts = [] %}
{% if p.match.from_contains %}
{% set _ = parts.append("from contains '" ~ p.match.from_contains ~ "'") %}
{% endif %}
{% if p.match.subject_contains %}
{% set _ = parts.append("subject contains '" ~ p.match.subject_contains ~ "'") %}
{% endif %}
{% if p.match.subject_regex %}
{% set _ = parts.append("subject matches /" ~ p.match.subject_regex ~ "/") %}
{% endif %}
{% if parts %}
{{ parts | join(", ") }}
{% else %}
-
{% endif %}
</td>
<td>{{ p.order }}</td>
<td>
{% if p.enabled %}
<span class="badge bg-success">Enabled</span>
{% else %}
<span class="badge bg-secondary">Disabled</span>
{% endif %}
</td>
</tr>
{% endfor %}
{% else %}
<tr>
<td colspan="6" class="text-center text-muted py-3">
No parsers defined.
</td>
</tr>
{% endif %}
</tbody>
</table>
</div>
</div>
</div>
{% if parsers %}
{% for p in parsers %}
<div class="card mb-3">
<div class="card-header">
{{ p.name }} {{ p.backup_software }}{% if p.backup_types %} ({{ p.backup_types | join(", ") }}){% endif %}
</div>
<div class="card-body">
<p class="mb-2 text-muted">{{ p.description }}</p>
{% set ex_list = p.examples if p.examples is defined else ([p.example] if p.example is defined else []) %}
{# Add supported Veeam backup-type examples that are configured/used in this installation. #}
{% if p.backup_software == 'Veeam' %}
{% set veeam_examples = [
{
'from_address': 'Veeam Backup & Replication <noreply@yourdomain>',
'subject': '[Success] Replication Job: VMware - DR Copy',
'body_snippet': 'Replication job "VMware - DR Copy" finished successfully\nObjects processed: 2\nVM01 - Success\nVM02 - Success',
'parsed_result': {
'backup_software': 'Veeam',
'backup_type': 'Replication Job',
'job_name': 'VMware - DR Copy',
'objects': [
{'name': 'VM01', 'status': 'success', 'error_message': ''},
{'name': 'VM02', 'status': 'success', 'error_message': ''}
]
}
},
{
'from_address': 'Veeam Agent <noreply@yourdomain>',
'subject': '[Warning] Agent Backup Job: LAPTOP-123',
'body_snippet': 'Agent backup job "LAPTOP-123" finished with warnings\n1 of 1 objects processed\nLAPTOP-123 - Warning',
'parsed_result': {
'backup_software': 'Veeam',
'backup_type': 'Agent Backup',
'job_name': 'LAPTOP-123',
'objects': [
{'name': 'LAPTOP-123', 'status': 'warning', 'error_message': 'Finished with warnings'}
]
}
},
{
'from_address': 'Veeam Backup for Microsoft 365 <noreply@yourdomain>',
'subject': '[Success] Microsoft 365 Backup Job: M365 Daily',
'body_snippet': 'Microsoft 365 backup job "M365 Daily" finished successfully\nObjects processed: 3\nExchange - Success\nSharePoint - Success\nOneDrive - Success',
'parsed_result': {
'backup_software': 'Veeam',
'backup_type': 'Microsoft 365 Backup',
'job_name': 'M365 Daily',
'objects': [
{'type': 'Exchange', 'name': 'Exchange', 'status': 'success', 'error_message': ''},
{'type': 'SharePoint', 'name': 'SharePoint', 'status': 'success', 'error_message': ''},
{'type': 'OneDrive', 'name': 'OneDrive', 'status': 'success', 'error_message': ''}
]
}
}
,
{
'from_address': 'Veeam Backup & Replication <noreply@yourdomain>',
'subject': '[Success] Scale-out Backup Repository: CEPH RBD',
'body_snippet': 'Scale-out Backup Repository: CEPH RBD\nExtents:\n- CEPH RBD1\n- CEPH RBD2\nUsed Space: 107,6 TB\nCapacity: 200 TB (66% free)',
'parsed_result': {
'backup_software': 'Veeam',
'backup_type': 'Scale-out Backup Repository',
'job_name': 'CEPH RBD',
'overall_status': 'Success',
'objects': [
{'type': 'Extent', 'name': 'CEPH RBD1', 'status': 'Online', 'error_message': ''},
{'type': 'Extent', 'name': 'CEPH RBD2', 'status': 'Online', 'error_message': ''}
]
}
},
{
'from_address': 'Veeam Backup & Replication <noreply@yourdomain>',
'subject': '[Success] Health check (1 objects)',
'body_snippet': 'Health Check Summary\nSuccess: 1\nWarnings: 0\nErrors: 0\n\nObjects:\nHealthCheck Blygold DC01 - Success',
'parsed_result': {
'backup_software': 'Veeam',
'backup_type': 'Health Check',
'job_name': 'Health Check',
'overall_status': 'Success',
'objects': [
{'name': 'HealthCheck Blygold DC01', 'status': 'Success', 'error_message': ''}
]
}
}
] %}
{% set ex_list = ex_list + veeam_examples %}
{% endif %}
{# Add Synology Active Backup examples. #}
{% if p.backup_software == 'Synology' %}
{% set synology_examples = [
{
'from_address': 'Synology Active Backup <noreply@yourdomain>',
'subject': 'NAS - Active Backup for Google Workspace - back-uptaak [Google D-Two] is gedeeltelijk voltooid',
'body_snippet': 'Back-up [Google D-Two] is voltooid, maar van sommige items kon geen back-up worden gemaakt.\n- Mijn schijf: succes: 0; waarschuwing: 11; fout: 0\n- Mail: succes: 0; waarschuwing: 11; fout: 0\nStarttijd: 2025-12-18 00:00:02\nEindtijd: 2025-12-18 00:01:38',
'parsed_result': {
'backup_software': 'Synology',
'backup_type': 'Active Backup',
'job_name': 'Google D-Two',
'overall_status': 'Warning',
'objects': []
}
}
,
{
'from_address': 'SCHWARZNAS <noreply@yourdomain>',
'subject': 'Gegevensback-uptaak op SCHWARZNAS is mislukt',
'body_snippet': 'Gegevensback-uptaak op SCHWARZNAS is mislukt.\nTaaknaam: Data backup - NAS thuis\nGa naar Hyper Backup > Logboek voor meer informatie.',
'parsed_result': {
'backup_software': 'Synology',
'backup_type': 'Hyperbackup',
'job_name': 'Data backup - NAS thuis',
'overall_status': 'Failed',
'objects': []
}
}
,
{
'from_address': 'DiskStation <noreply@yourdomain>',
'subject': '[Golfpark NAS] HiDrive cloud backup - NAS Backup - Strato HiDrive successful on DiskStation',
'body_snippet': 'Your backup task NAS Backup - Strato HiDrive is now complete.\n\nBackup Task: NAS Backup - Strato HiDrive\nBackup Destination: /users/.../DiskStation_1.hbk\nStart Time: Wed, Dec 17 2025 23:00:01\nDuration: 17 Minute 9 Second',
'parsed_result': {
'backup_software': 'Synology',
'backup_type': 'Strato HiDrive',
'job_name': 'NAS Backup - Strato HiDrive',
'overall_status': 'Success',
'objects': []
}
}] %}
{% set ex_list = ex_list + synology_examples %}
{% endif %}
{# Add NAKIVO VMware examples. #}
{% if p.backup_software == 'NAKIVO' %}
{% set nakivo_examples = [
{
'from_address': 'NAKIVO Backup & Replication <administrator@yourdomain>',
'subject': '"exchange01.kuiperbv.nl" job: Successful',
'body_snippet': 'Job Run Report\nBackup job for VMware\nexchange01.kuiperbv.nl\nSuccessful',
'parsed_result': {
'backup_software': 'NAKIVO',
'backup_type': 'Backup job for VMware',
'job_name': 'exchange01.kuiperbv.nl',
'overall_status': 'Success',
'objects': [
{'name': 'exchange01.kuiperbv.nl', 'status': 'success', 'error_message': ''}
]
}
}
] %}
{% set ex_list = ex_list + nakivo_examples %}
{% endif %}
{% for ex in ex_list %}
<h6 class="mt-2">Example {{ loop.index }}</h6>
<dl class="row mb-3">
<dt class="col-sm-2">From</dt>
<dd class="col-sm-10">{{ ex.from_address }}</dd>
<dt class="col-sm-2">Subject</dt>
<dd class="col-sm-10">{{ ex.subject }}</dd>
<dt class="col-sm-2">Body snippet</dt>
<dd class="col-sm-10">
<pre class="mb-0 small">{{ ex.body_snippet }}</pre>
</dd>
</dl>
<h6>Parsed result</h6>
<dl class="row mb-2">
<dt class="col-sm-3">Backup software</dt>
<dd class="col-sm-9">{{ ex.parsed_result.backup_software }}</dd>
<dt class="col-sm-3">Backup type</dt>
<dd class="col-sm-9">{{ ex.parsed_result.backup_type }}</dd>
<dt class="col-sm-3">Job name</dt>
<dd class="col-sm-9">{{ ex.parsed_result.job_name }}</dd>
</dl>
<div class="table-responsive mb-2">
<table class="table table-sm table-bordered mb-0">
<thead class="table-light">
<tr>
<th scope="col">Type</th>
<th scope="col">Object</th>
<th scope="col">Status</th>
<th scope="col">Error message</th>
</tr>
</thead>
<tbody>
{% for obj in ex.parsed_result.objects %}
<tr>
<td>{{ obj.type or '' }}</td>
<td>{{ obj.name }}</td>
{% set _s = (obj.status or "")|lower %}
{% set _dot = '' %}
{% if _s == 'success' %}{% set _dot = 'dot-success' %}
{% elif _s == 'warning' %}{% set _dot = 'dot-warning' %}
{% elif _s == 'error' %}{% set _dot = 'dot-failed' %}
{% elif _s == 'failed' %}{% set _dot = 'dot-failed' %}
{% elif _s == 'missed' %}{% set _dot = 'dot-missed' %}
{% endif %}
<td class="status-text {% if _s == 'success' %}status-success{% elif _s == 'warning' %}status-warning{% elif _s == 'error' %}status-error{% elif _s == 'failed' %}status-failed{% elif _s == 'missed' %}status-missed{% endif %}">{% if _dot %}<span class="status-dot {{ _dot }} me-2" aria-hidden="true"></span>{% endif %}{{ obj.status }}</td>
<td>{{ obj.error_message }}</td>
</tr>
{% endfor %}
</tbody>
</table>
</div>
{% if not loop.last %}
<hr />
{% endif %}
{% endfor %}
</div>
</div>
{% endfor %}
{% endif %}
{% endblock %}

View File

@ -0,0 +1,96 @@
{% extends "layout/base.html" %}
{% block content %}
<div class="d-flex align-items-center justify-content-between mb-3">
<h2 class="mb-0">Remark</h2>
<a class="btn btn-outline-secondary" href="{{ url_for('main.tickets_page', tab='remarks') }}">Back</a>
</div>
<div class="card mb-3">
<div class="card-body">
<div class="mb-2">
<span class="me-2">{% if remark.resolved_at %}✅ Resolved{% else %}💬 Active{% endif %}</span>
<span class="text-muted">Start: {{ remark.start_date.strftime('%d-%m-%Y %H:%M:%S') if remark.start_date else '-' }}</span>
{% if remark.resolved_at %}
<span class="text-muted ms-2">Resolved: {{ remark.resolved_at.strftime('%d-%m-%Y %H:%M:%S') }}</span>
{% endif %}
</div>
<form method="post" class="row g-3"> <div class="col-12">
<label class="form-label">Body</label>
<textarea class="form-control" name="body" rows="6">{{ remark.body or '' }}</textarea>
</div>
{% if active_role in ['admin','operator'] %}
<div class="col-12">
<button class="btn btn-primary" type="submit">Save</button>
{% if not remark.resolved_at %}
<button class="btn btn-outline-success" type="button" onclick="if(confirm('Mark remark as resolved?')){fetch('{{ url_for('main.api_remark_resolve', remark_id=remark.id) }}',{method:'POST'}).then(()=>location.reload());}">Resolve</button>
{% endif %}
</div>
{% endif %}
</form>
</div>
</div>
<div class="row g-3">
<div class="col-lg-6">
<div class="card h-100">
<div class="card-body">
<h5 class="card-title">Scopes</h5>
{% if scopes %}
<ul class="list-group list-group-flush">
{% for s in scopes %}
<li class="list-group-item small">
<div><strong>Type:</strong> {{ s.scope_type }}</div>
<div><strong>Customer:</strong> {{ s.customer_id or '-' }}</div>
<div><strong>Backup:</strong> {{ s.backup_software or '-' }}</div>
<div><strong>Type:</strong> {{ s.backup_type or '-' }}</div>
<div><strong>Job:</strong> {{ s.job_id or '-' }}</div>
<div><strong>Job run:</strong> {{ s.job_run_id or '-' }}</div>
<div><strong>Job name match:</strong> {{ s.job_name_match or '-' }}</div>
</li>
{% endfor %}
</ul>
{% else %}
<div class="text-muted">No scopes.</div>
{% endif %}
</div>
</div>
</div>
<div class="col-lg-6">
<div class="card h-100">
<div class="card-body">
<h5 class="card-title">Linked runs (last 20)</h5>
{% if runs %}
<div class="table-responsive">
<table class="table table-sm align-middle">
<thead class="table-light">
<tr>
<th>Run at</th>
<th>Customer</th>
<th>Job</th>
<th>Status</th>
</tr>
</thead>
<tbody>
{% for r in runs %}
<tr>
<td class="text-nowrap">{{ r.run_at }}</td>
<td>{{ r.customer_name }}</td>
<td>{{ r.job_name }}</td>
<td>{{ r.status }}</td>
</tr>
{% endfor %}
</tbody>
</table>
</div>
{% else %}
<div class="text-muted">No linked runs.</div>
{% endif %}
</div>
</div>
</div>
</div>
{% endblock %}

View File

@ -0,0 +1,438 @@
{% extends "layout/base.html" %}
{% block content %}
<div class="d-flex flex-wrap align-items-baseline justify-content-between mb-3">
<div>
<h2 class="mb-1">Reports</h2>
<div class="text-muted">Create report definitions and generate raw output for testing.</div>
</div>
<div class="mt-2 mt-md-0">
<a class="btn btn-primary" id="rep_new_btn" href="{{ url_for('main.reports_new') }}">New report</a>
</div>
</div>
<div class="row g-3">
<div class="col-12 col-xl-8">
<div class="card">
<div class="card-header d-flex align-items-center justify-content-between">
<div>
<div class="fw-semibold">Report definitions</div>
<div class="text-muted small">One-time reports are supported. Scheduling is a placeholder for now.</div>
</div>
<div class="d-flex align-items-center gap-2">
<button type="button" class="btn btn-sm btn-outline-secondary" id="rep_refresh_btn">Refresh</button>
</div>
</div>
<div class="card-body p-0">
<div class="table-responsive">
<table class="table table-hover mb-0 align-middle">
<thead class="table-light">
<tr>
<th style="width: 30%;">Name</th>
<th style="width: 18%;">Type</th>
<th style="width: 22%;">Period</th>
<th style="width: 12%;">Format</th>
<th style="width: 18%;" class="text-end">Actions</th>
</tr>
</thead>
<tbody id="rep_table_body">
<tr>
<td colspan="5" class="text-center text-muted py-4">Loading…</td>
</tr>
</tbody>
</table>
</div>
</div>
<div class="card-footer text-muted small">
Tip: Generate a report first, then preview the raw data or download the CSV export.
</div>
</div>
</div>
<div class="col-12 col-xl-4">
<div class="card">
<div class="card-header">
<div class="fw-semibold">Scheduling (placeholder)</div>
<div class="text-muted small">This is a preview of the future scheduling UI.</div>
</div>
<div class="card-body">
<div class="mb-3">
<label class="form-label">Delivery method</label>
<select class="form-select" disabled>
<option selected>Email</option>
<option>Download only</option>
</select>
<div class="form-text">Coming soon.</div>
</div>
<div class="mb-3">
<label class="form-label">Frequency</label>
<select class="form-select" disabled>
<option selected>Daily</option>
<option>Weekly</option>
<option>Monthly</option>
</select>
</div>
<div class="mb-3">
<label class="form-label">Recipients</label>
<input type="text" class="form-control" disabled placeholder="user@example.com, team@example.com" />
</div>
<div class="mb-3">
<label class="form-label">Next run</label>
<div class="input-group">
<input type="text" class="form-control" disabled placeholder="Not scheduled" />
<span class="input-group-text">UTC</span>
</div>
</div>
<div class="alert alert-info mb-0">
Scheduling is not active yet. These controls are disabled on purpose.
</div>
</div>
</div>
</div>
</div>
<!-- Raw data modal -->
<div class="modal fade" id="rep_raw_modal" tabindex="-1" aria-hidden="true">
<div class="modal-dialog modal-xl modal-dialog-scrollable">
<div class="modal-content">
<div class="modal-header">
<h5 class="modal-title" id="rep_raw_title">Raw data</h5>
<button type="button" class="btn-close" data-bs-dismiss="modal" aria-label="Close"></button>
</div>
<div class="modal-body">
<div class="d-flex flex-wrap align-items-center justify-content-between gap-2 mb-3">
<div class="btn-group" role="group" aria-label="Raw view selector">
<button type="button" class="btn btn-outline-primary" id="rep_raw_view_summary">Summary</button>
<button type="button" class="btn btn-outline-primary" id="rep_raw_view_snapshot">Snapshot</button>
</div>
<div class="d-flex align-items-center gap-2">
<span class="text-muted small" id="rep_raw_meta">-</span>
<div class="btn-group" role="group" aria-label="Raw pagination">
<button type="button" class="btn btn-outline-secondary" id="rep_raw_prev_btn">Prev</button>
<button type="button" class="btn btn-outline-secondary" id="rep_raw_next_btn">Next</button>
</div>
</div>
</div>
<div class="table-responsive">
<table class="table table-sm table-hover align-middle mb-0" id="rep_raw_table">
<thead class="table-light" id="rep_raw_thead"></thead>
<tbody id="rep_raw_tbody">
<tr><td class="text-center text-muted py-4">Select a report to view raw data.</td></tr>
</tbody>
</table>
</div>
</div>
<div class="modal-footer">
<a class="btn btn-outline-success" id="rep_raw_download_btn" href="#" target="_blank">Download CSV</a>
<button type="button" class="btn btn-outline-secondary" data-bs-dismiss="modal">Close</button>
</div>
</div>
</div>
</div>
<script>
window.addEventListener('DOMContentLoaded', function () {
var rawModalEl = document.getElementById('rep_raw_modal');
var rawModal = window.bootstrap ? new bootstrap.Modal(rawModalEl) : null;
var rawReportId = null;
var rawView = 'summary';
var rawLimit = 100;
var rawOffset = 0;
function qs(id) { return document.getElementById(id); }
// Render a report's period as "start → end" with the ISO 'T' replaced
// by a space; returns '-' when neither bound is set.
function fmtPeriod(item) {
var start = item.period_start ? item.period_start.replace('T', ' ') : '';
var end = item.period_end ? item.period_end.replace('T', ' ') : '';
return (start || end) ? (start + ' → ' + end) : '-';
}
// Badge HTML for the report-type column; anything other than
// 'scheduled' (case-insensitive) is rendered as one-time.
function badgeForType(item) {
var kind = (item.report_type || '').toLowerCase();
return kind === 'scheduled'
? '<span class="badge text-bg-warning">Scheduled</span>'
: '<span class="badge text-bg-secondary">One-time</span>';
}
// Escape a value for safe insertion into innerHTML.
// Fix: the old body called .replace directly on `s`, so any truthy
// non-string input (e.g. a number) threw a TypeError; coerce through
// String() first. Falsy inputs still map to '' as before.
function escapeHtml(s) {
return String(s || '').replace(/[&<>"']/g, function (c) {
return {'&':'&amp;','<':'&lt;','>':'&gt;','"':'&quot;',"'":'&#39;'}[c];
});
}
// Replace the report table body with a single full-width placeholder row
// (used for loading and error states). msg defaults to 'Loading…'.
function setTableLoading(msg) {
var body = qs('rep_table_body');
body.innerHTML = '<tr><td colspan="5" class="text-center text-muted py-4">' + escapeHtml(msg || 'Loading…') + '</td></tr>';
}
// Reset the raw-data modal to a placeholder state: clear the table header,
// show `msg` in the body, reset the range label, and disable both pagers.
function setRawLoading(msg) {
qs('rep_raw_thead').innerHTML = '';
qs('rep_raw_tbody').innerHTML = '<tr><td class="text-center text-muted py-4">' + escapeHtml(msg || 'Loading…') + '</td></tr>';
qs('rep_raw_meta').textContent = '-';
qs('rep_raw_prev_btn').disabled = true;
qs('rep_raw_next_btn').disabled = true;
}
// Highlight the view-selector button matching the current rawView
// ('summary' or 'snapshot') and un-highlight the other.
function setRawViewButtons() {
var summaryBtn = qs('rep_raw_view_summary');
var snapshotBtn = qs('rep_raw_view_snapshot');
var snapshotActive = rawView === 'snapshot';
summaryBtn.classList.toggle('active', !snapshotActive);
snapshotBtn.classList.toggle('active', snapshotActive);
}
// Render the raw-data modal table for the given view.
// view:  'snapshot' → one row per job-run object (13 columns);
//        anything else → per-object summary counts (8 columns).
// items: row objects from /api/reports/<id>/data; may be empty or undefined.
// All dynamic values go through escapeHtml before hitting innerHTML.
function renderRawTable(view, items) {
var thead = qs('rep_raw_thead');
var tbody = qs('rep_raw_tbody');
// Build a single <tr> of escaped <th> cells from a list of labels.
function thRow(cols) {
return '<tr>' + cols.map(function (c) { return '<th>' + escapeHtml(c) + '</th>'; }).join('') + '</tr>';
}
if (view === 'snapshot') {
thead.innerHTML = thRow([
'Object', 'Customer', 'Job ID', 'Job Name', 'Backup software', 'Backup type',
'Run ID', 'Run at (UTC)', 'Status', 'Missed', 'Override', 'Reviewed at', 'Remark'
]);
if (!items || !items.length) {
tbody.innerHTML = '<tr><td colspan="13" class="text-center text-muted py-4">No snapshot rows found.</td></tr>';
return;
}
tbody.innerHTML = items.map(function (r) {
return (
'<tr>' +
'<td class="text-nowrap">' + escapeHtml(r.object_name || '') + '</td>' +
'<td class="text-nowrap">' + escapeHtml(r.customer_name || '') + '</td>' +
'<td class="text-nowrap">' + escapeHtml(String(r.job_id || '')) + '</td>' +
'<td>' + escapeHtml(r.job_name || '') + '</td>' +
'<td class="text-nowrap">' + escapeHtml(r.backup_software || '') + '</td>' +
'<td class="text-nowrap">' + escapeHtml(r.backup_type || '') + '</td>' +
'<td class="text-nowrap">' + escapeHtml(String(r.run_id || '')) + '</td>' +
'<td class="text-nowrap">' + escapeHtml((r.run_at || '').replace('T', ' ')) + '</td>' +
'<td class="text-nowrap">' + escapeHtml(r.status || '') + '</td>' +
'<td class="text-nowrap">' + (r.missed ? '1' : '0') + '</td>' +
'<td class="text-nowrap">' + (r.override_applied ? '1' : '0') + '</td>' +
'<td class="text-nowrap">' + escapeHtml((r.reviewed_at || '').replace('T', ' ')) + '</td>' +
'<td>' + escapeHtml(r.remark || '') + '</td>' +
'</tr>'
);
}).join('');
return;
}
// Summary view (the default when view !== 'snapshot').
thead.innerHTML = thRow([
'Object', 'Total', 'Success', 'Success (override)', 'Warning', 'Failed', 'Missed', 'Success rate (%)'
]);
if (!items || !items.length) {
tbody.innerHTML = '<tr><td colspan="8" class="text-center text-muted py-4">No summary rows found.</td></tr>';
return;
}
tbody.innerHTML = items.map(function (r) {
return (
'<tr>' +
'<td class="text-nowrap">' + escapeHtml(r.object_name || '') + '</td>' +
'<td class="text-nowrap">' + escapeHtml(String(r.total_runs || 0)) + '</td>' +
'<td class="text-nowrap">' + escapeHtml(String(r.success_count || 0)) + '</td>' +
'<td class="text-nowrap">' + escapeHtml(String(r.success_override_count || 0)) + '</td>' +
'<td class="text-nowrap">' + escapeHtml(String(r.warning_count || 0)) + '</td>' +
'<td class="text-nowrap">' + escapeHtml(String(r.failed_count || 0)) + '</td>' +
'<td class="text-nowrap">' + escapeHtml(String(r.missed_count || 0)) + '</td>' +
'<td class="text-nowrap">' + escapeHtml(String(r.success_rate || 0)) + '</td>' +
'</tr>'
);
}).join('');
}
// Update the "x–y of total" range label and enable/disable the pager
// buttons for the raw-data modal, based on rawOffset/rawLimit and `total`.
function updateRawMeta(total) {
var first = rawOffset + 1;
var last = Math.min(rawOffset + rawLimit, total);
if (!total) {
qs('rep_raw_meta').textContent = '0 rows';
} else {
// BUG FIX: the separator between the range bounds was an empty string
// (a lost en dash), which rendered e.g. "1100 of 250" instead of
// "1–100 of 250".
qs('rep_raw_meta').textContent = first + '–' + last + ' of ' + total;
}
qs('rep_raw_prev_btn').disabled = rawOffset <= 0;
qs('rep_raw_next_btn').disabled = (rawOffset + rawLimit) >= total;
}
// Point the CSV download button at the current report/view, or render
// it disabled (href '#') when no report is selected.
function setRawDownloadLink() {
var btn = qs('rep_raw_download_btn');
if (!rawReportId) {
btn.setAttribute('href', '#');
btn.classList.add('disabled');
return;
}
btn.classList.remove('disabled');
btn.setAttribute('href', '/api/reports/' + rawReportId + '/export.csv?view=' + rawView);
}
// Fetch one page of raw data for the current report/view and render it.
// No-op when no report is selected. Pagination state lives in the
// rawOffset/rawLimit closure variables; view buttons and the download
// link are refreshed first so the UI stays consistent while loading.
function loadRawData() {
if (!rawReportId) return;
setRawViewButtons();
setRawDownloadLink();
setRawLoading('Loading…');
fetch('/api/reports/' + rawReportId + '/data?view=' + rawView + '&limit=' + rawLimit + '&offset=' + rawOffset, { credentials: 'same-origin' })
.then(function (r) { return r.json(); })
.then(function (data) {
var items = (data && data.items) ? data.items : [];
var total = (data && data.total) ? data.total : 0;
renderRawTable(rawView, items);
updateRawMeta(total);
})
.catch(function () {
setRawLoading('Failed to load raw data. Generate the report first.');
});
}
// Open the raw-data modal for report `id`, resetting pagination to the
// first page and keeping the last-used view ('summary' by default).
function openRawModal(id) {
rawReportId = id;
rawOffset = 0;
rawView = rawView || 'summary';
qs('rep_raw_title').textContent = 'Raw data (Report #' + id + ')';
loadRawData();
// BUG FIX: rawModal is null when Bootstrap's JS is not loaded (see the
// `window.bootstrap ? … : null` initialisation); the unguarded
// rawModal.show() threw a TypeError in that case.
if (rawModal) rawModal.show();
}
// Render the report-definition rows and wire up their action buttons.
// items: list from GET /api/reports; may be empty or undefined.
function renderTable(items) {
var body = qs('rep_table_body');
if (!items || !items.length) {
body.innerHTML = '<tr><td colspan="5" class="text-center text-muted py-4">No reports defined yet.</td></tr>';
return;
}
body.innerHTML = '';
items.forEach(function (item) {
var tr = document.createElement('tr');
var name = escapeHtml(item.name || 'Report');
var desc = escapeHtml(item.description || '');
var typeBadge = badgeForType(item);
var period = escapeHtml(fmtPeriod(item));
var fmt = escapeHtml((item.output_format || 'csv').toUpperCase());
// FIX: escape the id before splicing it into attribute values and the
// CSV URL — ids are numeric today, but the raw interpolation was the
// only unescaped dynamic value in this template.
var safeId = escapeHtml(String(item.id));
tr.innerHTML =
'<td>' +
'<div class="fw-semibold">' + name + '</div>' +
(desc ? '<div class="text-muted small">' + desc + '</div>' : '') +
'</td>' +
'<td>' + typeBadge + '</td>' +
'<td class="text-muted small">' + period + '</td>' +
'<td><span class="badge text-bg-light border">' + fmt + '</span></td>' +
'<td class="text-end">' +
'<button type="button" class="btn btn-sm btn-outline-primary me-1 rep-generate-btn" data-id="' + safeId + '">Generate</button>' +
'<button type="button" class="btn btn-sm btn-outline-secondary me-1 rep-view-btn" data-id="' + safeId + '">View raw</button>' +
'<a class="btn btn-sm btn-outline-success rep-download-btn" href="/api/reports/' + safeId + '/export.csv" target="_blank" rel="noopener">Download</a>' +
'</td>';
body.appendChild(tr);
});
// Re-bind per-row handlers after every render (innerHTML wipes old ones).
body.querySelectorAll('.rep-generate-btn').forEach(function (btn) {
btn.addEventListener('click', function () {
generateReport(btn.getAttribute('data-id'), btn);
});
});
body.querySelectorAll('.rep-view-btn').forEach(function (btn) {
btn.addEventListener('click', function () {
openRawModal(btn.getAttribute('data-id'));
});
});
}
// Fetch the report list and (re)render the table; shows an error row
// in the table body when the request fails.
function loadReports() {
setTableLoading('Loading…');
fetch('/api/reports', { credentials: 'same-origin' })
.then(function (resp) { return resp.json(); })
.then(function (payload) {
var items = (payload && payload.items) ? payload.items : [];
renderTable(items);
})
.catch(function () {
setTableLoading('Failed to load reports.');
});
}
// POST /api/reports/<id>/generate and report the outcome via alert().
// Disables btnEl while the request is in flight and restores its label
// afterwards on every path (success, API error, network error).
function generateReport(id, btnEl) {
if (!id) return;
var oldText = btnEl.textContent;
btnEl.disabled = true;
btnEl.textContent = 'Generating…';
fetch('/api/reports/' + id + '/generate', {
method: 'POST',
credentials: 'same-origin',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({})
})
// Carry both the HTTP ok flag and the parsed JSON into the next step.
.then(function (r) { return r.json().then(function (j) { return { ok: r.ok, json: j }; }); })
.then(function (res) {
btnEl.disabled = false;
btnEl.textContent = oldText;
if (!res.ok) {
alert((res.json && res.json.error) ? res.json.error : 'Generate failed.');
return;
}
// Prefer the detailed row counts when the API returns them.
if (res.json && (res.json.snapshot_rows !== undefined || res.json.summary_rows !== undefined)) {
alert('Report generated. Snapshots: ' + (res.json.snapshot_rows || 0) + ', Summary: ' + (res.json.summary_rows || 0));
} else {
alert('Report generated.');
}
})
.catch(function () {
btnEl.disabled = false;
btnEl.textContent = oldText;
alert('Generate failed.');
});
}
qs('rep_refresh_btn').addEventListener('click', loadReports);
qs('rep_raw_view_summary').addEventListener('click', function () {
rawView = 'summary';
rawOffset = 0;
loadRawData();
});
qs('rep_raw_view_snapshot').addEventListener('click', function () {
rawView = 'snapshot';
rawOffset = 0;
loadRawData();
});
qs('rep_raw_prev_btn').addEventListener('click', function () {
rawOffset = Math.max(0, rawOffset - rawLimit);
loadRawData();
});
qs('rep_raw_next_btn').addEventListener('click', function () {
rawOffset = rawOffset + rawLimit;
loadRawData();
});
loadReports();
});
</script>
{% endblock %}

View File

@ -0,0 +1,357 @@
{% extends "layout/base.html" %}
{% block content %}
<div class="d-flex flex-wrap align-items-baseline justify-content-between mb-3">
<div>
<h2 class="mb-1">New report</h2>
<div class="text-muted">Create a one-time report definition. Generate output from the Reports overview.</div>
</div>
<div class="mt-2 mt-md-0">
<a class="btn btn-outline-secondary" href="{{ url_for('main.reports') }}">Back</a>
</div>
</div>
<div class="row g-3">
<div class="col-12 col-xl-8">
<div class="card">
<div class="card-header">
<div class="fw-semibold">Report basics</div>
<div class="text-muted small">Fields below are designed to scale as reporting grows.</div>
</div>
<div class="card-body">
<div class="alert alert-danger d-none" id="rep_new_error"></div>
<div class="row g-3">
<div class="col-12 col-md-6">
<label class="form-label">Report name <span class="text-danger">*</span></label>
<input type="text" class="form-control" id="rep_name" placeholder="e.g. Monthly Backup Report" />
</div>
<div class="col-12 col-md-6">
<label class="form-label">Output format</label>
<select class="form-select" id="rep_output_format">
<option value="csv" selected>CSV</option>
<option value="pdf" disabled>PDF (coming soon)</option>
</select>
</div>
<div class="col-12">
<label class="form-label">Description</label>
<input type="text" class="form-control" id="rep_description" placeholder="Optional description" />
</div>
</div>
<hr class="my-4" />
<div class="fw-semibold mb-1">Reporting period (UTC)</div>
<div class="text-muted small mb-3">Pick a day from the month calendar and set the time next to it.</div>
<div class="row g-3 align-items-end">
<div class="col-12 col-md-6">
<label class="form-label">Start</label>
<div class="input-group">
<input type="date" class="form-control" id="rep_start_date" />
<input type="time" class="form-control" id="rep_start_time" step="60" />
<span class="input-group-text">UTC</span>
</div>
</div>
<div class="col-12 col-md-6">
<label class="form-label">End</label>
<div class="input-group">
<input type="date" class="form-control" id="rep_end_date" />
<input type="time" class="form-control" id="rep_end_time" step="60" />
<span class="input-group-text">UTC</span>
</div>
</div>
<div class="col-12">
<div class="d-flex flex-wrap gap-2">
<button type="button" class="btn btn-sm btn-outline-secondary" id="rep_preset_cur_month">First day of current month (00:00)</button>
<button type="button" class="btn btn-sm btn-outline-secondary" id="rep_preset_last_month">First day of last month (00:00)</button>
<button type="button" class="btn btn-sm btn-outline-secondary" id="rep_preset_last_month_full">Last month (full)</button>
</div>
</div>
</div>
<hr class="my-4" />
<div class="fw-semibold mb-1">Scope selection</div>
<div class="text-muted small mb-3">Generate for a single customer, multiple customers, or all customers.</div>
<div class="row g-3">
<div class="col-12">
<div class="btn-group" role="group" aria-label="Customer scope">
<input type="radio" class="btn-check" name="rep_scope" id="rep_scope_single" value="single" autocomplete="off" checked>
<label class="btn btn-outline-primary" for="rep_scope_single">Single customer</label>
<input type="radio" class="btn-check" name="rep_scope" id="rep_scope_multiple" value="multiple" autocomplete="off">
<label class="btn btn-outline-primary" for="rep_scope_multiple">Multiple customers</label>
<input type="radio" class="btn-check" name="rep_scope" id="rep_scope_all" value="all" autocomplete="off">
<label class="btn btn-outline-primary" for="rep_scope_all">All customers</label>
</div>
</div>
<div class="col-12 col-md-6" id="rep_single_wrap">
<label class="form-label">Customer <span class="text-danger">*</span></label>
<select class="form-select" id="rep_customer_single"></select>
<div class="form-text">Search will be added later. For MVP this is a simple dropdown.</div>
</div>
<div class="col-12 col-md-6 d-none" id="rep_multiple_wrap">
<label class="form-label">Customers <span class="text-danger">*</span></label>
<select class="form-select" id="rep_customer_multiple" multiple size="10"></select>
<div class="form-text">Hold Ctrl/Cmd to select multiple customers.</div>
</div>
<div class="col-12">
<div class="alert alert-info mb-0">
Jobs selection is set to <span class="fw-semibold">all jobs for each selected customer</span> in this iteration.
</div>
</div>
</div>
<hr class="my-4" />
<div class="d-flex flex-wrap gap-2">
<button type="button" class="btn btn-primary" id="rep_create_btn">Create report</button>
<a class="btn btn-outline-secondary" href="{{ url_for('main.reports') }}">Cancel</a>
</div>
</div>
</div>
</div>
<div class="col-12 col-xl-4">
<div class="card">
<div class="card-header">
<div class="fw-semibold">Scheduling (placeholder)</div>
<div class="text-muted small">Disabled for now. Will be enabled in a future iteration.</div>
</div>
<div class="card-body">
<div class="mb-3">
<label class="form-label">Delivery method</label>
<select class="form-select" disabled>
<option selected>Email</option>
<option>Download only</option>
</select>
</div>
<div class="mb-3">
<label class="form-label">Frequency</label>
<select class="form-select" disabled>
<option selected>Monthly</option>
<option>Weekly</option>
<option>Daily</option>
</select>
</div>
<div class="mb-0">
<div class="alert alert-info mb-0">Scheduling is not active yet.</div>
</div>
</div>
</div>
</div>
</div>
<script>
window.addEventListener('DOMContentLoaded', function () {
function qs(id) { return document.getElementById(id); }
// Show `msg` in the page-level error alert and scroll it into view.
// Falls back to a generic message when msg is empty.
function showError(msg) {
var el = qs('rep_new_error');
el.textContent = msg || 'Unknown error';
el.classList.remove('d-none');
window.scrollTo({ top: 0, behavior: 'smooth' });
}
function clearError() {
var el = qs('rep_new_error');
el.classList.add('d-none');
el.textContent = '';
}
function pad2(n) { return (n < 10 ? '0' : '') + String(n); }
function setDateTime(prefix, d) {
qs(prefix + '_date').value = d.getUTCFullYear() + '-' + pad2(d.getUTCMonth() + 1) + '-' + pad2(d.getUTCDate());
qs(prefix + '_time').value = pad2(d.getUTCHours()) + ':' + pad2(d.getUTCMinutes());
}
// Combine a date input value ("YYYY-MM-DD") and a time input value
// ("HH:MM" or "HH:MM:SS") into "YYYY-MM-DDTHH:MM:SS".
// Returns '' when the date is empty; fallbackTime (default '00:00')
// fills in for a blank time field.
function buildIso(dateStr, timeStr, fallbackTime) {
var d = (dateStr || '').trim();
if (!d) return '';
// `t` always resolves to a non-empty string here, so the old
// `if (!t) t = '00:00';` guard was unreachable dead code — removed.
var t = (timeStr || '').trim() || (fallbackTime || '00:00');
if (t.length === 5) t = t + ':00';
return d + 'T' + t;
}
function todayUtc() {
return new Date();
}
function presetCurrentMonth() {
var now = todayUtc();
var d = new Date(Date.UTC(now.getUTCFullYear(), now.getUTCMonth(), 1, 0, 0, 0));
setDateTime('rep_start', d);
}
function presetLastMonth() {
var now = todayUtc();
var d = new Date(Date.UTC(now.getUTCFullYear(), now.getUTCMonth() - 1, 1, 0, 0, 0));
setDateTime('rep_start', d);
}
function presetLastMonthFull() {
var now = todayUtc();
var start = new Date(Date.UTC(now.getUTCFullYear(), now.getUTCMonth() - 1, 1, 0, 0, 0));
var end = new Date(Date.UTC(now.getUTCFullYear(), now.getUTCMonth(), 0, 23, 59, 0));
setDateTime('rep_start', start);
setDateTime('rep_end', end);
}
// Return the value of the checked scope radio button
// ('single' | 'multiple' | 'all'); defaults to 'single'.
function selectedScope() {
var checked = document.querySelector('input[name="rep_scope"]:checked');
return checked ? checked.value : 'single';
}
function updateScopeUi() {
var scope = selectedScope();
qs('rep_single_wrap').classList.toggle('d-none', scope !== 'single');
qs('rep_multiple_wrap').classList.toggle('d-none', scope !== 'multiple');
}
// Populate both customer <select>s (single + multiple) from
// /api/reports/customers; shows placeholder options for the empty,
// loading, and error states.
function loadCustomers() {
var single = qs('rep_customer_single');
var multiple = qs('rep_customer_multiple');
single.innerHTML = '<option value="" selected>Loading…</option>';
multiple.innerHTML = '';
// One <option> per customer; label falls back to "Customer <id>".
function makeOption(c) {
var opt = document.createElement('option');
opt.value = String(c.id);
opt.textContent = c.name || ('Customer ' + c.id);
return opt;
}
fetch('/api/reports/customers', { credentials: 'same-origin' })
.then(function (resp) { return resp.json(); })
.then(function (data) {
var items = (data && data.items) ? data.items : [];
if (!items.length) {
single.innerHTML = '<option value="" selected>No customers found</option>';
return;
}
single.innerHTML = '<option value="" selected>Select a customer…</option>';
items.forEach(function (c) {
single.appendChild(makeOption(c));
multiple.appendChild(makeOption(c));
});
})
.catch(function () {
single.innerHTML = '<option value="" selected>Failed to load customers</option>';
});
}
// Client-side validation of the create-report payload.
// Returns a human-readable error message, or '' when the payload is valid.
function validate(payload) {
if (!payload.name) {
return 'Report name is required.';
}
if (!payload.period_start || !payload.period_end) {
return 'Start and end period are required.';
}
if (payload.period_end <= payload.period_start) {
return 'End must be after start.';
}
var ids = payload.customer_ids || [];
if (payload.customer_scope === 'single' && ids.length !== 1) {
return 'Select exactly one customer.';
}
if (payload.customer_scope === 'multiple' && ids.length < 1) {
return 'Select at least one customer.';
}
return '';
}
// Collect the form state, validate it, and POST a new report definition
// to /api/reports. On success the browser is redirected to the reports
// overview; on failure the page-level error alert is shown. The submit
// button is disabled while the request is in flight.
function createReport() {
clearError();
var scope = selectedScope();
var customerIds = [];
if (scope === 'single') {
var v = (qs('rep_customer_single').value || '').trim();
if (v) customerIds = [parseInt(v, 10)];
} else if (scope === 'multiple') {
var opts = qs('rep_customer_multiple').selectedOptions;
for (var i = 0; i < opts.length; i++) {
var id = parseInt(opts[i].value, 10);
if (!isNaN(id)) customerIds.push(id);
}
}
// Scheduling is a placeholder for now: report_type is always 'one-time'
// and schedule is sent empty.
var payload = {
name: (qs('rep_name').value || '').trim(),
description: (qs('rep_description').value || '').trim(),
report_type: 'one-time',
output_format: (qs('rep_output_format').value || 'csv').trim(),
schedule: '',
customer_scope: scope,
customer_ids: customerIds,
period_start: buildIso(qs('rep_start_date').value, qs('rep_start_time').value, '00:00'),
period_end: buildIso(qs('rep_end_date').value, qs('rep_end_time').value, '23:59')
};
// Omit the key entirely rather than sending an empty description.
if (!payload.description) delete payload.description;
var err = validate(payload);
if (err) {
showError(err);
return;
}
var btn = qs('rep_create_btn');
var oldText = btn.textContent;
btn.disabled = true;
btn.textContent = 'Creating…';
fetch('/api/reports', {
method: 'POST',
credentials: 'same-origin',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(payload)
})
.then(function (r) { return r.json().then(function (j) { return { ok: r.ok, json: j }; }); })
.then(function (res) {
btn.disabled = false;
btn.textContent = oldText;
if (!res.ok) {
showError((res.json && res.json.error) ? res.json.error : 'Create failed.');
return;
}
window.location.href = '{{ url_for('main.reports') }}';
})
.catch(function () {
btn.disabled = false;
btn.textContent = oldText;
showError('Create failed.');
});
}
// Defaults
var now = todayUtc();
var end = new Date(Date.UTC(now.getUTCFullYear(), now.getUTCMonth(), now.getUTCDate(), now.getUTCHours(), now.getUTCMinutes(), 0));
var start = new Date(end.getTime() - (7 * 24 * 60 * 60 * 1000));
setDateTime('rep_start', start);
setDateTime('rep_end', end);
document.querySelectorAll('input[name="rep_scope"]').forEach(function (r) {
r.addEventListener('change', updateScopeUi);
});
qs('rep_preset_cur_month').addEventListener('click', presetCurrentMonth);
qs('rep_preset_last_month').addEventListener('click', presetLastMonth);
qs('rep_preset_last_month_full').addEventListener('click', presetLastMonthFull);
qs('rep_create_btn').addEventListener('click', createReport);
updateScopeUi();
loadCustomers();
});
</script>
{% endblock %}

View File

@ -0,0 +1,942 @@
{% extends "layout/base.html" %}
{% block content %}
<div class="d-flex align-items-center justify-content-between flex-wrap gap-2 mb-3">
<h2 class="mb-0">Run Checks</h2>
<div class="d-flex align-items-center gap-2">
<button type="button" class="btn btn-sm btn-outline-primary" id="btn_mark_reviewed" disabled>Mark as reviewed</button>
{% if is_admin %}
<button type="button" class="btn btn-sm btn-outline-danger" id="btn_unmark_reviewed" disabled>Unmark reviewed</button>
{% endif %}
</div>
</div>
<div class="small text-muted mb-2" id="rc_status"></div>
<div class="table-responsive">
<table class="table table-sm table-hover align-middle" id="runChecksTable">
<thead class="table-light">
<tr>
<th scope="col" style="width: 34px;">
<input class="form-check-input" type="checkbox" id="rc_select_all" />
</th>
<th scope="col">Customer</th>
<th scope="col">Backup</th>
<th scope="col">Type</th>
<th scope="col">Job</th>
<th scope="col">Indicators</th>
<th scope="col" style="width: 170px;">Action</th>
</tr>
</thead>
<tbody>
{% if rows %}
{% for r in rows %}
<tr class="rc-job-row" data-job-id="{{ r.job_id }}" style="cursor: pointer;">
<td onclick="event.stopPropagation();">
<input class="form-check-input rc_row_cb" type="checkbox" value="{{ r.job_id }}" />
</td>
<td>{{ r.customer_name }}</td>
<td>{{ r.backup_software }}</td>
<td>{{ r.backup_type }}</td>
<td>{{ r.job_name }}</td>
<td class="text-nowrap">
{% if r.status_indicators %}
<span class="d-inline-flex flex-wrap align-items-center gap-2">
{% for si in r.status_indicators %}
<span class="d-inline-flex align-items-center" title="{{ si.status }}: {{ si.count }}">
{% if si.dot %}<span class="status-dot {{ si.dot }}" aria-hidden="true"></span>{% endif %}
<span class="small ms-1">{{ si.count }}</span>
</span>
{% endfor %}
</span>
{% endif %}
{% if r.has_active_ticket or r.has_active_remark %}
<span class="ms-2" style="white-space:nowrap;">
{% if r.has_active_ticket %}<span title="Active ticket(s)">🎫</span>{% endif %}
{% if r.has_active_remark %}<span class="ms-1" title="Active remark(s)">💬</span>{% endif %}
</span>
{% endif %}
</td>
<td class="text-nowrap" onclick="event.stopPropagation();">
<button type="button" class="btn btn-sm btn-outline-secondary rc-open-btn" data-job-id="{{ r.job_id }}">Open</button>
<a class="btn btn-sm btn-outline-primary ms-1" href="{{ url_for('main.job_detail', job_id=r.job_id) }}">Job page</a>
</td>
</tr>
{% endfor %}
{% else %}
<tr>
{# The table header above always renders exactly 7 columns (checkbox,
   customer, backup, type, job, indicators, action), so the empty-state
   cell must span 7 for every role; the previous "9 if is_admin"
   overshot the column count for admins. #}
<td colspan="7" class="text-center text-muted py-3">
No runs to check.
</td>
</tr>
{% endif %}
</tbody>
</table>
</div>
<!-- Modal: runs for a job (same style as Daily Jobs) -->
<style>
.modal-xxl { max-width: 98vw; }
@media (min-width: 1400px) { .modal-xxl { max-width: 1400px; } }
#runChecksModal .modal-content {
height: 90vh;
display: flex;
flex-direction: column;
}
#runChecksModal .modal-body {
overflow: hidden;
flex: 1 1 auto;
min-height: 0;
}
#runChecksModal #rcm_content { height: 100%; }
#runChecksModal .rcm-main-row { height: 100%; }
#runChecksModal .rcm-main-row > .col-md-3 {
display: flex;
flex-direction: column;
height: 100%;
min-height: 0;
}
#runChecksModal .rcm-detail-col {
display: flex;
flex-direction: column;
height: 100%;
min-height: 0;
}
#runChecksModal #rcm_runs_list {
flex: 1 1 auto;
min-height: 0;
overflow: auto;
}
#runChecksModal #rcm_body_iframe { height: 100%; }
#runChecksModal .rcm-mail-panel { flex: 1 1 auto; min-height: 0; }
#runChecksModal .rcm-objects-scroll { max-height: 25vh; overflow: auto; }
</style>
<div class="modal fade" id="runChecksModal" tabindex="-1" aria-labelledby="runChecksModalLabel" aria-hidden="true">
<div class="modal-dialog modal-xl modal-dialog-scrollable modal-xxl">
<div class="modal-content">
<div class="modal-header">
<h5 class="modal-title" id="runChecksModalLabel">Job details</h5>
<button type="button" class="btn-close" data-bs-dismiss="modal" aria-label="Close"></button>
</div>
<div class="modal-body">
<div id="rcm_loading" class="text-muted">Loading runs...</div>
<div id="rcm_content" style="display:none;">
<div class="row mb-3">
<div class="col-12">
<div class="d-flex flex-wrap gap-3 small text-muted">
<div><strong>Customer:</strong> <span id="rcm_customer"></span></div>
<div><strong>Backup:</strong> <span id="rcm_backup"></span></div>
<div><strong>Type:</strong> <span id="rcm_type"></span></div>
<div><strong>Job:</strong> <span id="rcm_job"></span></div>
</div>
</div>
</div>
<div class="row g-3 rcm-main-row">
<div class="col-md-3">
<h6 class="mb-2">Runs</h6>
<div id="rcm_status_summary" class="mb-2 small"></div>
<div id="rcm_runs_list" class="list-group"></div>
</div>
<div class="col-md-9 rcm-detail-col">
<dl class="row mb-3">
<dt class="col-3">From</dt>
<dd class="col-9" id="rcm_from"></dd>
<dt class="col-3">Subject</dt>
<dd class="col-9" id="rcm_subject"></dd>
<dt class="col-3">Received</dt>
<dd class="col-9" id="rcm_received"></dd>
<dt class="col-3">Indicator</dt>
<dd class="col-9" id="rcm_status"></dd>
<dt class="col-3">Remark</dt>
<dd class="col-9" id="rcm_remark" style="white-space: pre-wrap;"></dd>
<dt class="col-3">Alerts</dt>
<dd class="col-9">
<div id="rcm_alerts" class="small"></div>
<div class="mt-2">
<div class="row g-2 align-items-start">
<div class="col-12 col-lg-6">
<div class="border rounded p-2">
<div class="d-flex align-items-center justify-content-between">
<div class="fw-semibold">New ticket</div>
<button type="button" class="btn btn-sm btn-outline-primary" id="rcm_ticket_save">Add</button>
</div>
<div class="mt-2">
<textarea class="form-control form-control-sm" id="rcm_ticket_description" rows="2" placeholder="Description (optional)"></textarea>
</div>
<div class="mt-2 small text-muted" id="rcm_ticket_status"></div>
</div>
</div>
<div class="col-12 col-lg-6">
<div class="border rounded p-2">
<div class="d-flex align-items-center justify-content-between">
<div class="fw-semibold">New remark</div>
<button type="button" class="btn btn-sm btn-outline-secondary" id="rcm_remark_save">Add</button>
</div>
<div class="mt-2">
<textarea class="form-control form-control-sm" id="rcm_remark_body" rows="2" placeholder="Body (required)"></textarea>
</div>
<div class="mt-2 small text-muted" id="rcm_remark_status"></div>
</div>
</div>
</div>
</div>
</dd>
</dl>
<div class="mb-3 rcm-mail-panel">
<h6>Mail</h6>
<iframe
id="rcm_body_iframe"
class="border rounded"
style="width:100%;"
sandbox="allow-popups allow-popups-to-escape-sandbox allow-same-origin"
referrerpolicy="no-referrer"
></iframe>
</div>
<div>
<h6>Objects</h6>
<div class="table-responsive rcm-objects-scroll">
<table class="table table-sm table-bordered" id="rcm_objects_table">
<thead class="table-light" style="position: sticky; top: 0; z-index: 1;">
<tr>
<th scope="col">Object</th>
<th scope="col">Type</th>
<th scope="col">Status</th>
<th scope="col">Error</th>
</tr>
</thead>
<tbody></tbody>
</table>
</div>
</div>
</div>
</div>
</div>
<div id="rcm_no_runs" class="text-muted" style="display:none;">No runs found.</div>
</div>
<div class="modal-footer">
<a id="rcm_eml_btn" class="btn btn-outline-primary" href="#" style="display:none;" rel="nofollow">Download EML</a>
<a id="rcm_job_btn" class="btn btn-outline-secondary" href="#">Open job page</a>
<button type="button" class="btn btn-primary" id="rcm_mark_all_reviewed" disabled>Mark as Reviewed</button>
<button type="button" class="btn btn-secondary" data-bs-dismiss="modal">Close</button>
</div>
</div>
</div>
</div>
<script>
(function () {
// Cache the page-level elements this widget works with. Bail out early when
// the Run Checks table is absent (the script is a no-op on other pages).
var table = document.getElementById('runChecksTable');
if (!table) return;
var selectAll = document.getElementById('rc_select_all');
var btnMark = document.getElementById('btn_mark_reviewed');
var btnUnmark = document.getElementById('btn_unmark_reviewed');
var statusEl = document.getElementById('rc_status');
// Modal state: which job/run is currently shown and the last details payload.
var currentJobId = null;
var currentRunId = null;
var currentPayload = null;
var btnMarkAllReviewed = document.getElementById('rcm_mark_all_reviewed');
// Map a status label (including derived strings such as "Success (override)")
// onto the CSS class used for status text cells. Unknown labels get no class.
function statusClass(status) {
  var needle = String(status || "").toLowerCase();
  if (needle.indexOf("override") !== -1) { return "status-override"; }
  if (needle.indexOf("success") !== -1) { return "status-success"; }
  if (needle.indexOf("warning") !== -1) { return "status-warning"; }
  if (needle === "error" || needle.indexOf("fail") !== -1) { return "status-failed"; }
  if (needle.indexOf("missed") !== -1) { return "status-missed"; }
  if (needle.indexOf("expected") !== -1) { return "status-expected"; }
  return "";
}
// Same classification as statusClass(), but returns the colored-dot CSS class
// ("dot-*") used by the small status indicator spans.
function statusDotClass(status) {
  var needle = String(status || "").toLowerCase();
  if (needle.indexOf("override") !== -1) { return "dot-override"; }
  if (needle.indexOf("success") !== -1) { return "dot-success"; }
  if (needle.indexOf("warning") !== -1) { return "dot-warning"; }
  if (needle === "error" || needle.indexOf("fail") !== -1) { return "dot-failed"; }
  if (needle.indexOf("missed") !== -1) { return "dot-missed"; }
  if (needle.indexOf("expected") !== -1) { return "dot-expected"; }
  return "";
}
// Wrap a raw mail-body fragment in a minimal standalone HTML document for the
// sandboxed preview iframe. The <base target="_blank"> makes links open in a
// new tab instead of navigating inside the iframe.
function wrapMailHtml(html) {
  var parts = [
    "<!doctype html><html><head><meta charset=\"utf-8\">",
    "<base target=\"_blank\">",
    "</head><body style=\"margin:0; padding:8px;\">",
    html || "",
    "</body></html>"
  ];
  return parts.join("");
}
// HTML-escape a value for safe interpolation into innerHTML strings.
// Fix: the previous `(s || "")` coerced ANY falsy value to "", so the number
// 0 or boolean false rendered as an empty string. Only null/undefined are
// mapped to "" now; everything else is stringified then escaped.
// Escape order matters: '&' must be replaced first.
function escapeHtml(s) {
  var str = (s === null || s === undefined) ? "" : String(s);
  return str
    .replace(/&/g, "&amp;")
    .replace(/</g, "&lt;")
    .replace(/>/g, "&gt;")
    .replace(/"/g, "&quot;")
    .replace(/'/g, "&#39;");
}
// Collect the job ids of all checked row checkboxes in the Run Checks table.
// Returns an array of finite integers (non-numeric checkbox values are
// silently dropped by the final filter).
function getSelectedJobIds() {
  var cbs = table.querySelectorAll('tbody .rc_row_cb');
  var ids = [];
  cbs.forEach(function (cb) {
    if (cb.checked) ids.push(parseInt(cb.value, 10));
  });
  return ids.filter(function (x) { return Number.isFinite(x); });
}
// Enable/disable the bulk review buttons based on the current selection and
// show a "N selected" hint next to them (cleared when nothing is selected).
function updateButtons() {
  var ids = getSelectedJobIds();
  if (btnMark) btnMark.disabled = ids.length === 0;
  if (btnUnmark) btnUnmark.disabled = ids.length === 0;
  if (statusEl) statusEl.textContent = ids.length ? (ids.length + ' selected') : '';
}
// "Select all" header checkbox: mirror its state onto every row checkbox,
// then refresh the bulk-action buttons.
if (selectAll) {
  selectAll.addEventListener('change', function () {
    var cbs = table.querySelectorAll('tbody .rc_row_cb');
    cbs.forEach(function (cb) { cb.checked = selectAll.checked; });
    updateButtons();
  });
}
// Delegated change handler: any individual row checkbox toggle refreshes the
// bulk-action buttons (delegation survives future row re-renders).
table.addEventListener('change', function (e) {
  if (e.target && e.target.classList && e.target.classList.contains('rc_row_cb')) {
    updateButtons();
  }
});
// JSON POST helper. Resolves with {ok, status, json} so callers can inspect
// the HTTP status together with the decoded payload; assumes the server
// always answers with a JSON body.
function postJson(url, body) {
  var options = {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      'X-Requested-With': 'XMLHttpRequest'
    },
    body: JSON.stringify(body || {})
  };
  return fetch(url, options).then(function (response) {
    return response.json().then(function (payload) {
      return { ok: response.ok, status: response.status, json: payload };
    });
  });
}
// fetch wrapper enforcing the app's JSON envelope: resolves with the payload
// only when the HTTP status is OK AND the body reports status === 'ok';
// otherwise rejects with the server-provided message (or a generic one).
// Note: mutates opts.headers in place, like the original.
function apiJson(url, opts) {
  var options = opts || {};
  options.headers = options.headers || {};
  options.headers['Content-Type'] = 'application/json';
  return fetch(url, options).then(function (response) {
    return response.json().then(function (payload) {
      if (response.ok && payload && payload.status === 'ok') {
        return payload;
      }
      var message = (payload && payload.message)
        ? payload.message
        : ('Request failed (' + response.status + ')');
      throw new Error(message);
    });
  });
}
if (btnMark) {
btnMark.addEventListener('click', function () {
var ids = getSelectedJobIds();
if (!ids.length) return;
btnMark.disabled = true;
postJson('{{ url_for('main.api_run_checks_mark_reviewed') }}', { job_ids: ids })
.then(function (res) {
if (!res.ok || !res.json || res.json.status !== 'ok') {
alert((res.json && res.json.message) ? res.json.message : 'Failed to mark reviewed.');
return;
}
window.location.reload();
})
.catch(function () {
alert('Failed to mark reviewed.');
});
});
}
if (btnUnmark) {
btnUnmark.addEventListener('click', function () {
var ids = getSelectedJobIds();
if (!ids.length) return;
var note = prompt('Unmark note (optional):') || '';
btnUnmark.disabled = true;
postJson('{{ url_for('main.api_run_checks_unmark_reviewed') }}', { job_ids: ids, note: note })
.then(function (res) {
if (!res.ok || !res.json || res.json.status !== 'ok') {
alert((res.json && res.json.message) ? res.json.message : 'Failed to unmark reviewed.');
return;
}
window.location.reload();
})
.catch(function () {
alert('Failed to unmark reviewed.');
});
});
}
// Modal footer button: mark ALL runs of the currently open job as reviewed.
// Disabled while the request runs; re-enabled on failure, page reloads on
// success.
if (btnMarkAllReviewed) {
  btnMarkAllReviewed.addEventListener('click', function () {
    if (!currentJobId) return;
    btnMarkAllReviewed.disabled = true;
    postJson('{{ url_for('main.api_run_checks_mark_reviewed') }}', { job_ids: [parseInt(currentJobId, 10)] })
      .then(function (res) {
        if (!res.ok || !res.json || res.json.status !== 'ok') {
          alert((res.json && res.json.message) ? res.json.message : 'Failed to mark reviewed.');
          btnMarkAllReviewed.disabled = false;
          return;
        }
        window.location.reload();
      })
      .catch(function () {
        alert('Failed to mark reviewed.');
        btnMarkAllReviewed.disabled = false;
      });
  });
}
// Render the tickets and remarks linked to the currently selected run into
// the #rcm_alerts panel, with inline edit/resolve controls per item.
// payload: {tickets: [...], remarks: [...]} — missing keys are tolerated.
// NOTE(review): t.id / r.id are interpolated into HTML attributes without
// escaping — presumably numeric DB ids; confirm the API never returns
// attacker-controllable strings here.
function renderAlerts(payload) {
  var box = document.getElementById('rcm_alerts');
  if (!box) return;
  var tickets = (payload && payload.tickets) || [];
  var remarks = (payload && payload.remarks) || [];
  if (!tickets.length && !remarks.length) {
    box.innerHTML = '<span class="text-muted">No tickets or remarks linked to this run.</span>';
    return;
  }
  var html = '';
  if (tickets.length) {
    html += '<div class="mb-2"><strong>Tickets</strong><div class="mt-1">';
    tickets.forEach(function (t) {
      var status = t.resolved_at ? 'Resolved' : 'Active';
      // Card per ticket: header row (code + Active/Resolved badge + optional
      // description) plus a hidden inline-edit form toggled by the Edit button.
      html += '<div class="mb-2 border rounded p-2" data-alert-type="ticket" data-id="' + t.id + '">' +
        '<div class="d-flex align-items-start justify-content-between gap-2">' +
        '<div class="flex-grow-1 min-w-0">' +
        '<div class="text-truncate">' +
        '<span class="me-1" title="Ticket">🎫</span>' +
        '<span class="fw-semibold">' + escapeHtml(t.ticket_code || '') + '</span>' +
        '<span class="ms-2 badge ' + (t.resolved_at ? 'bg-secondary' : 'bg-warning text-dark') + '">' + status + '</span>' +
        '</div>' +
        (t.description ? ('<div class="small text-muted mt-1">' + escapeHtml(t.description) + '</div>') : '') +
        '</div>' +
        '<div class="d-flex gap-1 flex-shrink-0">' +
        '<button type="button" class="btn btn-sm btn-outline-secondary" data-action="toggle-edit-ticket" data-id="' + t.id + '" ' + (t.resolved_at ? 'disabled' : '') + '>Edit</button>' +
        '<button type="button" class="btn btn-sm btn-outline-success" data-action="resolve-ticket" data-id="' + t.id + '" ' + (t.resolved_at ? 'disabled' : '') + '>Resolve</button>' +
        '</div>' +
        '</div>' +
        '<div class="mt-2" data-edit="ticket" style="display:none;">' +
        '<div class="row g-2">' +
        '<div class="col-12">' +
        '<textarea class="form-control form-control-sm" data-field="description" rows="2" placeholder="Description (optional)">' + escapeHtml(t.description || '') + '</textarea>' +
        '</div>' +
        '<div class="col-12 d-flex gap-2">' +
        '<button type="button" class="btn btn-sm btn-primary" data-action="save-ticket" data-id="' + t.id + '">Save</button>' +
        '<button type="button" class="btn btn-sm btn-outline-secondary" data-action="cancel-edit" data-id="' + t.id + '">Cancel</button>' +
        '<div class="small text-muted align-self-center" data-field="status"></div>' +
        '</div>' +
        '</div>' +
        '</div>' +
        '</div>';
    });
    html += '</div></div>';
  }
  if (remarks.length) {
    // Same card structure as tickets, but remarks have a body instead of a
    // ticket code/description.
    html += '<div class="mb-2"><strong>Remarks</strong><div class="mt-1">';
    remarks.forEach(function (r) {
      var status2 = r.resolved_at ? 'Resolved' : 'Active';
      html += '<div class="mb-2 border rounded p-2" data-alert-type="remark" data-id="' + r.id + '">' +
        '<div class="d-flex align-items-start justify-content-between gap-2">' +
        '<div class="flex-grow-1 min-w-0">' +
        '<div class="text-truncate">' +
        '<span class="me-1" title="Remark">💬</span>' +
        '<span class="fw-semibold">Remark</span>' +
        '<span class="ms-2 badge ' + (r.resolved_at ? 'bg-secondary' : 'bg-warning text-dark') + '">' + status2 + '</span>' +
        '</div>' +
        (r.body ? ('<div class="small text-muted mt-1">' + escapeHtml(r.body) + '</div>') : '') +
        '</div>' +
        '<div class="d-flex gap-1 flex-shrink-0">' +
        '<button type="button" class="btn btn-sm btn-outline-secondary" data-action="toggle-edit-remark" data-id="' + r.id + '" ' + (r.resolved_at ? 'disabled' : '') + '>Edit</button>' +
        '<button type="button" class="btn btn-sm btn-outline-success" data-action="resolve-remark" data-id="' + r.id + '" ' + (r.resolved_at ? 'disabled' : '') + '>Resolve</button>' +
        '</div>' +
        '</div>' +
        '<div class="mt-2" data-edit="remark" style="display:none;">' +
        '<div class="row g-2">' +
        '<div class="col-12">' +
        '<textarea class="form-control form-control-sm" data-field="body" rows="2" placeholder="Body (required)">' + escapeHtml(r.body || '') + '</textarea>' +
        '</div>' +
        '<div class="col-12 d-flex gap-2">' +
        '<button type="button" class="btn btn-sm btn-primary" data-action="save-remark" data-id="' + r.id + '">Save</button>' +
        '<button type="button" class="btn btn-sm btn-outline-secondary" data-action="cancel-edit" data-id="' + r.id + '">Cancel</button>' +
        '<div class="small text-muted align-self-center" data-field="status"></div>' +
        '</div>' +
        '</div>' +
        '</div>' +
        '</div>';
    });
    html += '</div></div>';
  }
  box.innerHTML = html;
  // Wire up every action button inside the freshly rendered panel. All
  // mutations call the REST API and then re-fetch the alerts for the run.
  Array.prototype.forEach.call(box.querySelectorAll('button[data-action]'), function (btn) {
    btn.addEventListener('click', function (ev) {
      ev.preventDefault();
      var action = btn.getAttribute('data-action');
      var id = btn.getAttribute('data-id');
      if (!action || !id) return;
      var wrapper = btn.closest('[data-alert-type]');
      if (action === 'resolve-ticket') {
        if (!confirm('Mark ticket as resolved?')) return;
        apiJson('/api/tickets/' + encodeURIComponent(id) + '/resolve', {method: 'POST', body: '{}'})
          .then(function () { loadAlerts(currentRunId); })
          .catch(function (e) { alert(e.message || 'Failed.'); });
      } else if (action === 'resolve-remark') {
        if (!confirm('Mark remark as resolved?')) return;
        apiJson('/api/remarks/' + encodeURIComponent(id) + '/resolve', {method: 'POST', body: '{}'})
          .then(function () { loadAlerts(currentRunId); })
          .catch(function (e) { alert(e.message || 'Failed.'); });
      } else if (action === 'toggle-edit-ticket') {
        if (!wrapper) return;
        var edit = wrapper.querySelector('[data-edit="ticket"]');
        if (!edit) return;
        edit.style.display = (edit.style.display === 'none' || !edit.style.display) ? '' : 'none';
      } else if (action === 'toggle-edit-remark') {
        if (!wrapper) return;
        var edit2 = wrapper.querySelector('[data-edit="remark"]');
        if (!edit2) return;
        edit2.style.display = (edit2.style.display === 'none' || !edit2.style.display) ? '' : 'none';
      } else if (action === 'cancel-edit') {
        if (!wrapper) return;
        var editAny = wrapper.querySelector('[data-edit]');
        if (editAny) editAny.style.display = 'none';
      } else if (action === 'save-ticket') {
        if (!wrapper) return;
        var editT = wrapper.querySelector('[data-edit="ticket"]');
        if (!editT) return;
        var descEl = editT.querySelector('[data-field="description"]');
        var statusEl2 = editT.querySelector('[data-field="status"]');
        var descVal = descEl ? descEl.value : '';
        if (statusEl2) statusEl2.textContent = 'Saving...';
        apiJson('/api/tickets/' + encodeURIComponent(id), {
          method: 'PATCH',
          body: JSON.stringify({description: descVal})
        })
          .then(function () { loadAlerts(currentRunId); })
          .catch(function (e) {
            if (statusEl2) statusEl2.textContent = e.message || 'Failed.';
            else alert(e.message || 'Failed.');
          });
      } else if (action === 'save-remark') {
        if (!wrapper) return;
        var editR = wrapper.querySelector('[data-edit="remark"]');
        if (!editR) return;
        var bodyEl = editR.querySelector('[data-field="body"]');
        var statusEl3 = editR.querySelector('[data-field="status"]');
        var bodyVal = bodyEl ? bodyEl.value : '';
        // Remark body is mandatory; surface the validation inline.
        if (!bodyVal || !bodyVal.trim()) {
          if (statusEl3) statusEl3.textContent = 'Body is required.';
          return;
        }
        if (statusEl3) statusEl3.textContent = 'Saving...';
        apiJson('/api/remarks/' + encodeURIComponent(id), {
          method: 'PATCH',
          body: JSON.stringify({body: bodyVal})
        })
          .then(function () { loadAlerts(currentRunId); })
          .catch(function (e) {
            if (statusEl3) statusEl3.textContent = e.message || 'Failed.';
            else alert(e.message || 'Failed.');
          });
      }
    });
  });
}
// Fetch tickets/remarks for a run and hand them to renderAlerts(). Any
// failure (network error or non-ok envelope) degrades to an empty alerts
// panel rather than surfacing an error to the user.
function loadAlerts(runId) {
  if (!runId) {
    renderAlerts({tickets: [], remarks: []});
    return;
  }
  fetch('/api/job-runs/' + encodeURIComponent(runId) + '/alerts')
    .then(function (r) { return r.json(); })
    .then(function (j) {
      if (!j || j.status !== 'ok') throw new Error((j && j.message) || 'Failed');
      renderAlerts(j);
    })
    .catch(function () {
      renderAlerts({tickets: [], remarks: []});
    });
}
// One-time wiring for the "New ticket" / "New remark" inline create forms in
// the modal. Exposes two window-level hooks (__rcmSetCreateDisabled,
// __rcmClearCreateStatus) that renderRun() calls when the selected run
// changes, so the forms are only enabled while a run is selected.
function bindInlineCreateForms() {
  var btnTicket = document.getElementById('rcm_ticket_save');
  var btnRemark = document.getElementById('rcm_remark_save');
  var tDesc = document.getElementById('rcm_ticket_description');
  var tStatus = document.getElementById('rcm_ticket_status');
  var rBody = document.getElementById('rcm_remark_body');
  var rStatus = document.getElementById('rcm_remark_status');
  // Reset both inline status lines.
  function clearStatus() {
    if (tStatus) tStatus.textContent = '';
    if (rStatus) rStatus.textContent = '';
  }
  // Enable/disable both create forms as a unit.
  function setDisabled(disabled) {
    if (btnTicket) btnTicket.disabled = disabled;
    if (btnRemark) btnRemark.disabled = disabled;
    if (tDesc) tDesc.disabled = disabled;
    if (rBody) rBody.disabled = disabled;
  }
  window.__rcmSetCreateDisabled = setDisabled;
  window.__rcmClearCreateStatus = clearStatus;
  if (btnTicket) {
    btnTicket.addEventListener('click', function () {
      if (!currentRunId) { alert('Select a run first.'); return; }
      clearStatus();
      var description = tDesc ? tDesc.value : '';
      if (tStatus) tStatus.textContent = 'Saving...';
      apiJson('/api/tickets', {
        method: 'POST',
        body: JSON.stringify({job_run_id: currentRunId, description: description})
      })
        .then(function () {
          // Success: clear the form and refresh the alerts list for the run.
          if (tDesc) tDesc.value = '';
          if (tStatus) tStatus.textContent = '';
          loadAlerts(currentRunId);
        })
        .catch(function (e) {
          if (tStatus) tStatus.textContent = e.message || 'Failed.';
          else alert(e.message || 'Failed.');
        });
    });
  }
  if (btnRemark) {
    btnRemark.addEventListener('click', function () {
      if (!currentRunId) { alert('Select a run first.'); return; }
      clearStatus();
      var body = rBody ? rBody.value : '';
      // Remark body is mandatory (unlike the ticket description).
      if (!body || !body.trim()) {
        if (rStatus) rStatus.textContent = 'Body is required.';
        else alert('Body is required.');
        return;
      }
      if (rStatus) rStatus.textContent = 'Saving...';
      apiJson('/api/remarks', {
        method: 'POST',
        body: JSON.stringify({job_run_id: currentRunId, body: body})
      })
        .then(function () {
          if (rBody) rBody.value = '';
          if (rStatus) rStatus.textContent = '';
          loadAlerts(currentRunId);
        })
        .catch(function (e) {
          if (rStatus) rStatus.textContent = e.message || 'Failed.';
          else alert(e.message || 'Failed.');
        });
    });
  }
  // Start disabled until a run is selected (renderRun re-enables).
  setDisabled(true);
}
// Show run #idx of the details payload in the modal: highlight it in the
// runs list, fill in mail headers, status dot, remark, alerts, the sandboxed
// mail-body iframe, the EML download link, and the per-object results table.
// Out-of-range idx falls back to the first run.
function renderRun(payload, idx) {
  var runs = (payload && payload.runs) || [];
  if (!runs.length) {
    document.getElementById('rcm_content').style.display = 'none';
    var nr = document.getElementById('rcm_no_runs');
    nr.textContent = (payload && payload.message) ? payload.message : 'No runs found.';
    nr.style.display = 'block';
    return;
  }
  if (idx < 0 || idx >= runs.length) idx = 0;
  var run = runs[idx] || {};
  // Mark the selected entry in the runs list.
  var list = document.getElementById('rcm_runs_list');
  if (list) {
    Array.prototype.forEach.call(list.querySelectorAll('.list-group-item'), function (el) {
      if (String(el.getAttribute('data-run-idx')) === String(idx)) el.classList.add('active');
      else el.classList.remove('active');
    });
  }
  // Status indicator: dot only; the full label lives in the title attribute.
  var stEl = document.getElementById('rcm_status');
  if (stEl) {
    var d = statusDotClass(run.status);
    stEl.title = run.status || '';
    stEl.innerHTML = (d ? ('<span class="status-dot ' + d + '" aria-hidden="true"></span>') : '');
  }
  document.getElementById('rcm_remark').textContent = run.remark || '';
  currentRunId = run.id || null;
  // Reset the inline create forms for the newly selected run.
  if (window.__rcmClearCreateStatus) window.__rcmClearCreateStatus();
  if (window.__rcmSetCreateDisabled) window.__rcmSetCreateDisabled(!currentRunId);
  loadAlerts(currentRunId);
  var mail = run.mail || null;
  if (mail) {
    document.getElementById('rcm_from').textContent = mail.from_address || '';
    document.getElementById('rcm_subject').textContent = mail.subject || '';
    document.getElementById('rcm_received').textContent = mail.received_at || '';
  } else {
    // A run without mail is either a "missed" run or has no message linked.
    document.getElementById('rcm_from').textContent = run.missed ? 'Missed run' : '';
    document.getElementById('rcm_subject').textContent = '';
    document.getElementById('rcm_received').textContent = '';
  }
  var bodyFrame = document.getElementById('rcm_body_iframe');
  if (bodyFrame) {
    bodyFrame.srcdoc = wrapMailHtml(run.body_html || (run.missed ? '<div class="text-muted">No email for missed run.</div>' : ''));
  }
  var emlBtn = document.getElementById('rcm_eml_btn');
  if (emlBtn) {
    if (run.has_eml && run.mail_message_id) {
      // NOTE(review): .replace('0', ...) swaps only the FIRST '0' in the
      // url_for() output — breaks if the route prefix ever contains a '0'.
      emlBtn.href = "{{ url_for('main.message_eml', message_id=0) }}".replace('0', String(run.mail_message_id));
      emlBtn.style.display = 'inline-block';
    } else {
      emlBtn.href = '#';
      emlBtn.style.display = 'none';
    }
  }
  // Rebuild the per-object results table (textContent keeps names/errors
  // safe; only the status cell uses innerHTML, with escapeHtml applied).
  var tbody = document.querySelector('#rcm_objects_table tbody');
  if (tbody) {
    tbody.innerHTML = '';
    (run.objects || []).forEach(function (obj) {
      var tr = document.createElement('tr');
      var tdName = document.createElement('td');
      tdName.textContent = obj.name || '';
      tr.appendChild(tdName);
      var tdType = document.createElement('td');
      tdType.textContent = obj.type || '';
      tr.appendChild(tdType);
      var tdStatus = document.createElement('td');
      tdStatus.className = 'status-text ' + statusClass(obj.status);
      var d = statusDotClass(obj.status);
      tdStatus.innerHTML = (d ? ('<span class="status-dot ' + d + ' me-2" aria-hidden="true"></span>') : '') + escapeHtml(obj.status || '');
      tr.appendChild(tdStatus);
      var tdError = document.createElement('td');
      tdError.textContent = obj.error_message || '';
      tr.appendChild(tdError);
      tbody.appendChild(tr);
    });
  }
}
// Open the runs modal for a job: show the Bootstrap modal immediately with a
// loading indicator, fetch the job's run details, then populate the header,
// the per-status summary table, the runs list, and render the first run.
function openJobModal(jobId) {
  if (!jobId) return;
  currentJobId = jobId;
  if (btnMarkAllReviewed) btnMarkAllReviewed.disabled = true;
  var modalEl = document.getElementById('runChecksModal');
  var modal = bootstrap.Modal.getOrCreateInstance(modalEl);
  modal.show();
  document.getElementById('rcm_loading').style.display = 'block';
  document.getElementById('rcm_content').style.display = 'none';
  document.getElementById('rcm_no_runs').style.display = 'none';
  var jobBtn = document.getElementById('rcm_job_btn');
  if (jobBtn) {
    // NOTE(review): .replace('0', ...) swaps only the FIRST '0' in the
    // url_for() output — breaks if the route prefix ever contains a '0'.
    jobBtn.href = "{{ url_for('main.job_detail', job_id=0) }}".replace('0', String(jobId));
  }
  fetch(
    "{{ url_for('main.run_checks_details') }}" +
    "?job_id=" + encodeURIComponent(jobId) +
    "&include_reviewed=" + encodeURIComponent("{{ '1' if include_reviewed else '0' }}")
  )
    .then(function (response) {
      // Guard against HTML error pages (e.g. login redirects) masquerading
      // as a successful response.
      var ct = (response.headers && response.headers.get('content-type')) || '';
      if (!response.ok) throw new Error('Failed to load runs (' + response.status + ').');
      if (ct.indexOf('application/json') === -1) throw new Error('Unexpected response while loading runs.');
      return response.json();
    })
    .then(function (data) {
      document.getElementById('rcm_loading').style.display = 'none';
      currentPayload = data;
      if (!data || data.status !== 'ok') {
        var nr = document.getElementById('rcm_no_runs');
        nr.textContent = (data && data.message) ? data.message : 'No runs found.';
        nr.style.display = 'block';
        return;
      }
      var job = data.job || {};
      var runs = data.runs || [];
      // "Mark as Reviewed" only makes sense if at least one run is unreviewed.
      if (btnMarkAllReviewed) {
        var hasUnreviewed = (runs || []).some(function (r) { return !r.is_reviewed; });
        btnMarkAllReviewed.disabled = !hasUnreviewed;
      }
      // Bucket runs by normalized status key for the summary table.
      var counts = {};
      function _rcStatusKey(run) {
        if (run && run.missed) return "missed";
        var s = (run && run.status ? run.status : "").toString().toLowerCase();
        if (s.indexOf("override") !== -1) return "override";
        if (s.indexOf("success") !== -1) return "success";
        if (s.indexOf("warning") !== -1) return "warning";
        if (s === "error" || s.indexOf("fail") !== -1) return "failed";
        if (s.indexOf("expected") !== -1) return "expected";
        return (run && run.status) ? run.status : "unknown";
      }
      (runs || []).forEach(function (run) {
        var k = _rcStatusKey(run);
        counts[k] = (counts[k] || 0) + 1;
      });
      var summaryEl = document.getElementById('rcm_status_summary');
      if (summaryEl) {
        // Known statuses first in severity order, then any unexpected keys.
        var order = ["failed", "warning", "missed", "expected", "override", "success"];
        var labels = {
          "failed": "Failed",
          "warning": "Warning",
          "missed": "Missed",
          "expected": "Expected",
          "override": "Success (override)",
          "success": "Success"
        };
        var rows = [];
        function addRow(key, label, dotClass) {
          var dotHtml = dotClass ? ('<span class="status-dot ' + dotClass + '" aria-hidden="true"></span>') : '';
          rows.push(
            '<tr>' +
            '<td class="text-center" style="width: 34px;" title="' + escapeHtml(label) + '">' + dotHtml + '</td>' +
            '<td class="text-end fw-semibold" style="width: 56px;" title="' + escapeHtml(label) + '">' + String(counts[key]) + '</td>' +
            '</tr>'
          );
        }
        order.forEach(function (k) {
          if (!counts[k]) return;
          var dc = (k === "missed") ? "dot-missed" : statusDotClass(k);
          addRow(k, labels[k] || k, dc);
        });
        Object.keys(counts).forEach(function (k) {
          if (order.indexOf(k) !== -1) return;
          addRow(k, k, statusDotClass(k));
        });
        if (rows.length) {
          summaryEl.innerHTML = '<table class="table table-sm table-striped align-middle mb-0"><tbody>' + rows.join('') + '</tbody></table>';
        } else {
          summaryEl.innerHTML = '';
        }
      }
      document.getElementById('rcm_customer').textContent = job.customer_name || '';
      document.getElementById('rcm_backup').textContent = job.backup_software || '';
      document.getElementById('rcm_type').textContent = job.backup_type || '';
      document.getElementById('rcm_job').textContent = job.job_name || '';
      document.getElementById('runChecksModalLabel').textContent = (job.job_name || 'Job') + ' - ' + (job.customer_name || '');
      // Build the clickable runs list; each entry re-renders the detail pane.
      var listEl = document.getElementById('rcm_runs_list');
      listEl.innerHTML = '';
      runs.forEach(function (run, idx) {
        var a = document.createElement('button');
        a.type = 'button';
        a.className = 'list-group-item list-group-item-action';
        a.setAttribute('data-run-idx', String(idx));
        var dot = run.missed ? "dot-missed" : statusDotClass(run.status);
        var dotHtml = dot ? ('<span class="status-dot ' + dot + ' me-2" aria-hidden="true"></span>') : '';
        // NOTE(review): the reviewed marker span appears empty — presumably
        // a glyph was lost in transit; confirm the intended character.
        var reviewedMark = run.is_reviewed ? ' <span class="ms-2" title="Reviewed" aria-label="Reviewed"></span>' : '';
        a.title = run.status || '';
        a.innerHTML = dotHtml + '<span class="text-nowrap">' + escapeHtml(run.run_at || 'Run') + '</span>' + reviewedMark;
        a.addEventListener('click', function (ev) {
          ev.preventDefault();
          renderRun(data, idx);
        });
        listEl.appendChild(a);
      });
      if (runs.length) {
        document.getElementById('rcm_content').style.display = 'block';
        document.getElementById('rcm_no_runs').style.display = 'none';
        renderRun(data, 0);
      } else {
        document.getElementById('rcm_content').style.display = 'none';
        var nr2 = document.getElementById('rcm_no_runs');
        nr2.textContent = (data && data.message) ? data.message : 'No runs found.';
        nr2.style.display = 'block';
      }
    })
    .catch(function (err) {
      console.error(err);
      document.getElementById('rcm_loading').style.display = 'none';
      var nr3 = document.getElementById('rcm_no_runs');
      nr3.textContent = (err && err.message) ? err.message : 'Failed to load runs.';
      nr3.style.display = 'block';
    });
}
// Clicking a job row opens the runs modal; the explicit "Open" button does
// the same but must stop propagation so the row handler does not fire twice.
function attachRowHandlers() {
  var rows = table.querySelectorAll('tbody tr.rc-job-row');
  Array.prototype.forEach.call(rows, function (row) {
    row.addEventListener('click', function () {
      openJobModal(row.getAttribute('data-job-id'));
    });
  });
  var buttons = table.querySelectorAll('button.rc-open-btn');
  Array.prototype.forEach.call(buttons, function (btn) {
    btn.addEventListener('click', function (ev) {
      ev.preventDefault();
      ev.stopPropagation();
      openJobModal(btn.getAttribute('data-job-id'));
    });
  });
}
// Initialize once on page load: wire the create forms, row/button click
// handlers, and set the initial bulk-button state.
bindInlineCreateForms();
attachRowHandlers();
updateButtons();
})();
</script>
{% endblock %}

View File

@ -0,0 +1,655 @@
{% extends "layout/base.html" %}
{% block content %}
<div class="d-flex flex-wrap align-items-baseline justify-content-between mb-3">
<div>
<h2 class="mb-1">Settings</h2>
<div class="text-muted">Configure mail import, display options and maintenance actions.</div>
</div>
</div>
<ul class="nav nav-pills mb-4">
<li class="nav-item">
<a class="nav-link {% if section == 'general' %}active{% endif %}" href="{{ url_for('main.settings', section='general') }}">General</a>
</li>
<li class="nav-item">
<a class="nav-link {% if section == 'users' %}active{% endif %}" href="{{ url_for('main.settings', section='users') }}">Users</a>
</li>
<li class="nav-item">
<a class="nav-link {% if section == 'imports' %}active{% endif %}" href="{{ url_for('main.settings', section='imports') }}">Imports</a>
</li>
<li class="nav-item">
<a class="nav-link {% if section == 'maintenance' %}active{% endif %}" href="{{ url_for('main.settings', section='maintenance') }}">Maintenance</a>
</li>
<li class="nav-item">
<a class="nav-link {% if section == 'news' %}active{% endif %}" href="{{ url_for('main.settings', section='news') }}">News</a>
</li>
</ul>
<div class="card mb-4">
<div class="card-header">System status</div>
<div class="card-body">
<div class="row g-2">
<div class="col-md-6">
<div class="d-flex justify-content-between">
<div class="fw-semibold">Database size</div>
<div>{{ db_size_human }}</div>
</div>
</div>
<div class="col-md-6">
<div class="d-flex justify-content-between">
<div class="fw-semibold">Free disk space</div>
<div>
{% if free_disk_warning %}
<span class="text-danger fw-bold">{{ free_disk_human }}</span>
<span class="text-danger">(mail import will be blocked below 2 GB)</span>
{% else %}
{{ free_disk_human }}
{% endif %}
</div>
</div>
</div>
</div>
</div>
</div>
{% if section == 'general' %}
<form method="post" class="mb-4">
<div class="card mb-3">
<div class="card-header">Mail (Microsoft Graph)</div>
<div class="card-body">
<div class="row g-3">
<div class="col-md-6">
<label for="graph_tenant_id" class="form-label">Tenant ID</label>
<input type="text" class="form-control" id="graph_tenant_id" name="graph_tenant_id" value="{{ settings.graph_tenant_id or '' }}" />
</div>
<div class="col-md-6">
<label for="graph_client_id" class="form-label">Client ID</label>
<input type="text" class="form-control" id="graph_client_id" name="graph_client_id" value="{{ settings.graph_client_id or '' }}" />
</div>
<div class="col-md-6">
<label for="graph_client_secret" class="form-label">Client secret</label>
<input
type="password"
class="form-control"
id="graph_client_secret"
name="graph_client_secret"
placeholder="{% if has_client_secret %}******** (stored){% else %}enter secret{% endif %}"
/>
<div class="form-text">Leave empty to keep the existing secret.</div>
</div>
<div class="col-md-6">
<label for="graph_mailbox" class="form-label">Mailbox address</label>
<input type="text" class="form-control" id="graph_mailbox" name="graph_mailbox" value="{{ settings.graph_mailbox or '' }}" />
</div>
<div class="col-md-6">
<label for="incoming_folder" class="form-label">Incoming folder</label>
<div class="input-group">
<input type="text" class="form-control" id="incoming_folder" name="incoming_folder" value="{{ settings.incoming_folder or '' }}" readonly />
<button type="button" class="btn btn-outline-secondary" id="browse_incoming_btn">Browse...</button>
</div>
<div class="form-text">Select the folder where backup report e-mails are fetched from.</div>
</div>
<div class="col-md-6">
<label for="processed_folder" class="form-label">Processed folder</label>
<div class="input-group">
<input type="text" class="form-control" id="processed_folder" name="processed_folder" value="{{ settings.processed_folder or '' }}" readonly />
<button type="button" class="btn btn-outline-secondary" id="browse_processed_btn">Browse...</button>
</div>
<div class="form-text">Select the folder where processed e-mails are moved to.</div>
</div>
</div>
</div>
</div>
<div class="card mb-3">
<div class="card-header">Daily Jobs</div>
<div class="card-body">
<div class="row g-3">
<div class="col-md-4">
<label for="daily_jobs_start_date" class="form-label">Daily Jobs start date</label>
<input type="date" class="form-control" id="daily_jobs_start_date" name="daily_jobs_start_date" value="{{ settings.daily_jobs_start_date if settings.daily_jobs_start_date else '' }}" />
<div class="form-text">Missed checks start after this date. Older runs are used to learn schedules.</div>
</div>
</div>
</div>
</div>
<div class="card mb-3">
<div class="card-header">Display</div>
<div class="card-body">
<div class="row g-3">
<div class="col-md-6">
<label for="ui_timezone" class="form-label">Timezone</label>
<select class="form-select" id="ui_timezone" name="ui_timezone">
{% for tz in tz_options %}
<option value="{{ tz }}" {% if settings.ui_timezone == tz %}selected{% endif %}>{{ tz }}</option>
{% endfor %}
</select>
<div class="form-text">Controls how timestamps are shown in the web interface (Logging, Jobs, Daily Jobs, Run Checks, etc.).</div>
</div>
</div>
</div>
</div>
<div class="d-flex justify-content-end mt-3">
<button type="submit" class="btn btn-primary">Save settings</button>
</div>
</form>
{% endif %}
{% if section == 'users' %}
<div class="card mb-4">
<div class="card-header">User management</div>
<div class="card-body">
<div class="table-responsive mb-3">
<table class="table table-sm table-hover align-middle">
<thead class="table-light">
<tr>
<th scope="col">Username</th>
<th scope="col">Roles</th>
<th scope="col">Actions</th>
</tr>
</thead>
<tbody>
{% if users %}
{% for user in users %}
<tr>
<td>{{ user.username }}</td>
<td>{{ (user.role or '')|replace(',', ', ') }}</td>
<td>
<div class="d-flex flex-wrap gap-2">
<form method="post" action="{{ url_for('main.settings_users_reset_password', user_id=user.id) }}" class="d-inline">
<div class="input-group input-group-sm">
<input type="password" name="reset_password" class="form-control" placeholder="New password" aria-label="New password" />
<button type="submit" class="btn btn-outline-secondary">Reset</button>
</div>
</form>
{# A user can hold multiple comma-separated roles (rendered via replace(',', ', ')
   above and created via role checkboxes below), so equality against 'admin'
   misses e.g. 'admin,operator'. Count admins by membership instead, and only
   disable deletion when this user is the sole remaining admin. #}
{% set ns_admin = namespace(count=0) %}
{% for u in users %}{% if 'admin' in (u.role or '') %}{% set ns_admin.count = ns_admin.count + 1 %}{% endif %}{% endfor %}
{% set is_last_admin = ('admin' in (user.role or '')) and ns_admin.count <= 1 %}
<form method="post" action="{{ url_for('main.settings_users_delete', user_id=user.id) }}" class="d-inline">
<button type="submit" class="btn btn-sm btn-outline-danger" {% if is_last_admin %}disabled title="Cannot delete the last admin account"{% endif %}>Delete</button>
</form>
</div>
</td>
</tr>
{% endfor %}
{% else %}
<tr>
<td colspan="3" class="text-center text-muted py-3">No users found.</td>
</tr>
{% endif %}
</tbody>
</table>
</div>
<h5 class="mt-3">Create new user</h5>
<form method="post" action="{{ url_for('main.settings_users_create') }}" class="row g-2 align-items-end">
<div class="col-md-4">
<label for="new_username" class="form-label">Username</label>
<input type="text" class="form-control" id="new_username" name="new_username" required />
</div>
<div class="col-md-4">
<label class="form-label d-block">Roles</label>
<div class="row g-2">
<div class="col-6">
<div class="form-check">
<input class="form-check-input" type="checkbox" id="role_admin" name="new_roles" value="admin" />
<label class="form-check-label" for="role_admin">Admin</label>
</div>
<div class="form-check">
<input class="form-check-input" type="checkbox" id="role_operator" name="new_roles" value="operator" />
<label class="form-check-label" for="role_operator">Operator</label>
</div>
</div>
<div class="col-6">
<div class="form-check">
<input class="form-check-input" type="checkbox" id="role_reporter" name="new_roles" value="reporter" />
<label class="form-check-label" for="role_reporter">Reporter</label>
</div>
<div class="form-check">
<input class="form-check-input" type="checkbox" id="role_viewer" name="new_roles" value="viewer" checked />
<label class="form-check-label" for="role_viewer">Viewer</label>
</div>
</div>
</div>
</div>
<div class="col-md-3">
<label for="new_password" class="form-label">Password</label>
<input type="password" class="form-control" id="new_password" name="new_password" required />
</div>
<div class="col-md-1">
<button type="submit" class="btn btn-primary w-100">Create</button>
</div>
</form>
</div>
</div>
{% endif %}
{% if section == 'imports' %}
<form method="post" class="mb-4">
<div class="card mb-3">
<div class="card-header">Import configuration</div>
<div class="card-body">
<div class="form-check form-switch mb-3">
<input class="form-check-input" type="checkbox" id="auto_import_enabled" name="auto_import_enabled" {% if settings.auto_import_enabled %}checked{% endif %} />
<label class="form-check-label" for="auto_import_enabled">Enable automatic mail import</label>
</div>
<div class="row g-3">
<div class="col-md-4">
<label for="auto_import_interval_minutes" class="form-label">Interval (minutes)</label>
<input type="number" min="1" class="form-control" id="auto_import_interval_minutes" name="auto_import_interval_minutes" value="{{ settings.auto_import_interval_minutes }}" />
</div>
<div class="col-md-4">
<label for="auto_import_cutoff_date" class="form-label">Automatic importer cutoff date</label>
<input type="date" class="form-control" id="auto_import_cutoff_date" name="auto_import_cutoff_date" value="{{ settings.auto_import_cutoff_date if settings.auto_import_cutoff_date else '' }}" />
<div class="form-text">Messages older than this date are ignored and remain in the inbox.</div>
</div>
<div class="col-md-4">
<label for="manual_import_batch_size" class="form-label">Manual import batch size</label>
<input type="number" min="1" max="50" class="form-control" id="manual_import_batch_size" name="manual_import_batch_size" value="{{ settings.manual_import_batch_size }}" />
<div class="form-text">Default is 50 items per manual import.</div>
</div>
<div class="col-md-6">
<label for="ingest_eml_retention_days" class="form-label">Store EML for debugging</label>
{# Fall back to 7 only when the setting is missing; `or 7` would also map a
   stored 0 ("Off") to 7, so "Off" could never show as selected. #}
{% set eml_days = settings.ingest_eml_retention_days if settings.ingest_eml_retention_days is not none else 7 %}
<select class="form-select" id="ingest_eml_retention_days" name="ingest_eml_retention_days">
  <option value="0" {% if eml_days == 0 %}selected{% endif %}>Off</option>
  <option value="7" {% if eml_days == 7 %}selected{% endif %}>7 days</option>
  <option value="14" {% if eml_days == 14 %}selected{% endif %}>14 days</option>
</select>
<div class="form-text">When enabled, the raw .eml is stored in the database and can be downloaded from Inbox. Older EML data is removed automatically.</div>
</div>
</div>
</div>
</div>
<div class="d-flex justify-content-end mt-3">
<button type="submit" class="btn btn-primary">Save settings</button>
</div>
</form>
<div class="card mb-4">
<div class="card-header">Manual mail import</div>
<div class="card-body">
<p class="mb-3">Trigger a one-time mail import using the Microsoft Graph settings in General. The number of items is limited to 50.</p>
<form method="post" action="{{ url_for('main.settings_mail_import') }}" class="row g-2 align-items-end">
<div class="col-md-6">
<label for="manual_import_items" class="form-label">Number of items</label>
<input type="number" class="form-control" id="manual_import_items" name="manual_import_items" min="1" max="50" value="{{ settings.manual_import_batch_size }}" />
</div>
<div class="col-md-6">
<button type="submit" class="btn btn-secondary w-100">Run import</button>
</div>
</form>
<p class="mt-3 text-muted mb-0">Results (counts and any errors) are shown as notifications and recorded on the Logging page.</p>
</div>
</div>
{% endif %}
{% if section == 'maintenance' %}
<div class="row g-3 mb-4">
<div class="col-12 col-lg-6">
<div class="card h-100">
<div class="card-header">Approved jobs export / import</div>
<div class="card-body">
<p class="mb-3">Export and import previously approved jobs (customers and job definitions). Useful when starting with a clean installation and restoring your job list.</p>
<div class="d-flex flex-wrap gap-2 mb-3">
<a class="btn btn-outline-primary" href="{{ url_for('main.settings_jobs_export') }}">Download export (JSON)</a>
</div>
<hr class="my-3" />
<form method="post" action="{{ url_for('main.settings_jobs_import') }}" enctype="multipart/form-data" onsubmit="return confirm('Import jobs from file? Existing jobs with the same key (Customer + Backup + Type + Job name) will be updated.');">
<div class="row g-2">
<div class="col-md-8">
<label for="jobs_file" class="form-label">Import file</label>
<input type="file" class="form-control" id="jobs_file" name="jobs_file" accept="application/json" required />
</div>
<div class="col-md-4 d-flex align-items-end">
<button type="submit" class="btn btn-primary w-100">Import jobs</button>
</div>
<div class="col-md-8">
<div class="form-text">Use a JSON export created by this application.</div>
</div>
</div>
</form>
</div>
</div>
</div>
<div class="col-12 col-lg-6">
<div class="card h-100 border-warning">
<div class="card-header bg-warning">Object maintenance</div>
<div class="card-body">
<p class="mb-3">Rebuild object links for existing approved runs (repairs missing reporting links).</p>
<form method="post" action="{{ url_for('main.settings_objects_backfill') }}" onsubmit="return confirm('Run object backfill now?');">
<button type="submit" class="btn btn-warning">Backfill objects</button>
</form>
</div>
</div>
</div>
<div class="col-12 col-lg-6">
<div class="card h-100 border-danger">
<div class="card-header bg-danger text-white">Jobs maintenance</div>
<div class="card-body">
<p class="mb-3">Delete <strong>all</strong> jobs and job runs. Related mails will be returned to the Inbox.</p>
<form method="post" action="{{ url_for('main.settings_jobs_delete_all') }}" onsubmit="return confirm('Delete ALL jobs? This cannot be undone.');">
<button type="submit" class="btn btn-danger">Delete all jobs</button>
</form>
</div>
</div>
</div>
</div>
<div class="card mb-4 border-danger">
<div class="card-header bg-danger text-white">Danger zone</div>
<div class="card-body">
<p class="text-muted mb-3">
Resetting will permanently delete all application data (customers, jobs, runs, logs, tickets, remarks and users).
After reset you will be redirected to the initial setup to create a new admin account.
</p>
<form method="post" action="{{ url_for('main.settings_app_reset') }}" class="row g-2 align-items-end">
<div class="col-md-4">
<label class="form-label">Type RESET to confirm</label>
<input type="text" name="confirm_reset" class="form-control" placeholder="RESET" autocomplete="off" />
</div>
<div class="col-md-8">
<button type="submit" class="btn btn-danger">Reset application</button>
</div>
</form>
</div>
</div>
{% endif %}
{% if section == 'general' %}
<!-- Folder tree modal -->
<div class="modal fade" id="folderTreeModal" tabindex="-1" aria-labelledby="folderTreeModalLabel" aria-hidden="true">
<div class="modal-dialog modal-lg modal-dialog-scrollable">
<div class="modal-content">
<div class="modal-header">
<h5 class="modal-title" id="folderTreeModalLabel">Select folder</h5>
<button type="button" class="btn-close" data-bs-dismiss="modal" aria-label="Close"></button>
</div>
<div class="modal-body">
<div id="folderTreeLoading" class="mb-2">Loading folders from Microsoft Graph...</div>
<div id="folderTreeError" class="text-danger mb-2 d-none"></div>
<ul id="folderTree" class="list-unstyled"></ul>
</div>
<div class="modal-footer">
<button type="button" class="btn btn-secondary" data-bs-dismiss="modal">Close</button>
</div>
</div>
</div>
</div>
<script>
(function () {
let targetInput = null;
let foldersLoaded = false;
function renderTree(nodes, container) {
  // Recursively render the mailbox folder hierarchy as nested lists of buttons.
  container.innerHTML = "";
  nodes.forEach(function (node) {
    const label = node.path || node.displayName;
    const item = document.createElement("li");
    const pickBtn = document.createElement("button");
    pickBtn.type = "button";
    pickBtn.className = "btn btn-sm btn-outline-primary mb-1";
    pickBtn.textContent = label;
    pickBtn.onclick = function () {
      // Write the chosen folder path into whichever input opened the modal.
      if (targetInput) {
        targetInput.value = label;
      }
      const modalEl = document.getElementById("folderTreeModal");
      const modal = bootstrap.Modal.getInstance(modalEl);
      if (modal) {
        modal.hide();
      }
    };
    item.appendChild(pickBtn);
    const children = node.children || [];
    if (children.length > 0) {
      const childList = document.createElement("ul");
      childList.className = "list-unstyled ms-3";
      renderTree(children, childList);
      item.appendChild(childList);
    }
    container.appendChild(item);
  });
}
function loadFolders() {
  // Fetch the mailbox folder tree from the backend and render it in the modal.
  const loadingEl = document.getElementById("folderTreeLoading");
  const errorEl = document.getElementById("folderTreeError");
  const treeEl = document.getElementById("folderTree");
  // Reset the modal to its "loading" state before each request.
  loadingEl.classList.remove("d-none");
  errorEl.classList.add("d-none");
  errorEl.textContent = "";
  treeEl.innerHTML = "";
  const showError = function (message) {
    errorEl.textContent = message;
    errorEl.classList.remove("d-none");
  };
  fetch("{{ url_for('main.settings_folders') }}")
    .then(function (resp) {
      if (!resp.ok) {
        throw new Error("Failed to load folders");
      }
      return resp.json();
    })
    .then(function (data) {
      loadingEl.classList.add("d-none");
      if (data.status !== "ok") {
        showError(data.message || "Unknown error.");
        return;
      }
      // Cache so repeated modal opens don't refetch (see the browse handlers).
      foldersLoaded = true;
      renderTree(data.folders || [], treeEl);
    })
    .catch(function (err) {
      loadingEl.classList.add("d-none");
      showError("Failed to load folders from Microsoft Graph.");
      console.error(err);
    });
}
document.addEventListener("DOMContentLoaded", function () {
  const modalEl = document.getElementById("folderTreeModal");
  const modal = new bootstrap.Modal(modalEl);
  // Wire one "Browse..." button: clicking it opens the folder picker and
  // records which input field should receive the selected folder path.
  function wireBrowse(btn, input) {
    if (!btn || !input) {
      return;
    }
    btn.addEventListener("click", function () {
      targetInput = input;
      modal.show();
      // Folders are fetched lazily, once, on first open.
      if (!foldersLoaded) {
        loadFolders();
      }
    });
  }
  wireBrowse(document.getElementById("browse_incoming_btn"), document.getElementById("incoming_folder"));
  wireBrowse(document.getElementById("browse_processed_btn"), document.getElementById("processed_folder"));
});
})();
</script>
{% endif %}
{% if section == 'news' %}
<div class="card mb-4">
<div class="card-header">News</div>
<div class="card-body">
<form method="post" action="{{ url_for('main.settings_news_create') }}" class="mb-4">
<div class="row g-3">
<div class="col-md-6">
<label class="form-label">Title</label>
<input type="text" class="form-control" name="title" required />
</div>
<div class="col-md-6">
<label class="form-label">Link (optional)</label>
<input type="url" class="form-control" name="link_url" placeholder="https://..." />
</div>
<div class="col-12">
<label class="form-label">Body</label>
<textarea class="form-control" name="body" rows="4" required></textarea>
</div>
<div class="col-md-3">
<label class="form-label">Severity</label>
<select class="form-select" name="severity">
<option value="info" selected>Info</option>
<option value="warning">Warning</option>
</select>
</div>
<div class="col-md-3">
<label class="form-label">Publish from (optional)</label>
<input type="datetime-local" class="form-control" name="publish_from" />
</div>
<div class="col-md-3">
<label class="form-label">Publish until (optional)</label>
<input type="datetime-local" class="form-control" name="publish_until" />
</div>
<div class="col-md-3 d-flex align-items-end">
<div class="d-flex flex-wrap gap-3">
<div class="form-check">
<input class="form-check-input" type="checkbox" name="active" id="news_active_new" checked />
<label class="form-check-label" for="news_active_new">Active</label>
</div>
<div class="form-check">
<input class="form-check-input" type="checkbox" name="pinned" id="news_pinned_new" />
<label class="form-check-label" for="news_pinned_new">Pinned</label>
</div>
</div>
</div>
</div>
<div class="d-flex justify-content-end mt-3">
<button type="submit" class="btn btn-primary">Create news item</button>
</div>
</form>
{% if news_admin_items %}
<div class="accordion" id="newsItemsAccordion">
{% for item in news_admin_items %}
<div class="accordion-item">
<h2 class="accordion-header" id="headingNews{{ item.id }}">
<button class="accordion-button collapsed" type="button" data-bs-toggle="collapse" data-bs-target="#collapseNews{{ item.id }}" aria-expanded="false" aria-controls="collapseNews{{ item.id }}">
{% if item.pinned %}📌 {% endif %}{{ item.title }}
{% if item.active %}
<span class="badge text-bg-success ms-2">Active</span>
{% else %}
<span class="badge text-bg-secondary ms-2">Inactive</span>
{% endif %}
{% if item.severity == 'warning' %}
<span class="badge text-bg-warning ms-2">Warning</span>
{% else %}
<span class="badge text-bg-info ms-2">Info</span>
{% endif %}
{% set stats = news_admin_stats.get(item.id) %}
{% if stats %}
<span class="badge text-bg-light text-dark ms-2">{{ stats.read }}/{{ stats.total }} read</span>
{% endif %}
</button>
</h2>
<div id="collapseNews{{ item.id }}" class="accordion-collapse collapse" aria-labelledby="headingNews{{ item.id }}" data-bs-parent="#newsItemsAccordion">
<div class="accordion-body">
<form method="post" action="{{ url_for('main.settings_news_update', news_id=item.id) }}" class="mb-3">
<div class="row g-3">
<div class="col-md-6">
<label class="form-label">Title</label>
<input type="text" class="form-control" name="title" value="{{ item.title }}" required />
</div>
<div class="col-md-6">
<label class="form-label">Link (optional)</label>
<input type="url" class="form-control" name="link_url" value="{{ item.link_url or '' }}" />
</div>
<div class="col-12">
<label class="form-label">Body</label>
<textarea class="form-control" name="body" rows="4" required>{{ item.body }}</textarea>
</div>
<div class="col-md-3">
<label class="form-label">Severity</label>
<select class="form-select" name="severity">
<option value="info" {% if item.severity != 'warning' %}selected{% endif %}>Info</option>
<option value="warning" {% if item.severity == 'warning' %}selected{% endif %}>Warning</option>
</select>
</div>
<div class="col-md-3">
<label class="form-label">Publish from (optional)</label>
<input type="datetime-local" class="form-control" name="publish_from" value="{{ item.publish_from.strftime('%Y-%m-%dT%H:%M') if item.publish_from else '' }}" />
</div>
<div class="col-md-3">
<label class="form-label">Publish until (optional)</label>
<input type="datetime-local" class="form-control" name="publish_until" value="{{ item.publish_until.strftime('%Y-%m-%dT%H:%M') if item.publish_until else '' }}" />
</div>
<div class="col-md-3 d-flex align-items-end">
<div class="d-flex flex-wrap gap-3">
<div class="form-check">
<input class="form-check-input" type="checkbox" name="active" id="news_active_{{ item.id }}" {% if item.active %}checked{% endif %} />
<label class="form-check-label" for="news_active_{{ item.id }}">Active</label>
</div>
<div class="form-check">
<input class="form-check-input" type="checkbox" name="pinned" id="news_pinned_{{ item.id }}" {% if item.pinned %}checked{% endif %} />
<label class="form-check-label" for="news_pinned_{{ item.id }}">Pinned</label>
</div>
</div>
</div>
</div>
<div class="d-flex justify-content-end mt-3">
<button type="submit" class="btn btn-sm btn-primary">Save</button>
</div>
</form>
<div class="d-flex flex-wrap justify-content-between gap-2">
<div class="d-flex flex-wrap gap-2">
<a class="btn btn-sm btn-outline-secondary" href="{{ url_for('main.settings_news_reads', news_id=item.id) }}">View reads</a>
<form method="post" action="{{ url_for('main.settings_news_reset_reads', news_id=item.id) }}" class="d-inline">
<button type="submit" class="btn btn-sm btn-outline-warning">Reset read status</button>
</form>
</div>
<form method="post" action="{{ url_for('main.settings_news_delete', news_id=item.id) }}" class="d-inline" onsubmit="return confirm('Delete this news item?');">
<button type="submit" class="btn btn-sm btn-outline-danger">Delete</button>
</form>
</div>
</div>
</div>
</div>
{% endfor %}
</div>
{% else %}
<div class="text-muted">No news items yet.</div>
{% endif %}
</div>
</div>
{% endif %}
{% endblock %}

View File

@ -0,0 +1,40 @@
{% extends "layout/base.html" %}
{% block content %}
<div class="d-flex flex-wrap align-items-center justify-content-between mb-3">
<div>
<h2 class="mb-1">News reads</h2>
<div class="text-muted">{{ item.title }}</div>
</div>
<div>
<a class="btn btn-outline-secondary" href="{{ url_for('main.settings', section='news') }}">Back to News</a>
</div>
</div>
<div class="card">
<div class="card-header">Read by</div>
<div class="card-body">
{% if reads %}
<div class="table-responsive">
<table class="table table-sm table-hover align-middle">
<thead class="table-light">
<tr>
<th scope="col">User</th>
<th scope="col">Read at</th>
</tr>
</thead>
<tbody>
{% for read, user in reads %}
<tr>
<td>{{ user.username }}</td>
<td>{{ read.read_at }}</td>
</tr>
{% endfor %}
</tbody>
</table>
</div>
{% else %}
<div class="text-muted">No reads yet.</div>
{% endif %}
</div>
</div>
{% endblock %}

View File

@ -0,0 +1,96 @@
{% extends "layout/base.html" %}
{% block content %}
<div class="d-flex align-items-center justify-content-between mb-3">
<h2 class="mb-0">Ticket {{ ticket.ticket_code }}</h2>
<a class="btn btn-outline-secondary" href="{{ url_for('main.tickets_page', tab='tickets') }}">Back</a>
</div>
<div class="card mb-3">
<div class="card-body">
<div class="mb-2">
<span class="me-2">{% if ticket.resolved_at %}✅ Resolved{% else %}🎫 Active{% endif %}</span>
<span class="text-muted me-2">Active from: {{ ticket.active_from_date.strftime('%d-%m-%Y') if ticket.active_from_date else '-' }}</span>
<span class="text-muted">Start: {{ ticket.start_date.strftime('%d-%m-%Y %H:%M:%S') if ticket.start_date else '-' }}</span>
{% if ticket.resolved_at %}
<span class="text-muted ms-2">Resolved: {{ ticket.resolved_at.strftime('%d-%m-%Y %H:%M:%S') }}</span>
{% endif %}
</div>
<form method="post" class="row g-3"> <div class="col-12">
<label class="form-label">Description</label>
<textarea class="form-control" name="description" rows="5">{{ ticket.description or '' }}</textarea>
</div>
{% if active_role in ['admin','operator'] %}
<div class="col-12">
<button class="btn btn-primary" type="submit">Save</button>
{% if not ticket.resolved_at %}
<button class="btn btn-outline-success" type="button" onclick="if(confirm('Mark ticket as resolved?')){fetch('{{ url_for('main.api_ticket_resolve', ticket_id=ticket.id) }}',{method:'POST'}).then(()=>location.reload());}">Resolve</button>
{% endif %}
</div>
{% endif %}
</form>
</div>
</div>
<div class="row g-3">
<div class="col-lg-6">
<div class="card h-100">
<div class="card-body">
<h5 class="card-title">Scopes</h5>
{% if scopes %}
<ul class="list-group list-group-flush">
{% for s in scopes %}
<li class="list-group-item small">
<div><strong>Type:</strong> {{ s.scope_type }}</div>
<div><strong>Customer:</strong> {{ s.customer_id or '-' }}</div>
<div><strong>Backup:</strong> {{ s.backup_software or '-' }}</div>
<div><strong>Type:</strong> {{ s.backup_type or '-' }}</div>
<div><strong>Job:</strong> {{ s.job_id or '-' }}</div>
<div><strong>Job name match:</strong> {{ s.job_name_match or '-' }}</div>
</li>
{% endfor %}
</ul>
{% else %}
<div class="text-muted">No scopes.</div>
{% endif %}
</div>
</div>
</div>
<div class="col-lg-6">
<div class="card h-100">
<div class="card-body">
<h5 class="card-title">Linked runs (last 20)</h5>
{% if runs %}
<div class="table-responsive">
<table class="table table-sm align-middle">
<thead class="table-light">
<tr>
<th>Run at</th>
<th>Customer</th>
<th>Job</th>
<th>Status</th>
</tr>
</thead>
<tbody>
{% for r in runs %}
<tr>
<td class="text-nowrap">{{ r.run_at }}</td>
<td>{{ r.customer_name }}</td>
<td>{{ r.job_name }}</td>
<td>{{ r.status }}</td>
</tr>
{% endfor %}
</tbody>
</table>
</div>
{% else %}
<div class="text-muted">No linked runs.</div>
{% endif %}
</div>
</div>
</div>
</div>
{% endblock %}

View File

@ -0,0 +1,169 @@
{% extends "layout/base.html" %}
{% block content %}
<h2 class="mb-3">Tickets &amp; Remarks</h2>
<ul class="nav nav-tabs mb-3">
<li class="nav-item">
<a class="nav-link {% if tab == 'tickets' %}active{% endif %}" href="{{ url_for('main.tickets_page', tab='tickets', active=('1' if active_only else '0'), customer_id=customer_id, backup_software=backup_software, backup_type=backup_type, q=q) }}">Tickets</a>
</li>
<li class="nav-item">
<a class="nav-link {% if tab == 'remarks' %}active{% endif %}" href="{{ url_for('main.tickets_page', tab='remarks', active=('1' if active_only else '0'), customer_id=customer_id, backup_software=backup_software, backup_type=backup_type, q=q) }}">Remarks</a>
</li>
</ul>
<form method="get" class="row g-2 align-items-end mb-3">
<input type="hidden" name="tab" value="{{ tab }}" />
<div class="col-auto">
<label class="form-label" for="flt_active">Status</label>
<select class="form-select" id="flt_active" name="active">
<option value="1" {% if active_only %}selected{% endif %}>Active</option>
<option value="0" {% if not active_only %}selected{% endif %}>All</option>
</select>
</div>
<div class="col-auto">
<label class="form-label" for="flt_customer">Customer</label>
<select class="form-select" id="flt_customer" name="customer_id">
<option value="0">All</option>
{% for c in customers %}
<option value="{{ c.id }}" {% if customer_id == c.id %}selected{% endif %}>{{ c.name }}</option>
{% endfor %}
</select>
</div>
<div class="col-auto">
<label class="form-label" for="flt_backup">Backup software</label>
<input class="form-control" id="flt_backup" name="backup_software" value="{{ backup_software }}" />
</div>
<div class="col-auto">
<label class="form-label" for="flt_type">Backup type</label>
<input class="form-control" id="flt_type" name="backup_type" value="{{ backup_type }}" />
</div>
<div class="col-auto" style="min-width: 260px;">
<label class="form-label" for="flt_q">Search</label>
<input class="form-control" id="flt_q" name="q" value="{{ q }}" placeholder="ticket code / description / job" />
</div>
<div class="col-auto">
<button class="btn btn-primary" type="submit">Filter</button>
<a class="btn btn-outline-secondary" href="{{ url_for('main.tickets_page', tab=tab) }}">Reset</a>
</div>
</form>
{% if tab == 'tickets' %}
<div class="table-responsive">
<table class="table table-sm table-hover align-middle">
<thead class="table-light">
<tr>
<th>Status</th>
<th>Ticket code</th>
<th>Customers</th>
<th>Scope</th>
<th class="text-end">Linked runs</th>
<th>Active from</th>
<th>Start date</th>
<th>Resolved at</th>
<th>Actions</th>
</tr>
</thead>
<tbody>
{% if tickets %}
{% for t in tickets %}
<tr>
<td>
{% if t.active %}
<span title="Active">🎫</span> Active
{% else %}
<span title="Resolved">✅</span> Resolved
{% endif %}
</td>
<td class="text-nowrap">{{ t.ticket_code }}</td>
<td>{{ t.customers }}</td>
<td>{{ t.scope_summary }}</td>
<td class="text-end">{{ t.linked_runs }}</td>
<td class="text-nowrap">{{ t.active_from_date }}</td>
<td class="text-nowrap">{{ t.start_date }}</td>
<td class="text-nowrap">{{ t.resolved_at }}</td>
<td class="text-nowrap">
<a class="btn btn-sm btn-outline-primary" href="{{ url_for('main.ticket_detail', ticket_id=t.id) }}">View / Edit</a>
{% if t.active and t.job_id %}
<a class="btn btn-sm btn-outline-secondary ms-1" href="{{ url_for('main.job_detail', job_id=t.job_id) }}">Job page</a>
{% endif %}
{% if t.active and (active_role in ['admin','operator']) %}
<form method="post" action="{{ url_for('main.api_ticket_resolve', ticket_id=t.id) }}" class="d-inline">
<button class="btn btn-sm btn-outline-success" type="submit">Resolve</button>
</form>
{% endif %}
</td>
</tr>
{% endfor %}
{% else %}
<tr>
<td colspan="9" class="text-muted text-center py-3">No tickets found.</td>
</tr>
{% endif %}
</tbody>
</table>
</div>
{% else %}
<div class="table-responsive">
<table class="table table-sm table-hover align-middle">
<thead class="table-light">
<tr>
<th>Status</th>
<th>Remark</th>
<th>Customers</th>
<th>Scope</th>
<th class="text-end">Linked runs</th>
<th>Start date</th>
<th>Resolved at</th>
<th>Actions</th>
</tr>
</thead>
<tbody>
{% if remarks %}
{% for r in remarks %}
<tr>
<td>
{% if r.active %}
<span title="Active">💬</span> Active
{% else %}
<span title="Resolved">✅</span> Resolved
{% endif %}
</td>
<td>
<div class="text-muted small">{{ r.preview }}</div>
</td>
<td>{{ r.customers }}</td>
<td>{{ r.scope_summary }}</td>
<td class="text-end">{{ r.linked_runs }}</td>
<td class="text-nowrap">{{ r.start_date }}</td>
<td class="text-nowrap">{{ r.resolved_at }}</td>
<td class="text-nowrap">
<a class="btn btn-sm btn-outline-primary" href="{{ url_for('main.remark_detail', remark_id=r.id) }}">View / Edit</a>
{% if r.active and r.job_id %}
<a class="btn btn-sm btn-outline-secondary ms-1" href="{{ url_for('main.job_detail', job_id=r.job_id) }}">Job page</a>
{% endif %}
{% if r.active and (active_role in ['admin','operator']) %}
<form method="post" action="{{ url_for('main.api_remark_resolve', remark_id=r.id) }}" class="d-inline">
<button class="btn btn-sm btn-outline-success" type="submit">Resolve</button>
</form>
{% endif %}
</td>
</tr>
{% endfor %}
{% else %}
<tr>
<td colspan="8" class="text-muted text-center py-3">No remarks found.</td>
</tr>
{% endif %}
</tbody>
</table>
</div>
{% endif %}
{% endblock %}

View File

@ -0,0 +1,33 @@
{# User Settings page: lets the authenticated user change their own password. #}
{% extends "layout/base.html" %}
{% block content %}
<div class="d-flex justify-content-between align-items-center mb-3">
  <h1 class="h4 mb-0">User Settings</h1>
</div>
<div class="card" style="max-width: 40rem;">
  <div class="card-body">
    <h2 class="h6">Change password</h2>
    {# No "action" attribute: the form posts back to the current URL.
       NOTE(review): assumes the server-side handler verifies current_password
       and that new_password == confirm_password — confirm in the route code. #}
    <form method="post" class="row g-3">
      <div class="col-12">
        <label class="form-label" for="current_password">Current password</label>
        {# autocomplete hints let password managers fill/update credentials correctly #}
        <input class="form-control" type="password" id="current_password" name="current_password" autocomplete="current-password" required />
      </div>
      <div class="col-12">
        <label class="form-label" for="new_password">New password</label>
        <input class="form-control" type="password" id="new_password" name="new_password" autocomplete="new-password" required />
      </div>
      <div class="col-12">
        <label class="form-label" for="confirm_password">Confirm new password</label>
        <input class="form-control" type="password" id="confirm_password" name="confirm_password" autocomplete="new-password" required />
      </div>
      <div class="col-12">
        <button class="btn btn-primary" type="submit">Update password</button>
      </div>
    </form>
  </div>
</div>
{% endblock %}

View File

@ -0,0 +1,54 @@
# Docker Compose stack for the Backupchecks application:
#   backupchecks — the web app (host port 8080)
#   postgres     — PostgreSQL 16 with a host bind mount for data persistence
#   adminer      — database admin UI on host port 8081
# NOTE(review): the top-level "version" key is ignored by Compose v2+ — confirm
# whether it can be dropped for this deployment.
version: "3.8"
services:
  backupchecks:
    image: gitea.oskamp.info/ivooskamp/backupchecks:dev
    container_name: backupchecks
    restart: unless-stopped
    # depends_on controls start ORDER only; it does not wait until Postgres
    # is actually ready to accept connections.
    depends_on:
      - postgres
    environment:
      # Credentials are injected from the accompanying .env file.
      POSTGRES_DB: ${POSTGRES_DB}
      POSTGRES_USER: ${POSTGRES_USER}
      POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
      # DB_HOST matches the postgres service's container_name, reachable
      # over the shared "backupnet" network.
      DB_HOST: backupchecks-postgres
      DB_PORT: 5432
      # NOTE(review): APP_PORT/APP_ENV/APP_SECRET_KEY are defined in .env but
      # not passed to this service — confirm whether the app requires them here.
    ports:
      - "8080:8080"
    networks:
      - backupnet
  postgres:
    image: postgres:16
    container_name: backupchecks-postgres
    restart: unless-stopped
    environment:
      POSTGRES_DB: ${POSTGRES_DB}
      POSTGRES_USER: ${POSTGRES_USER}
      POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
    volumes:
      # Host bind mount so database data survives container recreation.
      - /docker/appdata/backupchecks/backupchecks-postgres:/var/lib/postgresql/data
    networks:
      - backupnet
  adminer:
    image: adminer:latest
    container_name: backupchecks-adminer
    restart: unless-stopped
    ports:
      # Adminer listens on 8080 inside the container; exposed as 8081 on the host.
      - "8081:8080"
    networks:
      - backupnet
networks:
  backupnet:
    driver: bridge
# Environment for the Backupchecks compose stack (consumed via ${VAR} interpolation).
# SECURITY: replace every "Changeme" placeholder before deploying — these are
# the database password and the application secret key.
POSTGRES_DB=backup
POSTGRES_USER=backup
POSTGRES_PASSWORD=Changeme
# DB_HOST matches the postgres service's container_name in docker-compose.
DB_HOST=backupchecks-postgres
DB_PORT=5432
APP_PORT=8080
APP_ENV=production
APP_SECRET_KEY=Changeme

0
docs/architecture.md Normal file
View File

620
docs/changelog.md Normal file
View File

@ -0,0 +1,620 @@
## v20251231-01-dashboard-description
- Added a comprehensive explanatory section to the Dashboard, positioned directly below the legend.
- Introduced detailed documentation describing how Backupchecks collects, normalizes, and evaluates backup results across multiple backup solutions.
- Clarified the purpose and behavior of dashboard counters, job statuses, and override indicators.
- Expanded the explanation of the Daily Jobs and Run Checks workflows, including the review, follow-up, resolution, and override processes.
- Documented the operational goal of maintaining an empty or near-empty Run Checks page to ensure full review and accountability.
- Clearly positioned Backupchecks as a monitoring, validation, and control platform that enhances, but does not replace, existing backup software.
---
## v20251231-02-daily-first-open-dashboard
- Added logic to automatically redirect users to the Dashboard on their first website visit of each day.
- The redirect is applied per browser session after authentication, ensuring normal navigation afterwards.
- Excluded API endpoints and static asset requests from the redirect to prevent unintended side effects.
- Ensured the behavior is transparent to users while improving daily visibility of overall system status.
---
## v20251231-03-dashboard-news-settings-sections
- Added a Dashboard News section displaying active announcements per user.
- Implemented per-user read tracking, allowing users to mark news items as read so they disappear only for that user.
- Added admin management for News items in Settings (create, edit, delete, pin, severity, active state, publish window).
- Added admin visibility into read status per news item, including read counts and user timestamps.
- Implemented admin action to reset read status for a news item.
- Refactored the Settings page into separate sections to improve usability and prevent overly long pages.
---
## v20251231-04-settings-tabs-restructure
### Settings
- Removed the collapse-based layout from **Settings → General** and converted it to a fixed section view for improved clarity.
- Moved **Import configuration** from **Settings → General** to the dedicated **Imports** section.
- Moved **User Management** from **Settings → Maintenance** to the dedicated **Users** section.
- Moved **Manual mail import** from **Settings → Maintenance** to the **Imports** section.
- Simplified and clarified the overall Settings structure by grouping related functionality into dedicated sections.
---
## v20251231-05-reporting-raw-data-foundation
### Reporting Foundation
- Introduced the initial reporting foundation focused on raw data collection.
- Added support for object-based reporting across multiple jobs.
- Implemented immutable report snapshot data to ensure auditability.
- Added aggregated summary data to calculate success, warning, failed, missed, and override metrics per object.
- Prepared reporting structures to support CSV output and future PDF generation.
- Enabled both one-time and scheduled report concepts at the data and API level.
- Ensured report data is reusable for downloads and email delivery.
- Aligned reporting permissions so admin, operator, and reporter roles have equal access in phase 1.
- Designed the reporting model to be extensible for future RBAC, UI, and visualization enhancements.
---
## v20251231-06-reports-ui-placeholders
- Added an initial Reports UI to create, view, and manage report definitions for testing purposes.
- Implemented UI components to generate and download raw report data as CSV.
- Added scheduling placeholders (non-functional) to preview future report scheduling options.
- Prepared the Reports page layout for future extensions such as automated delivery and scheduling.
---
## v20251231-07-reports-raw-data-ui
- Added raw data preview functionality to the Reports page, allowing users to view report data directly in the UI without downloading a CSV.
- Introduced a modal-based raw data viewer with pagination support for large datasets.
- Implemented backend support to generate report data on demand for UI preview usage.
- Added API endpoints to retrieve report raw data with configurable limits and offsets.
- Prepared the reporting flow to support future export options (CSV/PDF) using the same data foundation.
---
## v20251231-08-reports-generation-enabled
- Enabled actual report generation from the Reports page.
- Connected the “Generate report” action to the backend generation logic.
- Ensured raw data can be used to build a report without requiring a schedule.
- Kept scheduling functionality as a placeholder only, as intended.
- Improved UI state handling after report generation (status and available actions).
---
## v20260101-01-reports-new-report-button-fix
- Fixed the “New report” button on the Reports page so it correctly opens the report creation modal.
- Ensured report-related JavaScript is initialized after DOMContentLoaded to avoid issues with unloaded dependencies.
- Restored the ability to start creating a new report from the UI.
---
## v20260101-02-reports-new-report-page-multicustomer
- Replaced the “New report” modal with a dedicated page for creating reports.
- Added a full Create Report page with support for future expansion of report options.
- Implemented month-based date selection with clickable days and separate time inputs.
- Added quick-select options for:
- First day of the current month
- First day of the last month
- Added the ability to select a customer for which the report is generated.
- Extended reporting to support generating reports for multiple customers in a single report.
- Updated report generation logic to handle single-customer, multi-customer, and all-customer scopes.
- Prepared reporting output for use cases such as account management and operations-wide overviews.
================================================================================================================================================
## v0.1.14
### Daily Jobs
- Introduced a consistent, case-insensitive multi-level sort order for the Daily Jobs overview: Customer → Backup Software → Backup Type → Job Name.
- Fixed backend ordering issues to ensure server-side data no longer overrides the intended sort logic.
- Ensured sorting is applied before serialization so the UI always reflects the correct order.
- Improved predictability and readability of job listings across environments.
### Veeam Backup for Microsoft 365
- Improved parsing of overall warning messages to correctly extract and display permission- and role-related issues.
- Added support for combined permission and role warnings in M365 reports.
- Ensured detailed permission warnings take precedence over generic “X of X objects processed” messages.
- Fixed incorrect overall message selection and filtered out misleading banner fragments.
- Resolved an indentation error in the parser that caused backend startup failures, restoring stability.
### Overrides Configuration and Matching
- Replaced free-text inputs with dropdowns for Backup Software and Backup Type in Overrides, including:
- Alphabetical sorting
- Preselection of existing values
- A global option at the top of each dropdown
- Fixed PostgreSQL compatibility issues by replacing DISTINCT queries with GROUP BY while preserving case-insensitive sorting.
- Ensured Overrides endpoints no longer crash due to invalid query constructions.
### Overrides Application, Editing, and Deletion
- Made newly created overrides apply immediately and retroactively to all unreviewed runs by default.
- Added full support for editing existing overrides and reapplying changes to unreviewed runs.
- Restricted override deletion to Admin users and ensured proper reprocessing after removal.
- Fixed datetime handling in override edit flows so unchanged values are preserved and NULL constraint violations are avoided.
- Ensured Admin users always see delete actions by consistently passing permission flags to the UI.
### Overrides Matching Logic Improvements
- Extended override matching to use persisted run_object_links joined with customer_objects instead of legacy or non-existent relationships.
- Improved global override matching by resolving backup software and type from MailMessage data when missing on jobs.
- Added support for matching against object-level error messages as well as run-level remarks.
- Ensured all override matching remains case-insensitive and consistent across run-level and object-level evaluations.
### Overrides UI Indicators and Reporting
- Introduced a blue status indicator for runs and jobs where overrides are applied.
- Updated status labels to display “Success (override)” for clearer distinction without changing canonical stored statuses.
- Added persistent override reporting metadata to job runs, including applied override ID, level, and reason.
- Ensured dashboards, Daily Jobs, Run Checks, and popups correctly propagate and display override-based success states.
- Fixed multiple UI rendering issues so overridden runs are no longer misclassified as warnings or missed jobs.
### Daily Jobs Popups
- Fixed popup loading failures and backend unpacking errors related to override handling.
- Ensured popup details consistently load correct run data.
- Aligned popup override detection and status coloring with Run Checks and Daily Jobs overviews.
---
## v0.1.13
This release focuses on improving visibility and consistency of Tickets and Remarks across Run Checks and Job Details, alongside several UI fixes and backend stability improvements.
### Highlights
- Added clear visual indicators for active Tickets and Remarks in the Run Checks overview.
- Enhanced Job Details and Job History to display actual ticket numbers and related remark messages, both in tables and popups.
- Improved navigation consistency by adding direct “Job page” links for Tickets and Remarks.
### Improvements
- Job History popups now reliably show associated ticket numbers and remark content.
- Backend job history data is enriched to support consistent UI rendering.
- Missed-run detection now includes a ±1 hour tolerance window and respects the configured UI timezone.
- Run Checks UI is simplified by hiding last-reviewed columns (data is still retained in the backend).
### Fixes
- Resolved a backend indentation issue that caused Gunicorn startup failures.
- Made frontend parsing of ticket/remark data more robust against malformed or unexpected payloads.
- Fixed JSON encoding issues in HTML data attributes to prevent popup rendering errors.
### Changelog Update
- Simplified the changelog by removing “Current Version” and “In testing” sections.
- The changelog now only shows completed changes.
---
## v0.1.12
### Dashboard & UI
- Corrected dashboard counters so **Expected**, **Missed**, and **Success (override)** statuses are shown accurately.
- Added dedicated counters for Expected and Success (override).
- Fixed layout issues on the Inbox dashboard tiles and improved label wrapping.
- Added safe timezone fallbacks to prevent incorrect status aggregation.
- Restored missing status icons and symbols across Dashboard and Daily Jobs views.
- Cleaned up Job Details UI by removing redundant columns and clarifying schedule display.
- Extended Job History with weekday labels and review metadata (Admin-only visibility).
### Stability & Reliability
- Fixed a Gunicorn startup crash caused by incorrect Python indentation.
- Improved migration robustness for soft-delete columns to prevent startup 502 errors on busy databases.
- Prevented duplicate or unintended regeneration of reviewed “Missed” runs.
### Inbox & Mail Handling
- Introduced soft-delete for Inbox messages with full Admin restore capability.
- Added an Admin-only “Deleted mails” page with audit details (who/when).
- Added popup previews for deleted mails without requiring restore.
- Improved HTML mail handling by extracting content from HTML attachments when the body is empty.
- Added an Admin maintenance action to backfill HTML bodies from existing attachments.
### Feedback & Settings
- Changed Feedback behavior so resolved items remain visible until explicitly deleted.
- Restricted feedback deletion to Admin users only.
- Added a User Settings page allowing users to change their own password securely.
### Backup Parser Enhancements
- Improved Veeam parsing:
- Fixed Health Check Summary parsing.
- Normalized job names by stripping “(Retry)”.
- Added and refined License Key parsing with correct status detection and exclusions from scheduling logic.
- Added and expanded Synology support:
- Active Backup for Business (NL/EN).
- R-Sync (NL/EN).
- Account Protection notifications (informational, no scheduling).
- Added new parsers:
- R-Drive Image.
- Syncovery.
- Ensured correct handling of objects, statuses, and scheduling exclusions where applicable.
### Changelog
- Removed the “Planned” section from the Changelog.
- Future planning is now handled exclusively via the Feedback page.
---
## v0.1.11
### Stability & Bug Fixes
- Fixed multiple page crashes caused by missing imports after refactoring (Jobs, Feedback, Run Checks, Inbox, Daily Jobs).
- Resolved Jinja2 template errors and SQL/runtime issues related to timezone handling.
- Improved robustness by explicitly importing shared helpers to prevent NameError exceptions.
### Run Checks & Review Workflow
- Introduced a new **Run Checks** page to review job runs independently from Daily Jobs.
- Displays all unreviewed runs with no time-based filtering.
- Supports bulk review actions and per-job review via popups.
- Added admin-only features: show reviewed runs, unmark reviewed runs, reviewer metadata, and full audit logging.
- Enhanced popups to group runs per job, include missed runs, and show ticket/remark indicators.
- Added per-job and per-popup status summaries using visual indicators only.
### UI & Visual Consistency
- Unified all job and run status indicators to a single shape differentiated by color.
- Added a clear status legend to the Dashboard, including the new **Expected** state.
- Removed textual status labels across Daily Jobs and Run Checks for a cleaner UI.
- Improved table layouts and widened content areas for better use of 1080p screens.
- Ensured consistent indicator rendering across all pages.
### Timezone & Display Improvements
- Added a configurable timezone setting in Settings.
- Updated all frontend date/time rendering to use the configured timezone instead of UTC.
- Fixed offset issues and restored missing timestamps across multiple pages.
### Missed Runs Logic
- Refined missed run detection to rely only on historically received mail reports.
- Prevented synthetic or never-run schedules from generating false missed runs.
### Settings & Maintenance
- Stabilized **Delete all jobs** by adding schema-tolerant cleanup of all related foreign key references.
- Refactored the Settings page layout using accordions and cards for improved clarity.
- Improved alignment and usability of import/export and user management sections.
### Roles & Access Control
- Added support for multiple roles per user with an active role switcher.
- Fixed role-based menu rendering and ensured permissions are evaluated against the active role.
- Ensured role switching consistently redirects to the Dashboard.
### Theme & UX Fixes
- Fixed manual theme switching (Light/Dark/Auto) and ensured user preferences persist.
- Corrected Inbox EML download functionality by restoring the missing shared import.
Overall, this release significantly improves stability, review workflows, visual consistency, timezone correctness, and administrative reliability, while refining the operator experience and access control model.
---
## v0.1.10
### Performance & Stability
- Reworked **Re-parse all** to process inbox messages in controlled batches, preventing gateway and Gunicorn timeouts on large inboxes.
- Added execution time guards to stop processing before proxy limits are reached.
- Optimized job-matching queries and disabled session autoflush during batch operations to reduce database load.
- Ensured auto-approval and persistence logic only finalize after a full, successful re-parse cycle.
- Restored stable backend startup by fixing decorator ordering issues that caused 502 errors.
### Job Matching & Parsing
- Fixed approved job imports to persist `from_address`, ensuring correct matching during re-parse.
- Improved Veeam Backup Job parsing:
- Extracted and stored multi-line warnings/errors and object-level details with preserved line breaks.
- Ignored VM summary lines (e.g., “X of X VMs processed”) for overall status detection.
- Prevented incorrect overall warnings when issues are object-level only.
- Fixed regressions to ensure backup objects are consistently detected, stored, and displayed across all views.
### UI & UX Improvements
- Added EML download support for Job Details and Daily Jobs, with automatic availability handling and proper 404s when missing.
- Improved rendering to preserve line breaks (pre-wrap) in remarks, overall messages, and object details.
- Reduced visual clutter by moving overall status/messages out of tables and into context-specific popups.
- Standardized changelog version display by removing date suffixes.
- Reordered main navigation for better consistency.
### Daily Jobs & Status Accuracy
- Clarified Daily Jobs status logic:
- Introduced **Expected** for backups not yet due.
- Reserved **Missed** only for jobs past their final expected run time.
- Added last remark excerpts and ensured object details are visible in Daily Jobs popups.
### Tickets, Remarks & Overrides
- Introduced run-date-scoped ticket activity with `active_from_date`, ensuring accurate historical and current visibility.
- Implemented identical scoping for remarks, preserving visibility across runs even after resolution.
- Fixed resolve actions to redirect properly in the UI while keeping JSON responses for API/AJAX.
- Improved override handling so changes apply immediately to existing job runs with correct priority resolution.
### New Features
- Added a **Feedback** board with per-user upvoting, admin moderation (resolve/reopen, soft delete), database migrations, and navigation entry.
### Navigation
- Updated menu order to: Inbox, Customers, Jobs, Daily Jobs, Tickets, Overrides, Reports, Settings, Logging, Changelog, Feedback.
---
## v0.1.9
### Changelog System Improvements
- Added and maintained multiple **Completed** changelog entries (v0.1.2 through v0.1.8) with correct release dates.
- Ensured all existing Completed, Testing, and Planned changelog entries were preserved without loss.
- Migrated the Completed changelog from markdown-based content to a **structured, non-markdown format** aligned with the Planned section.
- Simplified changelog rendering logic to use explicit section titles and bullet handling instead of full markdown parsing.
- Standardized formatting across all versions for long-term maintainability and consistent UI rendering.
### Bug Fixes & Stability
- Fixed multiple backend **Python syntax and runtime errors** related to changelog definitions (missing commas, indentation issues, invalid list entries).
- Resolved rendering issues where markdown content was displayed as plain text or collapsed incorrectly.
- Restored application startup stability by fixing missing imports (`re`, `html`) and indentation errors in changelog-related routes.
### Refactoring & Maintainability
- Refactored a large `routes.py` file into multiple smaller route modules.
- Introduced a shared routes module for common imports, helpers, and access control.
- Fixed NameError issues after refactoring by explicitly importing underscored helper functions that are not included via wildcard imports.
- Ensured all split route modules retained full functional parity with the original implementation.
### Release Management Updates
- Moved versions through **Testing → Completed** states correctly:
- v0.1.7 marked as Completed (2025-12-23).
- v0.1.8 added as Completed (2025-12-24) and restored as Current Version.
- Testing advanced to v0.1.9.
- Updated v0.1.8 release notes to document consistent job-matching and auto-approval behavior across all mail processing flows.
- Verified no regressions in changelog structure or rendering after updates.
Overall, v0.1.9 focused on **hardening the changelog system**, improving backend stability, cleaning up technical debt in routing, and ensuring consistent, reliable release tracking across the application.
---
## v0.1.8
### Overview
This release focuses on making job matching and auto-approval behavior fully consistent across manual inbox actions, automatic mail imports, and the “Re-parse all” process. It also fixes a critical backend startup issue introduced in the re-parse logic.
### Key Changes
- Introduced a single, shared job-matching helper based on a full unique key:
- From address
- Backup software
- Backup type
- Job name
- Updated manual inbox approval to reuse existing jobs when the unique key matches, instead of relying on customer-only matching.
- Aligned inbox “Re-parse all” auto-approve logic with the same shared matching behavior.
- Fixed automatic mail import auto-approve so it correctly:
- Creates a JobRun
- Marks the mail as approved
- Moves the mail to history when a matching job exists
### Re-parse All Improvements
- Auto-approve is now executed during “Re-parse all”, not only on initial mail import.
- After re-parsing, all successfully parsed mails without a linked job are re-evaluated against existing jobs using the full unique key.
- When a matching active job with auto-approve enabled is found:
- The mail is automatically approved
- The mail is linked to the job
- The mail is moved to history
- A corresponding job run is created and shown in Job History
### Fixes
- Resolved an issue where “Re-parse all” previously only updated parse metadata and skipped auto-approve logic, causing historical mails not to appear in job history.
- Fixed a SyntaxError in the re-parse auto-approve logic that caused backend startup failures (Bad Gateway).
- Corrected try/except structure and indentation to ensure re-parse auto-approve runs safely per mail without breaking the overall process.
### Result
- Job matching and auto-approval behavior is now consistent across all mail processing flows.
- Historical mails are correctly linked to jobs and visible in job history.
- Backend stability during startup and re-parse operations is restored.
---
## v0.1.7
### Key Features
- Introduced **export and import functionality for approved jobs**, allowing approved job definitions to be migrated between clean installations via JSON.
- Import process automatically creates missing customers and updates existing jobs based on a unique job identity to prevent duplicates.
### Versioning & Changelog
- Promoted version **v0.1.7** from *Testing* to *Completed*.
- Introduced **v0.1.8** as the new *Testing* release.
- Updated the changelog structure and testing notes to reflect active export/import functionality.
### Parser Enhancements
- **Boxafe**
- Improved parsing for Shared Drives, Domain Accounts (Email, Contact, Drive, Calendar), and handling of *Warning* statuses.
- Corrected object detection logic, ensuring no false objects are created when jobs contain no object data.
- Removed object parsing for Shared Drives backups entirely.
- **Synology Hyper Backup**
- Added full support for Dutch notification emails.
- Improved status detection for Dutch phrasing.
- Confirmed that no objects are parsed for Hyper Backup jobs.
- **Veeam**
- Added support for **Scale-out Backup Repository (SOBR)** notifications, including storage capacity metrics suitable for logging and graphing.
- Added support for **Veeam Health Check** reports with correct object filtering to exclude summary-only entries.
### Job Approval & Auto-Approval Logic
- Significantly refined approved job matching logic to prevent cross-customer approvals.
- Improved auto-approve behavior during **Re-parse all**, Inbox reprocessing, and Graph imports.
- Enhanced resilience against case differences, whitespace, unicode characters, and hidden formatting issues.
- Iteratively refined normalization strategy:
- Ultimately simplified matching to rely primarily on a normalized **From address**, while ensuring parser-consistent values for Backup, Type, and Job Name.
- Ensured deterministic behavior during reprocessing by preventing mutation of message data.
### Stability Fixes
- Fixed multiple crashes and approval errors caused by undefined or incorrect job name handling.
- Resolved issues with duplicate job records (including `NULL` customer IDs) that previously blocked auto-approval.
- Ensured consistent JobRun creation and mail linking during automatic imports and re-parsing.
### Notes
- Due to changes in approval matching logic, **previously approved jobs are expected to be recreated** after these updates.
---
## v0.1.6
### Fixed
- Corrected auto-approve logic to ensure it is properly applied during automatic mail imports.
- Prevented the **Re-parse all** action from re-processing emails that were already approved.
- Ensured approved status is always respected and never overwritten during re-parsing or automatic imports.
- Fixed multiple Jinja2 `TemplateSyntaxError` issues in the base layout that caused 500 Internal Server Errors.
- Restored correct rendering of all pages affected by template errors, including Dashboard, Parsers, and Changelog.
- Resolved Changelog page rendering issues by fixing dictionary access in templates and avoiding conflicts with built-in methods.
### Added
- Introduced a centralized **Changelog** page containing:
- Active production version
- Testing version
- Planned / Todo items
- Completed changes
- Added the Changelog entry point to the main navigation.
- Applied a clear versioning convention, e.g. `v0.1.7 (v20251222-03)` for testing releases.
- Marked version `0.1.6` as the active production release.
### Planned
- Export and import of jobs to allow restoring approved jobs after a clean installation.
- Always register “New license key is not available” as an error.
- Support for a scale-out backup repository Cloud Connect Immutable parser.
- Ability to attach EML files to Daily Jobs and Job Details.
- Fix for Light/Dark theme switching so users can properly change themes.
- Restrict ticket creation and editing to Operator and Admin roles only.
### Known Bugs
- Emails that were previously approved remain in the Inbox instead of being removed, even though they appear auto-approved and linked to Jobs.
---
## v0.1.5
### Overview
This release focuses on restoring Microsoft Graph functionality, improving application reliability, and introducing a robust reset mechanism to allow a clean restart of the application state.
### Key Fixes
- Restored Microsoft Graph folder retrieval by fixing an incorrect import that caused a `ModuleNotFoundError`.
- Resolved failures in the automatic mail importer caused by `signal`-based timeout handling by replacing it with a thread-safe mechanism.
- Fixed backend startup crashes and Bad Gateway errors related to the automatic mail importer.
- Implemented missing backend logic required for automatic imports to function correctly.
### New Features
- Added an **Application Reset** option in the Settings page.
- Introduced a confirmation step to prevent accidental resets.
### Improvements & Changes
- Implemented full backend support for a complete application reset.
- Reset now clears all application data, including:
- Approved and pending jobs
- Imported and processed emails
- Daily job runs
- Logs
- User-defined settings (system defaults are preserved)
- Ensured database cleanup runs in the correct order to respect foreign key constraints.
- Aligned automatic mail import logic with the existing manual import flow for consistent behavior.
- Applied the automatic import cutoff date directly via a Microsoft Graph `$filter`, leaving older emails untouched in the inbox.
### Result
- Graph API functionality is fully restored.
- Automatic mail import runs reliably on its configured schedule.
- The application can now be safely reset to a clean, fresh-install state when needed.
---
## v0.1.4
This release focuses on improving backend stability, database reliability, and consistency in object parsing and mail handling.
### Key Changes
- Database migrations for tickets and remarks were stabilized by running each migration in its own transaction scope, preventing closed-connection errors during startup.
- Backend startup issues causing Gunicorn failures and 502 Bad Gateway errors were resolved.
- The title field was fully removed from tickets and remarks, simplifying both backend validation and UI forms to use only descriptive content.
- Manual mail imports were aligned with the **Re-parse all** behavior, ensuring immediate and consistent object detection.
- Object visibility on the Daily Jobs page was corrected for previously approved jobs.
- Manual imports were hardened against Microsoft Graph timeouts by adding DNS preflight checks and safely skipping message moves when Graph is unreachable.
### Improvements
- Eliminated `ResourceClosedError` exceptions during backend boot.
- Increased reliability of migrations and overall application startup.
- Ensured object parsing is consistently re-evaluated on every job run, with correct detection of added or removed objects.
- Prevented internal server errors and Gunicorn worker timeouts caused by long-running external Graph operations.
Overall, v0.1.4 significantly improves robustness, consistency, and fault tolerance across database migrations, job parsing, and manual mail imports.
---
## v0.1.3
### Logging & Stability
- Fixed logging persistence so log entries are consistently stored in the database.
- Resolved cases where certain log events were not stored due to object lifecycle handling.
- Improved reliability of log creation during background/asynchronous processes.
- Corrected log retrieval so stored logs are properly fetched and shown in the web UI.
- Added pagination to the logging overview (20 entries per page).
- Extended the logging view to show all available log fields and fixed missing columns in the UI.
### UI & Table Layout Improvements
- Improved the logging page usability by placing pagination controls at the top and keeping them available at the bottom.
- Increased logging table width to better fit a 1080p layout.
- Fixed column layout so all columns remain in consistent positions regardless of content length.
- Updated status styling to use colored text only within the status column (Success, Warning, Error/Failed, Missed), including clear differentiation for overrides.
- Fixed JavaScript errors in the Daily Jobs popup that prevented rendering.
### Jobs & Daily Jobs Enhancements
- Standardized default sorting for both Jobs and Daily Jobs tables (Customer → Backup → Type → Job name).
- Persisted the Daily Jobs start date setting in the database and ensured it reloads correctly in the Settings UI.
- Corrected missed-status calculation to start from the configured Daily Jobs start date.
- Improved Daily Jobs table readability:
- Moved the number of runs into a dedicated Runs column.
- Prevented layout shifting caused by variable text in the Last result column.
- Restored the original Runs visual representation and adjusted placement for better readability.
- Reduced the Last result column width so only status text is shown and the Runs column remains visible.
### Parsing & Data Normalization
- Stripped retry suffixes like “(Retry 1)”, “(Retry 2)”, etc. from job names so retries don't create separate job identities.
- Extended the NAKIVO parser to support VMware Replication job emails:
- Detects job type (Backup vs Replication) based on email content.
- Improves replication job name parsing.
- Extracts VM names from the Objects/Virtual Machines section.
- Maps overall job status correctly for replication reports.
### Tickets & Remarks (New Feature Area)
- Added database schema for globally unique, persistent tickets linked to job runs for long-term reporting:
- New tables: tickets, ticket_scopes, ticket_job_runs.
- Ticket codes are globally unique using format TYYYYMMDD.NNNN.
- Tickets require at least one customer scope.
- Added database schema for remarks with scoped attachment and persistent linkage to job runs:
- New tables: remarks, remark_scopes, remark_job_runs.
- Implemented a new Tickets page with tabbed navigation (Tickets / Remarks):
- Overviews with filtering.
- Detail views showing scopes, linked job runs, and actions.
- Added indicators in Daily Jobs to show active tickets and/or remarks.
- Added ticket/remark management in the job run popup (create, edit, resolve).
- Introduced consistent icons for tickets, remarks, actions, and status indicators.
- Added backend API endpoints for listing/creating/updating/resolving/linking tickets and remarks, plus an endpoint to retrieve all alerts for a specific job run.
---
## v0.1.2
This release focuses on improved parser support, more robust data cleanup, and a fully reworked in-app logging and object persistence system.
### Parser & support
- Extended the Synology Hyper Backup mail parser with proper recognition of Strato HiDrive backups.
- Added support for parsing job names from the “Backup Task:” field for Strato HiDrive.
- Correct handling of successful runs without listed objects.
- Added a Strato HiDrive example to the parser templates for validation and reference.
### Administration & cleanup
- Introduced an admin-only action to delete all jobs in a single operation.
- Ensured related run mails are moved back to the Inbox when jobs are deleted.
- Fixed foreign key constraint issues by enforcing the correct deletion order:
- run_object_links first
- job_object_links next
- then job runs and jobs
- Stabilized the “Delete all jobs” action to fully clean up all related data.
### Logging (reworked)
- Moved logging away from container/stdout logging to in-app logging.
- Introduced AdminLog-based logging for:
- Mail import
- Auto-approval
- Manual job approval
- Job deletion
- Added detailed logging per imported and auto-approved email.
- Added summary logging at the end of each mail import run.
- Ensured all relevant events are logged exclusively via the AdminLog table and visible on the Logging page.
### Object persistence
- Restored persistence of parsed objects after manual approval of inbox mails.
- Restored persistence of parsed objects during auto-approval (reparse-all).
- Ensured objects from approved mails are:
- Upserted into `customer_objects`
- Linked to jobs via `job_object_links` (with first/last seen tracking)
- Linked to runs via `run_object_links` (with status and error details)
- Added centralized helper logic to ensure consistent object persistence.
- Added an admin-only maintenance action to backfill missing object links for already approved runs.
- Object persistence failures no longer block mail approval.
- Daily Jobs and Run detail views correctly display objects again for both new and historical runs after backfilling.

69
docs/manifest.md Normal file
View File

@ -0,0 +1,69 @@
# Backupchecks repository manifest
This document describes the main directories and important files in the Backupchecks repository.
## Top-level
- `containers/backupchecks/`
Docker build context for the application image. Everything under this directory is copied into the image.
- `docs/`
Documentation that is not baked into the image (design notes, migration policy, etc.).
- `deploy/`
Portainer stack files and deployment notes. These files are NOT copied into the image.
## containers/backupchecks
- `Dockerfile`
Builds the Backupchecks application image.
- Installs Python dependencies from `requirements.txt`.
- Copies the `src/` tree into `/app/src`.
- Starts the app using Gunicorn and the `create_app()` factory.
- `requirements.txt`
Python dependencies for the application.
- `src/backend/app/`
Backend application package.
- `__init__.py`
Application factory (`create_app()`), database initialization and migrations bootstrap.
- `config.py`
Reads configuration from environment variables (database connection, app env, timezone).
- `database.py`
SQLAlchemy database instance (`db`).
- `models.py`
ORM models, including:
- `User` — login user with `username`, optional `email`, `password_hash`, `role`.
- `SystemSettings` — persisted configuration for Graph, folders and import behaviour.
- `auth/`
Authentication and authorization:
- `__init__.py` — login manager setup.
- `routes.py` — login, logout, initial admin setup, password reset placeholder.
- `main/`
Main UI and navigation:
- `routes.py` — dashboard, Inbox, Customers, Jobs, Daily Jobs, Overrides, Reports, Settings, Logging.
- `migrations.py`
In-image SQL migrations. See `docs/migrations.md` for details.
- `src/templates/`
Jinja2 templates used by the Flask application.
- `layout/base.html`
Base layout, navbar, flash messages.
- `auth/*.html`
Authentication templates (login, initial admin setup, password reset request).
- `main/*.html`
Page templates for dashboard, Inbox, Customers, Jobs, Daily Jobs, Overrides, Reports, Settings, Logging.
- `src/static/`
Static assets (CSS, JS, images). Currently minimal, can grow as the UI is developed.

67
docs/migrations.md Normal file
View File

@ -0,0 +1,67 @@
# Database migrations policy
This document describes how database schema changes are handled for Backupchecks.
## Overview
- The baseline schema is defined in `backend/app/models.py`.
- On application startup, the following is executed inside `create_app()`:
1. `db.create_all()` creates any missing tables defined in the models (it does not alter existing tables or add columns to them).
2. `run_migrations()` executes in-image SQL migrations from `backend/app/migrations.py`.
This approach allows:
- Clean databases to be created automatically.
- Existing databases to be upgraded in-place without manual SQL.
- Safe repeated restarts: migrations are idempotent and can be run multiple times.
## Adding migrations
When you change the schema in a way that is not automatically covered by `db.create_all()` (for example,
altering column nullability, adding constraints, backfilling data), follow these steps:
1. Add or adjust the corresponding model(s) in `models.py`.
2. In `migrations.py`:
- Add a new function, for example:
- `def migrate_xyz():`
- Perform the required SQL using `db.get_engine()` and `sqlalchemy.text`.
- Always check the current state first (e.g. whether a column or constraint already exists).
- Call this function from `run_migrations()` in the correct order.
3. Do NOT remove older migration functions. They must remain so that:
- Existing databases can still be upgraded from older versions.
- New installations run all migrations but older ones become no-ops because their checks see that the
changes are already applied.
4. Each migration must be **idempotent**:
- It should detect whether its change is already in place and then exit without error.
- This allows `run_migrations()` to be executed on every startup.
## Current migrations (initial set)
Implemented in `backend/app/migrations.py`:
- `migrate_add_username_to_users()`
- Adds a `username` column to the `users` table if it does not exist.
- Backfills `username` from `email` where possible.
- Sets `username` to `NOT NULL`.
- Adds a UNIQUE constraint on `users.username`.
- `migrate_make_email_nullable()`
- Ensures the `email` column on `users` is nullable.
- If the column is currently `NOT NULL`, the migration executes:
`ALTER TABLE "users" ALTER COLUMN email DROP NOT NULL`.
- `run_migrations()`
- Calls the above migrations in order.
- Logs progress to stdout so changes are visible in container / Portainer logs.
## Future changes
- Every time you introduce a non-trivial schema change, update:
- `backend/app/models.py`
- `backend/app/migrations.py`
- This document (`docs/migrations.md`) — add a short description of the new migration.
- When sharing the repository state (for example in a ZIP), always include the current `migrations.py`
and this document so the migration history and policy are clear.

1
version.txt Normal file
View File

@ -0,0 +1 @@
v0.1.14