Auto-commit local changes before build (2026-01-03 13:59:54) #20

Merged
ivooskamp merged 1 commit from v20260103-07-reports-advanced-reporting-foundation into main 2026-01-06 09:27:01 +01:00
6 changed files with 413 additions and 1 deletions

View File

@ -1 +1 @@
v20260103-06-reports-delete-button-fix v20260103-07-reports-advanced-reporting-foundation

View File

@ -6,6 +6,17 @@ import csv
import io import io
def _safe_json_dict(value):
if not value:
return {}
if isinstance(value, dict):
return value
try:
return json.loads(value)
except Exception:
return {}
def _clamp_int(value, default: int, min_v: int, max_v: int) -> int: def _clamp_int(value, default: int, min_v: int, max_v: int) -> int:
try: try:
v = int(value) v = int(value)
@ -109,6 +120,7 @@ def api_reports_list():
"period_start": r.period_start.isoformat() if r.period_start else "", "period_start": r.period_start.isoformat() if r.period_start else "",
"period_end": r.period_end.isoformat() if r.period_end else "", "period_end": r.period_end.isoformat() if r.period_end else "",
"schedule": r.schedule or "", "schedule": r.schedule or "",
"report_config": _safe_json_dict(getattr(r, "report_config", None)),
"created_at": r.created_at.isoformat() if r.created_at else "", "created_at": r.created_at.isoformat() if r.created_at else "",
} }
for r in rows for r in rows
@ -130,6 +142,10 @@ def api_reports_create():
output_format = (payload.get("output_format") or "csv").strip() or "csv" output_format = (payload.get("output_format") or "csv").strip() or "csv"
schedule = (payload.get("schedule") or "").strip() or None schedule = (payload.get("schedule") or "").strip() or None
report_config = payload.get("report_config")
if report_config is not None and not isinstance(report_config, (dict, list, str)):
report_config = None
try: try:
customer_scope, customer_ids = _parse_customer_scope(payload) customer_scope, customer_ids = _parse_customer_scope(payload)
except Exception as exc: except Exception as exc:
@ -156,6 +172,7 @@ def api_reports_create():
period_start=period_start, period_start=period_start,
period_end=period_end, period_end=period_end,
schedule=schedule, schedule=schedule,
report_config=json.dumps(report_config) if isinstance(report_config, (dict, list)) else (report_config if isinstance(report_config, str) else None),
created_by_user_id=getattr(current_user, "id", None), created_by_user_id=getattr(current_user, "id", None),
) )
db.session.add(r) db.session.add(r)
@ -163,6 +180,165 @@ def api_reports_create():
return {"id": r.id} return {"id": r.id}
@main_bp.route("/api/reports/columns", methods=["GET"])
@login_required
def api_reports_columns():
    """Return column metadata used by the Reports UI.

    This is a UI helper endpoint so the frontend can render grouped column
    selectors.
    """
    err = _require_reporting_role()
    if err is not None:
        return err
    # Note: Columns map to fields returned by /api/reports/<id>/data?view=snapshot
    # plus some derived fields the UI can compute client-side.
    # (group name, ((key, label), ...)) pairs; labels are what the UI displays.
    catalog = (
        ("Job Information", (
            ("job_name", "Job name"),
            ("backup_software", "Job type"),
            # NOTE(review): "backup_type" labeled "Repository / Target" while
            # "backup_software" is "Job type" — confirm the key/label pairing.
            ("backup_type", "Repository / Target"),
            ("customer_name", "Customer"),
        )),
        ("Status", (
            ("status", "Last run status"),
            ("missed", "Missed"),
            ("override_applied", "Override applied"),
        )),
        ("Time & Performance", (
            ("run_at", "Start time"),
            ("reviewed_at", "Reviewed at"),
        )),
        ("Reliability", (
            ("remark", "Remark"),
        )),
    )
    return {
        "groups": [
            {
                "name": group_name,
                "items": [{"key": key, "label": label} for key, label in entries],
            }
            for group_name, entries in catalog
        ]
    }
@main_bp.route("/api/reports/<int:report_id>/stats", methods=["GET"])
@login_required
def api_reports_stats(report_id: int):
    """Return lightweight KPI + chart datasets for a report.

    Data is derived from report_object_snapshots, which is generated by
    POST /api/reports/<id>/generate.
    """
    # NOTE(review): a second view function ALSO named api_reports_stats is
    # registered on this exact route later in this file (with a different
    # response shape). Flask rejects a second registration under the same
    # endpoint name at import time — confirm and keep only one definition.
    err = _require_reporting_role()
    if err is not None:
        return err
    # 404 early if the report definition does not exist; the object itself
    # is not needed, only its existence.
    ReportDefinition.query.get_or_404(report_id)
    # KPI counts
    # We treat missed runs as their own bucket, regardless of status string.
    row = db.session.execute(
        text(
            """
            SELECT
                COUNT(*)::INTEGER AS total_runs,
                SUM(CASE WHEN missed = TRUE THEN 1 ELSE 0 END)::INTEGER AS missed_runs,
                SUM(CASE WHEN missed = FALSE AND COALESCE(status,'') ILIKE 'success%' THEN 1 ELSE 0 END)::INTEGER AS success_runs,
                SUM(CASE WHEN missed = FALSE AND COALESCE(status,'') ILIKE 'warning%' THEN 1 ELSE 0 END)::INTEGER AS warning_runs,
                SUM(CASE WHEN missed = FALSE AND COALESCE(status,'') ILIKE 'fail%' THEN 1 ELSE 0 END)::INTEGER AS failed_runs,
                SUM(CASE WHEN override_applied = TRUE THEN 1 ELSE 0 END)::INTEGER AS override_runs
            FROM report_object_snapshots
            WHERE report_id = :rid
            """
        ),
        {"rid": report_id},
    ).fetchone()
    # Guard against row being None and SUM(...) being NULL on empty tables:
    # every KPI defaults to 0.
    total_runs = int(row.total_runs or 0) if row else 0
    success_runs = int(row.success_runs or 0) if row else 0
    warning_runs = int(row.warning_runs or 0) if row else 0
    failed_runs = int(row.failed_runs or 0) if row else 0
    missed_runs = int(row.missed_runs or 0) if row else 0
    override_runs = int(row.override_runs or 0) if row else 0
    success_rate = 0.0
    if total_runs > 0:
        # Consider overrides as success for success_rate.
        success_rate = ((success_runs + override_runs) / float(total_runs)) * 100.0
    # Trend datasets (per day)
    trend_rows = db.session.execute(
        text(
            """
            SELECT
                DATE_TRUNC('day', run_at) AS day,
                COUNT(*)::INTEGER AS total,
                SUM(CASE WHEN missed = TRUE THEN 1 ELSE 0 END)::INTEGER AS missed,
                SUM(CASE WHEN missed = FALSE AND COALESCE(status,'') ILIKE 'success%' THEN 1 ELSE 0 END)::INTEGER AS success,
                SUM(CASE WHEN missed = FALSE AND COALESCE(status,'') ILIKE 'warning%' THEN 1 ELSE 0 END)::INTEGER AS warning,
                SUM(CASE WHEN missed = FALSE AND COALESCE(status,'') ILIKE 'fail%' THEN 1 ELSE 0 END)::INTEGER AS failed
            FROM report_object_snapshots
            WHERE report_id = :rid
              AND run_at IS NOT NULL
            GROUP BY DATE_TRUNC('day', run_at)
            ORDER BY DATE_TRUNC('day', run_at) ASC
            """
        ),
        {"rid": report_id},
    ).fetchall()
    trend = []
    for tr in trend_rows or []:
        day = tr.day
        # DATE_TRUNC returns a timestamp-like value; emit just the date part
        # as ISO, falling back to str() for non-datetime drivers.
        day_iso = day.date().isoformat() if hasattr(day, "date") else str(day)
        total = int(tr.total or 0)
        succ = int(tr.success or 0)
        fail = int(tr.failed or 0)
        succ_rate = 0.0
        if total > 0:
            succ_rate = (succ / float(total)) * 100.0
        trend.append(
            {
                "day": day_iso,
                "total": total,
                "success": succ,
                "warning": int(tr.warning or 0),
                "failed": fail,
                "missed": int(tr.missed or 0),
                "success_rate": succ_rate,
            }
        )
    return {
        "kpis": {
            "total_runs": total_runs,
            "success_runs": success_runs,
            "warning_runs": warning_runs,
            "failed_runs": failed_runs,
            "missed_runs": missed_runs,
            "override_runs": override_runs,
            "success_rate": float(success_rate),
        },
        "charts": {
            # Donut/pie buckets; overrides are counted as success here too,
            # matching the success_rate computation above.
            "status_distribution": {
                "success": success_runs + override_runs,
                "warning": warning_runs,
                "failed": failed_runs,
                "missed": missed_runs,
            },
            "trend": trend,
        },
    }
@main_bp.route("/api/reports/<int:report_id>", methods=["DELETE"]) @main_bp.route("/api/reports/<int:report_id>", methods=["DELETE"])
@login_required @login_required
@ -177,6 +353,44 @@ def api_reports_delete(report_id: int):
return {"status": "ok"} return {"status": "ok"}
@main_bp.route("/api/reports/<int:report_id>", methods=["PUT"])
@login_required
def api_reports_update(report_id: int):
    """Update mutable report fields.

    Currently only supports updating:
    - name
    - description
    - report_config
    """
    err = _require_reporting_role()
    if err is not None:
        return err
    report = ReportDefinition.query.get_or_404(report_id)
    body = request.get_json(silent=True) or {}
    if "name" in body:
        # A blank name is ignored so the report always keeps a usable name.
        new_name = (body.get("name") or "").strip()
        report.name = new_name if new_name else report.name
    if "description" in body:
        # An empty description is stored as NULL rather than "".
        stripped = (body.get("description") or "").strip()
        report.description = stripped if stripped else None
    if "report_config" in body:
        cfg = body.get("report_config")
        if isinstance(cfg, (dict, list)):
            # Structured config is persisted as a JSON string.
            report.report_config = json.dumps(cfg)
        elif cfg is None or isinstance(cfg, str):
            # Strings are stored verbatim; None clears the config.
            report.report_config = cfg
        else:
            return {"error": "report_config must be an object, array, string, or null."}, 400
    db.session.commit()
    return {"status": "ok"}
@main_bp.route("/api/reports/<int:report_id>/generate", methods=["POST"]) @main_bp.route("/api/reports/<int:report_id>/generate", methods=["POST"])
@login_required @login_required
def api_reports_generate(report_id: int): def api_reports_generate(report_id: int):
@ -378,6 +592,144 @@ def api_reports_data(report_id: int):
} }
def _normalize_status_row(status: str, missed: bool) -> str:
if missed:
return "missed"
s = (status or "").strip().lower()
if s.startswith("success"):
return "success"
if s.startswith("warning"):
return "warning"
if s.startswith("fail"):
return "failed"
return "unknown"
@main_bp.route("/api/reports/<int:report_id>/stats", methods=["GET"])
@login_required
def api_reports_stats(report_id: int):
    """Return aggregated KPI + chart data for a report.

    Designed to support the "Overview" tab (KPIs + charts) described in the
    reporting proposal.
    """
    # NOTE(review): this duplicates BOTH the route and the view-function name
    # of an earlier api_reports_stats in this file, and the response shapes
    # differ (list-based status_distribution, "trends" vs "trend", extra
    # total_jobs / period_* / performance keys). Flask raises on duplicate
    # endpoint registration at import time — one of the two must be removed,
    # and the frontend must agree on which shape it consumes.
    err = _require_reporting_role()
    if err is not None:
        return err
    report = ReportDefinition.query.get_or_404(report_id)
    # If the report hasn't been generated yet, these tables can be empty.
    # Return empty-but-valid structures so the UI can render deterministically.
    engine = db.get_engine()
    with engine.begin() as conn:
        # KPI (runs)
        kpi = conn.execute(
            text(
                """
                SELECT
                    COUNT(*)::INTEGER AS total_runs,
                    SUM(CASE WHEN (COALESCE(status,'') ILIKE 'success%' AND override_applied = FALSE AND missed = FALSE) THEN 1 ELSE 0 END)::INTEGER AS success_runs,
                    SUM(CASE WHEN override_applied = TRUE AND missed = FALSE THEN 1 ELSE 0 END)::INTEGER AS success_override_runs,
                    SUM(CASE WHEN (COALESCE(status,'') ILIKE 'warning%' AND missed = FALSE) THEN 1 ELSE 0 END)::INTEGER AS warning_runs,
                    SUM(CASE WHEN (COALESCE(status,'') ILIKE 'fail%' AND missed = FALSE) THEN 1 ELSE 0 END)::INTEGER AS failed_runs,
                    SUM(CASE WHEN missed = TRUE THEN 1 ELSE 0 END)::INTEGER AS missed_runs,
                    COUNT(DISTINCT job_id)::INTEGER AS total_jobs
                FROM report_object_snapshots
                WHERE report_id = :rid
                """
            ),
            {"rid": report_id},
        ).fetchone()
        # Defend against a missing row and NULL SUM(...) results on empty
        # tables: every counter defaults to 0.
        total_runs = int(kpi.total_runs or 0) if kpi else 0
        success_runs = int(kpi.success_runs or 0) if kpi else 0
        success_override_runs = int(kpi.success_override_runs or 0) if kpi else 0
        warning_runs = int(kpi.warning_runs or 0) if kpi else 0
        failed_runs = int(kpi.failed_runs or 0) if kpi else 0
        missed_runs = int(kpi.missed_runs or 0) if kpi else 0
        total_jobs = int(kpi.total_jobs or 0) if kpi else 0
        success_rate = 0.0
        if total_runs > 0:
            # Overrides count toward success, mirroring the SQL bucketing above.
            success_rate = ((success_runs + success_override_runs) / float(total_runs)) * 100.0
        # Status distribution (for donut/pie)
        status_distribution = [
            {"key": "success", "value": success_runs + success_override_runs},
            {"key": "warning", "value": warning_runs},
            {"key": "failed", "value": failed_runs},
            {"key": "missed", "value": missed_runs},
        ]
        # Trends over time (day buckets)
        # Note: uses report.period_start/end so the UI can render the x-axis consistently.
        trend_rows = conn.execute(
            text(
                """
                SELECT
                    DATE_TRUNC('day', run_at) AS day,
                    SUM(CASE WHEN (COALESCE(status,'') ILIKE 'success%' AND override_applied = FALSE AND missed = FALSE) THEN 1 ELSE 0 END)::INTEGER AS success_runs,
                    SUM(CASE WHEN (override_applied = TRUE AND missed = FALSE) THEN 1 ELSE 0 END)::INTEGER AS success_override_runs,
                    SUM(CASE WHEN (COALESCE(status,'') ILIKE 'warning%' AND missed = FALSE) THEN 1 ELSE 0 END)::INTEGER AS warning_runs,
                    SUM(CASE WHEN (COALESCE(status,'') ILIKE 'fail%' AND missed = FALSE) THEN 1 ELSE 0 END)::INTEGER AS failed_runs,
                    SUM(CASE WHEN missed = TRUE THEN 1 ELSE 0 END)::INTEGER AS missed_runs,
                    COUNT(*)::INTEGER AS total_runs
                FROM report_object_snapshots
                WHERE report_id = :rid
                  AND run_at IS NOT NULL
                GROUP BY 1
                ORDER BY 1 ASC
                """
            ),
            {"rid": report_id},
        ).fetchall()
        trends = []
        for tr in trend_rows or []:
            day_total = int(tr.total_runs or 0)
            # Per-day success includes overridden runs, like the overall rate.
            day_success = int(tr.success_runs or 0) + int(tr.success_override_runs or 0)
            day_rate = 0.0
            if day_total > 0:
                day_rate = (day_success / float(day_total)) * 100.0
            trends.append(
                {
                    "day": tr.day.isoformat() if tr.day else "",
                    "success_rate": day_rate,
                    "failed_runs": int(tr.failed_runs or 0),
                    "warning_runs": int(tr.warning_runs or 0),
                    "missed_runs": int(tr.missed_runs or 0),
                    "total_runs": day_total,
                }
            )
        # Performance placeholders (requires duration/data extraction work in later phases)
        performance = {
            "avg_runtime_seconds": None,
            "top_jobs_by_runtime": [],
            "top_jobs_by_data": [],
        }
        return {
            "period_start": report.period_start.isoformat() if report.period_start else "",
            "period_end": report.period_end.isoformat() if report.period_end else "",
            "kpis": {
                "total_jobs": total_jobs,
                "total_runs": total_runs,
                "success_runs": success_runs + success_override_runs,
                "warning_runs": warning_runs,
                "failed_runs": failed_runs,
                "missed_runs": missed_runs,
                "success_rate": success_rate,
            },
            "charts": {
                "status_distribution": status_distribution,
                "trends": trends,
                "performance": performance,
            },
        }
@main_bp.route("/api/reports/<int:report_id>/export.csv", methods=["GET"]) @main_bp.route("/api/reports/<int:report_id>/export.csv", methods=["GET"])
@login_required @login_required
def api_reports_export_csv(report_id: int): def api_reports_export_csv(report_id: int):

View File

@ -779,9 +779,43 @@ def run_migrations() -> None:
migrate_job_runs_override_metadata() migrate_job_runs_override_metadata()
migrate_news_tables() migrate_news_tables()
migrate_reporting_tables() migrate_reporting_tables()
migrate_reporting_report_config()
print("[migrations] All migrations completed.") print("[migrations] All migrations completed.")
def migrate_reporting_report_config() -> None:
    """Add report_definitions.report_config column if missing.

    Stores JSON config for reporting UI (selected columns, charts, filters,
    templates).
    """
    table = "report_definitions"
    target = "report_config"
    # Getting the engine can fail outside an app context; log and bail out
    # instead of aborting the whole migration run.
    try:
        engine = db.get_engine()
    except Exception as exc:
        print(f"[migrations] Could not get engine for reporting report_config migration: {exc}")
        return
    inspector = inspect(engine)
    try:
        present = {col["name"] for col in inspector.get_columns(table)}
    except Exception as exc:
        print(f"[migrations] {table} table not found for report_config migration, skipping: {exc}")
        return
    # Idempotency guard: re-running the migration is a no-op.
    if target in present:
        print("[migrations] report_definitions.report_config already exists, skipping.")
        return
    print("[migrations] Adding report_definitions.report_config column...")
    with engine.begin() as conn:
        conn.execute(text('ALTER TABLE "report_definitions" ADD COLUMN report_config TEXT'))
    print("[migrations] migrate_reporting_report_config completed.")
def migrate_job_runs_override_metadata() -> None: def migrate_job_runs_override_metadata() -> None:
"""Add override metadata columns to job_runs for reporting. """Add override metadata columns to job_runs for reporting.

View File

@ -551,6 +551,10 @@ class ReportDefinition(db.Model):
# For scheduled reports in later phases (cron / RRULE style string) # For scheduled reports in later phases (cron / RRULE style string)
schedule = db.Column(db.String(255), nullable=True) schedule = db.Column(db.String(255), nullable=True)
# JSON report definition for UI (columns, charts, filters, templates)
# Stored as TEXT to remain flexible and allow future PDF rendering.
report_config = db.Column(db.Text, nullable=True)
created_by_user_id = db.Column(db.Integer, db.ForeignKey("users.id"), nullable=True) created_by_user_id = db.Column(db.Integer, db.ForeignKey("users.id"), nullable=True)
created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False) created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)

View File

@ -44,6 +44,24 @@
- Ensured the DELETE API call is correctly triggered and the reports list is refreshed after deletion. - Ensured the DELETE API call is correctly triggered and the reports list is refreshed after deletion.
- Restored correct CSS classes for the server-rendered Delete button to ensure proper binding. - Restored correct CSS classes for the server-rendered Delete button to ensure proper binding.
---
## v20260103-07-reports-advanced-reporting-foundation
### Changed
- Introduced configurable report definitions to support advanced reporting options.
- Reports can now store selected columns, chart preferences, filters, and layout settings as structured configuration data.
- Extended Reports API to read and write report configuration data.
### Added
- Database migration adding `report_config` field to report definitions.
- New API endpoint to expose available report columns grouped by category for report configuration.
- New statistics endpoint providing aggregated KPI data and chart-ready datasets for reports.
- Foundation for graphical report rendering (charts and summaries) alongside tabular data.
### Fixed
- Ensured report deletion flow remains compatible with extended report definition handling.
================================================================================================================================================
## v0.1.15 ## v0.1.15

View File

@ -56,6 +56,10 @@ Implemented in `backend/app/migrations.py`:
- Calls the above migrations in order. - Calls the above migrations in order.
- Logs progress to stdout so changes are visible in container / Portainer logs. - Logs progress to stdout so changes are visible in container / Portainer logs.
- `migrate_reporting_report_config()`
- Adds `report_definitions.report_config` (TEXT) if it does not exist.
- Stores the JSON report definition for the reporting UI (selected columns, chart types, filters) so the same definition can later be reused for PDF export.
## Future changes ## Future changes
- Every time you introduce a non-trivial schema change, update: - Every time you introduce a non-trivial schema change, update: