# backupchecks/containers/backupchecks/src/backend/app/main/routes_jobs.py
# 468 lines, 16 KiB, Python

from .routes_shared import * # noqa: F401,F403
from .routes_shared import (
_apply_overrides_to_run,
_describe_schedule,
_format_datetime,
_get_ui_timezone_name,
_infer_schedule_map_from_runs,
_schedule_map_to_desc,
_to_amsterdam_date,
)
@main_bp.route("/jobs")
@login_required
@roles_required("admin", "operator", "viewer")
def jobs():
    """List all non-archived jobs, joined with their customer for display.

    Renders main/jobs.html with one dict per job (id, customer_name,
    backup_software, backup_type, job_name) sorted by customer, software,
    type and name. `can_manage_jobs` gates the edit controls in the template.
    """
    # Join with customers for display. NOTE: the result variable must not be
    # named `jobs` — that would shadow this view function inside its own body.
    job_rows = (
        Job.query
        .filter(Job.archived.is_(False))
        .outerjoin(Customer, Customer.id == Job.customer_id)
        .add_columns(
            Job.id,
            Job.backup_software,
            Job.backup_type,
            Job.job_name,
            Customer.name.label("customer_name"),
        )
        .order_by(
            Customer.name.asc().nullslast(),
            Job.backup_software.asc(),
            Job.backup_type.asc(),
            Job.job_name.asc(),
        )
        .all()
    )
    # Depending on SQLAlchemy version, row may be tuple-like; getattr covers
    # rows where the labeled customer_name column is absent.
    rows = [
        {
            "id": row.id,
            "customer_name": getattr(row, "customer_name", None) or "",
            "backup_software": row.backup_software or "",
            "backup_type": row.backup_type or "",
            "job_name": row.job_name or "",
        }
        for row in job_rows
    ]
    can_manage_jobs = current_user.is_authenticated and get_active_role() in ("admin", "operator")
    return render_template(
        "main/jobs.html",
        jobs=rows,
        can_manage_jobs=can_manage_jobs,
    )
@main_bp.route("/jobs/<int:job_id>/archive", methods=["POST"])
@login_required
@roles_required("admin", "operator")
def archive_job(job_id: int):
    """Soft-archive a job, stamping who archived it and when, then redirect
    back to the jobs list. Re-archiving an archived job is a no-op."""
    job = Job.query.get_or_404(job_id)
    if not job.archived:
        job.archived = True
        job.archived_at = datetime.utcnow()
        job.archived_by_user_id = current_user.id
        db.session.commit()
        try:
            # Audit logging is best-effort; it must never fail the request.
            log_admin_event("job_archived", f"Archived job {job.id}", details=f"job_name={job.job_name}")
        except Exception:
            pass
        flash("Job archived.", "success")
    else:
        flash("Job is already archived.", "info")
    return redirect(url_for("main.jobs"))
@main_bp.route("/archived-jobs")
@login_required
@roles_required("admin")
def archived_jobs():
    """Admin-only listing of archived jobs, including when each was archived."""
    archived = (
        Job.query
        .filter(Job.archived.is_(True))
        .outerjoin(Customer, Customer.id == Job.customer_id)
        .add_columns(
            Job.id,
            Job.backup_software,
            Job.backup_type,
            Job.job_name,
            Job.archived_at,
            Customer.name.label("customer_name"),
        )
        .order_by(
            Customer.name.asc().nullslast(),
            Job.backup_software.asc(),
            Job.backup_type.asc(),
            Job.job_name.asc(),
        )
        .all()
    )
    # getattr covers tuple-like rows without the labeled customer_name column.
    out = [
        {
            "id": entry.id,
            "customer_name": getattr(entry, "customer_name", "") or "",
            "backup_software": entry.backup_software or "",
            "backup_type": entry.backup_type or "",
            "job_name": entry.job_name or "",
            "archived_at": _format_datetime(entry.archived_at),
        }
        for entry in archived
    ]
    return render_template("main/archived_jobs.html", jobs=out)
@main_bp.route("/jobs/<int:job_id>/unarchive", methods=["POST"])
@login_required
@roles_required("admin")
def unarchive_job(job_id: int):
    """Restore an archived job by clearing its archive markers, then redirect
    to the archived-jobs list. Restoring a non-archived job is a no-op."""
    job = Job.query.get_or_404(job_id)
    if not job.archived:
        flash("Job is not archived.", "info")
    else:
        job.archived = False
        job.archived_at = None
        job.archived_by_user_id = None
        db.session.commit()
        try:
            # Audit logging is best-effort; it must never fail the request.
            log_admin_event("job_unarchived", f"Unarchived job {job.id}", details=f"job_name={job.job_name}")
        except Exception:
            pass
        flash("Job restored.", "success")
    return redirect(url_for("main.archived_jobs"))
@main_bp.route("/jobs/<int:job_id>")
@login_required
@roles_required("admin", "operator", "viewer")
def job_detail(job_id: int):
    """Job detail page: one paginated page of run history with annotations.

    For the current page of JobRun rows this overlays:
      - status overrides via _apply_overrides_to_run,
      - tickets whose active window covers a run's local date,
      - remarks whose active window covers a run's local date,
    plus open/total ticket and remark counters and a schedule description.
    All DB annotation queries are best-effort: on failure the page renders
    without that annotation rather than erroring.
    """
    job = Job.query.get_or_404(job_id)
    # History pagination
    try:
        page = int(request.args.get("page", "1"))
    except ValueError:
        page = 1
    if page < 1:
        page = 1
    per_page = 20
    query = JobRun.query.filter_by(job_id=job.id)
    total_items = query.count()
    total_pages = max(1, math.ceil(total_items / per_page)) if total_items else 1
    if page > total_pages:
        # Clamp an out-of-range page to the last page instead of failing.
        page = total_pages
    runs = (
        query.order_by(JobRun.run_at.desc().nullslast(), JobRun.id.desc())
        .offset((page - 1) * per_page)
        .limit(per_page)
        .all()
    )
    # Tickets: mark runs that fall within the ticket active window
    ticket_rows = []
    ticket_open_count = 0
    ticket_total_count = 0
    remark_rows = []
    remark_open_count = 0
    remark_total_count = 0
    run_dates = []
    # run id -> local (Amsterdam) calendar date of the run; falls back to
    # today's local date when run_at is unset.
    run_date_map = {}
    for r in runs:
        rd = _to_amsterdam_date(r.run_at) or _to_amsterdam_date(datetime.utcnow())
        run_date_map[r.id] = rd
        if rd:
            run_dates.append(rd)
    if run_dates:
        # Tickets scoped to this job whose active window overlaps the
        # [min_date, max_date] range covered by this page of runs.
        min_date = min(run_dates)
        max_date = max(run_dates)
        try:
            rows = (
                db.session.execute(
                    text(
                        """
                        SELECT t.active_from_date, t.resolved_at, t.ticket_code
                        FROM tickets t
                        JOIN ticket_scopes ts ON ts.ticket_id = t.id
                        WHERE ts.job_id = :job_id
                          AND t.active_from_date <= :max_date
                          AND (
                            COALESCE(ts.resolved_at, t.resolved_at) IS NULL
                            OR ((COALESCE(ts.resolved_at, t.resolved_at) AT TIME ZONE 'UTC' AT TIME ZONE :ui_tz)::date) >= :min_date
                          )
                        """
                    ),
                    {"job_id": job.id, "min_date": min_date,
                     "ui_tz": _get_ui_timezone_name(), "max_date": max_date},
                )
                .mappings()
                .all()
            )
            for rr in rows:
                active_from = rr.get("active_from_date")
                resolved_at = rr.get("resolved_at")
                # Convert the UTC resolved timestamp to a local calendar date
                # for comparison against per-run local dates below.
                resolved_date = _to_amsterdam_date(resolved_at) if resolved_at else None
                ticket_rows.append({"active_from_date": active_from, "resolved_date": resolved_date, "ticket_code": rr.get("ticket_code")})
        except Exception:
            # Best-effort: render without ticket annotations on query failure.
            ticket_rows = []
    if run_dates:
        # Same overlap query for remarks; active_from_date falls back to the
        # remark's start_date converted into the UI timezone.
        min_date = min(run_dates)
        max_date = max(run_dates)
        try:
            rows = (
                db.session.execute(
                    text(
                        """
                        SELECT COALESCE(
                                 r.active_from_date,
                                 ((r.start_date AT TIME ZONE 'UTC' AT TIME ZONE :ui_tz)::date)
                               ) AS active_from_date,
                               r.resolved_at,
                               r.title,
                               r.body
                        FROM remarks r
                        JOIN remark_scopes rs ON rs.remark_id = r.id
                        WHERE rs.job_id = :job_id
                          AND COALESCE(
                                r.active_from_date,
                                ((r.start_date AT TIME ZONE 'UTC' AT TIME ZONE :ui_tz)::date)
                              ) <= :max_date
                          AND (
                            r.resolved_at IS NULL
                            OR ((r.resolved_at AT TIME ZONE 'UTC' AT TIME ZONE :ui_tz)::date) >= :min_date
                          )
                        """
                    ),
                    {"job_id": job.id, "min_date": min_date,
                     "ui_tz": _get_ui_timezone_name(), "max_date": max_date},
                )
                .mappings()
                .all()
            )
            for rr in rows:
                active_from = rr.get("active_from_date")
                resolved_at = rr.get("resolved_at")
                resolved_date = _to_amsterdam_date(resolved_at) if resolved_at else None
                remark_rows.append({"active_from_date": active_from, "resolved_date": resolved_date, "title": rr.get("title"), "body": rr.get("body")})
        except Exception:
            # Best-effort: render without remark annotations on query failure.
            remark_rows = []
    # Header counters: total and still-open tickets scoped to this job.
    try:
        ticket_total_count = (
            db.session.execute(
                text(
                    """
                    SELECT COUNT(*)
                    FROM tickets t
                    JOIN ticket_scopes ts ON ts.ticket_id = t.id
                    WHERE ts.job_id = :job_id
                    """
                ),
                {"job_id": job.id},
            ).scalar() or 0
        )
        ticket_open_count = (
            db.session.execute(
                text(
                    """
                    SELECT COUNT(*)
                    FROM tickets t
                    JOIN ticket_scopes ts ON ts.ticket_id = t.id
                    WHERE ts.job_id = :job_id
                      AND t.resolved_at IS NULL
                    """
                ),
                {"job_id": job.id},
            ).scalar() or 0
        )
    except Exception:
        ticket_total_count = 0
        ticket_open_count = 0
    # Same counters for remarks.
    try:
        remark_total_count = (
            db.session.execute(
                text(
                    """
                    SELECT COUNT(*)
                    FROM remarks r
                    JOIN remark_scopes rs ON rs.remark_id = r.id
                    WHERE rs.job_id = :job_id
                    """
                ),
                {"job_id": job.id},
            ).scalar() or 0
        )
        remark_open_count = (
            db.session.execute(
                text(
                    """
                    SELECT COUNT(*)
                    FROM remarks r
                    JOIN remark_scopes rs ON rs.remark_id = r.id
                    WHERE rs.job_id = :job_id
                      AND r.resolved_at IS NULL
                    """
                ),
                {"job_id": job.id},
            ).scalar() or 0
        )
    except Exception:
        remark_total_count = 0
        remark_open_count = 0
    # Build the template-facing history rows, one dict per run on this page.
    history_rows = []
    for r in runs:
        status_display, override_applied, _override_level, _ov_id, _ov_reason = _apply_overrides_to_run(job, r)
        rd = run_date_map.get(r.id)
        run_day = ""
        if rd:
            # rd is an Amsterdam-local date
            _days = ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"]
            try:
                run_day = _days[int(rd.weekday())]
            except Exception:
                run_day = ""
        has_ticket = False
        has_remark = False
        ticket_codes = []
        remark_items = []
        if rd and ticket_rows:
            # A ticket covers this run when active_from <= run date and the
            # ticket is unresolved or resolved on/after the run date.
            for tr in ticket_rows:
                af = tr.get("active_from_date")
                resd = tr.get("resolved_date")
                if af and af <= rd and (resd is None or resd >= rd):
                    has_ticket = True
                    code = (tr.get("ticket_code") or "").strip()
                    if code and code not in ticket_codes:
                        # De-duplicate codes while preserving order.
                        ticket_codes.append(code)
        if rd and remark_rows:
            # Same active-window check for remarks.
            for rr in remark_rows:
                af = rr.get("active_from_date")
                resd = rr.get("resolved_date")
                if af and af <= rd and (resd is None or resd >= rd):
                    has_remark = True
                    title = (rr.get("title") or "").strip()
                    body = (rr.get("body") or "").strip()
                    remark_items.append({"title": title, "body": body})
        history_rows.append(
            {
                "id": r.id,
                "run_day": run_day,
                "run_at": _format_datetime(r.run_at),
                "status": status_display or "",
                "remark": r.remark or "",
                "missed": bool(r.missed),
                "override_applied": bool(override_applied),
                "has_ticket": bool(has_ticket),
                "has_remark": bool(has_remark),
                "ticket_codes": ticket_codes,
                "remark_items": remark_items,
                "mail_message_id": r.mail_message_id,
                "reviewed_by": (r.reviewed_by.username if getattr(r, "reviewed_by", None) else ""),
                "reviewed_at": _format_datetime(r.reviewed_at) if r.reviewed_at else "",
            }
        )
    has_prev = page > 1
    has_next = page < total_pages
    can_manage_jobs = current_user.is_authenticated and get_active_role() in ("admin", "operator")
    # Schedule description: use the configured schedule when present,
    # otherwise fall back to one inferred from past runs. The inferred map
    # is loaded in both branches so the template always has it.
    schedule_map = None
    schedule_desc = _describe_schedule(job)
    if schedule_desc.startswith("No schedule configured"):
        schedule_map = _infer_schedule_map_from_runs(job.id)
        schedule_desc = _schedule_map_to_desc(schedule_map)
    else:
        schedule_map = _infer_schedule_map_from_runs(job.id)
    # For convenience, also load customer
    customer = None
    if job.customer_id:
        customer = Customer.query.get(job.customer_id)
    return render_template(
        "main/job_detail.html",
        job=job,
        customer=customer,
        schedule_desc=schedule_desc,
        schedule_map=schedule_map,
        history_rows=history_rows,
        ticket_open_count=int(ticket_open_count or 0),
        ticket_total_count=int(ticket_total_count or 0),
        remark_open_count=int(remark_open_count or 0),
        remark_total_count=int(remark_total_count or 0),
        page=page,
        total_pages=total_pages,
        has_prev=has_prev,
        has_next=has_next,
        can_manage_jobs=can_manage_jobs,
    )
@main_bp.route("/jobs/<int:job_id>/delete", methods=["POST"])
@login_required
@roles_required("admin", "operator")
def job_delete(job_id: int):
    """Hard-delete a job after returning its linked mails to the inbox and
    clearing auxiliary link tables that may lack ON DELETE CASCADE."""
    job = Job.query.get_or_404(job_id)
    try:
        # Collect run ids (for FK cleanup) and linked mail ids up front.
        run_ids = [run.id for run in job.runs if run.id is not None]
        mail_message_ids = [run.mail_message_id for run in job.runs if run.mail_message_id]
        # Put related mails back into the inbox and unlink them from the job.
        if mail_message_ids:
            for msg in MailMessage.query.filter(MailMessage.id.in_(mail_message_ids)).all():
                if hasattr(msg, "location"):
                    msg.location = "inbox"
                msg.job_id = None
        # Older schemas may miss ON DELETE CASCADE on run_object_links;
        # clear it manually so job_runs deletion cannot be blocked.
        if run_ids:
            db.session.execute(
                text("DELETE FROM run_object_links WHERE run_id IN :run_ids").bindparams(
                    bindparam("run_ids", expanding=True)
                ),
                {"run_ids": run_ids},
            )
        # Same manual cleanup for job_object_links before deleting the job.
        if job.id is not None:
            db.session.execute(
                text("DELETE FROM job_object_links WHERE job_id = :job_id"),
                {"job_id": job.id},
            )
        db.session.delete(job)
        db.session.commit()
        flash("Job deleted. Related mails are returned to the inbox.", "success")
    except Exception as exc:
        # Any failure rolls back the whole cleanup so nothing is half-deleted.
        db.session.rollback()
        print(f"[jobs] Failed to delete job: {exc}")
        flash("Failed to delete job.", "danger")
    return redirect(url_for("main.jobs"))