Apply pending updates excluding .last-branch

This commit is contained in:
Ivo Oskamp 2026-04-02 17:04:08 +02:00
parent bbfb554411
commit 7874ceecb6
15 changed files with 905 additions and 132 deletions

View File

@ -137,7 +137,7 @@ def api_job_run_alerts(run_id: int):
db.session.execute(
text(
"""
SELECT DISTINCT r.id, r.body, r.start_date, r.resolved_at, r.active_from_date
SELECT DISTINCT r.id, r.body, r.source, r.ticket_id, r.start_date, r.resolved_at, r.active_from_date
FROM remarks r
JOIN remark_job_runs rjr ON rjr.remark_id = r.id
WHERE rjr.job_run_id = :run_id
@ -171,6 +171,8 @@ def api_job_run_alerts(run_id: int):
{
"id": remark_id,
"body": body,
"source": (rr.get("source") or "manual"),
"ticket_id": rr.get("ticket_id"),
"start_date": _format_datetime(rr.get("start_date")) if rr.get("start_date") else "-",
"active_from_date": str(rr.get("active_from_date")) if rr.get("active_from_date") else "",
"resolved_at": _format_datetime(rr.get("resolved_at")) if rr.get("resolved_at") else "",
@ -185,7 +187,7 @@ def api_job_run_alerts(run_id: int):
db.session.execute(
text(
"""
SELECT DISTINCT r.id, r.body, r.start_date, r.resolved_at, r.active_from_date
SELECT DISTINCT r.id, r.body, r.source, r.ticket_id, r.start_date, r.resolved_at, r.active_from_date
FROM remarks r
JOIN remark_scopes rs ON rs.remark_id = r.id
WHERE rs.job_id = :job_id
@ -228,6 +230,8 @@ def api_job_run_alerts(run_id: int):
{
"id": remark_id,
"body": body,
"source": (rr.get("source") or "manual"),
"ticket_id": rr.get("ticket_id"),
"start_date": _format_datetime(rr.get("start_date")) if rr.get("start_date") else "-",
"active_from_date": str(rr.get("active_from_date")) if rr.get("active_from_date") else "",
"resolved_at": _format_datetime(rr.get("resolved_at")) if rr.get("resolved_at") else "",
@ -262,8 +266,6 @@ def api_tickets():
customer_id = 0
query = Ticket.query
if active:
query = query.filter(Ticket.resolved_at.is_(None))
if q:
like_q = f"%{q}%"
query = query.filter(
@ -273,9 +275,43 @@ def api_tickets():
if customer_id:
query = query.join(TicketScope, TicketScope.ticket_id == Ticket.id).filter(TicketScope.customer_id == customer_id)
query = query.order_by(Ticket.start_date.desc()).limit(500)
tickets_raw = query.order_by(Ticket.start_date.desc()).limit(500).all()
ticket_ids = [t.id for t in tickets_raw]
scope_total_map = {}
scope_open_map = {}
if ticket_ids:
try:
rows = (
db.session.execute(
text(
"""
SELECT
ticket_id,
COUNT(*) AS total_count,
SUM(CASE WHEN resolved_at IS NULL THEN 1 ELSE 0 END) AS open_count
FROM ticket_scopes
WHERE ticket_id = ANY(:ids)
GROUP BY ticket_id
"""
),
{"ids": ticket_ids},
)
.fetchall()
)
for tid, total_cnt, open_cnt in rows:
scope_total_map[int(tid)] = int(total_cnt or 0)
scope_open_map[int(tid)] = int(open_cnt or 0)
except Exception:
scope_total_map = {}
scope_open_map = {}
items = []
for t in query.all():
for t in tickets_raw:
total_scopes = int(scope_total_map.get(t.id, 0) or 0)
open_scopes = int(scope_open_map.get(t.id, 0) or 0)
active_effective = (t.resolved_at is None) and (total_scopes == 0 or open_scopes > 0)
if active and not active_effective:
continue
items.append(
{
"id": t.id,
@ -284,7 +320,7 @@ def api_tickets():
"active_from_date": str(getattr(t, "active_from_date", "") or ""),
"start_date": _format_datetime(t.start_date),
"resolved_at": _format_datetime(t.resolved_at) if t.resolved_at else "",
"active": (t.resolved_at is None and TicketScope.query.filter_by(ticket_id=t.id, resolved_at=None).first() is not None),
"active": bool(active_effective),
}
)
return jsonify({"status": "ok", "tickets": items})
@ -538,6 +574,8 @@ def api_remarks():
{
"id": r.id,
"body": r.body or "",
"source": (getattr(r, "source", None) or "manual"),
"ticket_id": getattr(r, "ticket_id", None),
"active_from_date": str(getattr(r, "active_from_date", "") or ""),
"start_date": _format_datetime(r.start_date) if r.start_date else "-",
"resolved_at": _format_datetime(r.resolved_at) if r.resolved_at else "",
@ -569,6 +607,8 @@ def api_remarks():
remark = Remark(
title=None,
body=body,
source="manual",
ticket_id=None,
active_from_date=_to_amsterdam_date(run.run_at) or _to_amsterdam_date(now) or now.date(),
start_date=now,
resolved_at=None,
@ -604,6 +644,8 @@ def api_remarks():
"remark": {
"id": remark.id,
"body": remark.body or "",
"source": (getattr(remark, "source", None) or "manual"),
"ticket_id": getattr(remark, "ticket_id", None),
"start_date": _format_datetime(remark.start_date),
"resolved_at": "",
"active": True,

View File

@ -1,5 +1,12 @@
from .routes_shared import * # noqa: F401,F403
from .routes_shared import _format_datetime, _get_database_size_bytes, _apply_overrides_to_run, _format_bytes, _get_free_disk_bytes, _infer_schedule_map_from_runs
from .routes_shared import (
_format_datetime,
_get_database_size_bytes,
_apply_overrides_to_run,
_format_bytes,
_get_expected_times_for_job_on_date,
_get_free_disk_bytes,
)
@main_bp.route("/")
@login_required
@ -56,8 +63,6 @@ def dashboard():
)
end_of_day = start_of_day + timedelta(days=1)
weekday_idx = today_date.weekday() # 0=Mon..6=Sun
jobs_success_count = 0
jobs_success_override_count = 0
jobs_expected_count = 0
@ -71,8 +76,7 @@ def dashboard():
jobs = Job.query.join(Customer, isouter=True).all()
for job in jobs:
schedule_map = _infer_schedule_map_from_runs(job.id)
expected_times = schedule_map.get(weekday_idx) or []
expected_times = _get_expected_times_for_job_on_date(job, today_date)
if not expected_times:
continue

View File

@ -1,9 +1,14 @@
from .routes_shared import * # noqa: F401,F403
from .routes_shared import _format_datetime, _get_or_create_settings, _apply_overrides_to_run, _infer_schedule_map_from_runs, _infer_monthly_schedule_from_runs
from .routes_shared import (
_format_datetime,
_get_or_create_settings,
_apply_overrides_to_run,
_get_expected_times_for_job_on_date,
)
# Grace window for today's Expected/Missed transition.
# A job is only marked Missed after the latest expected time plus this grace.
MISSED_GRACE_WINDOW = timedelta(hours=1)
MISSED_GRACE_WINDOW = timedelta(hours=3)
@main_bp.route("/daily-jobs")
@login_required
@ -87,8 +92,6 @@ def daily_jobs():
minute_bucket = (d.minute // 15) * 15
return f"{d.hour:02d}:{minute_bucket:02d}"
weekday_idx = target_date.weekday() # 0=Mon..6=Sun
jobs_query = (
Job.query.join(Customer, isouter=True)
.filter(Job.archived.is_(False))
@ -110,24 +113,7 @@ def daily_jobs():
rows = []
for job in jobs:
schedule_map = _infer_schedule_map_from_runs(job.id)
expected_times = schedule_map.get(weekday_idx) or []
# If no weekly schedule is inferred (e.g. monthly jobs), try monthly inference.
if not expected_times:
monthly = _infer_monthly_schedule_from_runs(job.id)
if monthly:
dom = int(monthly.get("day_of_month") or 0)
mtimes = monthly.get("times") or []
# For months shorter than dom, treat the last day of month as the scheduled day.
try:
import calendar as _calendar
last_dom = _calendar.monthrange(target_date.year, target_date.month)[1]
except Exception:
last_dom = target_date.day
scheduled_dom = dom if (dom and dom <= last_dom) else last_dom
if target_date.day == scheduled_dom:
expected_times = list(mtimes)
expected_times = _get_expected_times_for_job_on_date(job, target_date)
if not expected_times:
continue

View File

@ -3,12 +3,16 @@ from .routes_shared import (
_apply_overrides_to_run,
_describe_schedule,
_format_datetime,
_get_effective_schedule_for_job,
_get_ui_timezone_name,
_infer_schedule_map_from_runs,
_parse_schedule_times_csv,
_schedule_map_to_desc,
_to_amsterdam_date,
)
_WEEKDAY_LABELS = ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"]
@main_bp.route("/jobs")
@login_required
@roles_required("admin", "operator", "viewer")
@ -231,12 +235,106 @@ def job_set_cove_account(job_id: int):
return redirect(url_for("main.job_detail", job_id=job_id))
@main_bp.route("/jobs/<int:job_id>/schedule", methods=["POST"])
@login_required
@roles_required("admin", "operator")
def job_set_schedule(job_id: int):
"""Save or clear manual schedule override for this job."""
job = Job.query.get_or_404(job_id)
if (request.form.get("clear_schedule") or "").strip() == "1":
job.schedule_type = None
job.schedule_days_of_week = None
job.schedule_day_of_month = None
job.schedule_times = None
db.session.commit()
try:
log_admin_event(
"job_schedule_cleared",
f"Cleared manual schedule override for job {job.id}",
details=f"job_name={job.job_name}",
)
except Exception:
pass
flash("Manual schedule override removed. Inferred schedule is active again.", "success")
return redirect(url_for("main.job_detail", job_id=job_id))
schedule_type = (request.form.get("schedule_type") or "").strip().lower()
times_raw = (request.form.get("schedule_times") or "").strip()
times = _parse_schedule_times_csv(times_raw)
if not times:
flash("Invalid schedule times. Use comma-separated HH:MM values, e.g. 01:00,13:15.", "warning")
return redirect(url_for("main.job_detail", job_id=job_id))
if schedule_type not in ("daily", "weekly", "monthly"):
flash("Invalid schedule type. Choose Daily, Weekly, or Monthly.", "warning")
return redirect(url_for("main.job_detail", job_id=job_id))
day_labels: list[str] = []
day_of_month: int | None = None
if schedule_type == "weekly":
raw_days = request.form.getlist("schedule_weekdays")
idxs: list[int] = []
for value in raw_days:
try:
idx = int(str(value).strip())
except Exception:
continue
if 0 <= idx <= 6 and idx not in idxs:
idxs.append(idx)
idxs = sorted(idxs)
if not idxs:
flash("Weekly schedule requires at least one day.", "warning")
return redirect(url_for("main.job_detail", job_id=job_id))
day_labels = [_WEEKDAY_LABELS[i] for i in idxs]
if schedule_type == "monthly":
dom_raw = (request.form.get("schedule_day_of_month") or "").strip()
try:
day_of_month = int(dom_raw)
except Exception:
day_of_month = None
if day_of_month is None or day_of_month < 1 or day_of_month > 31:
flash("Monthly schedule requires a day of month between 1 and 31.", "warning")
return redirect(url_for("main.job_detail", job_id=job_id))
job.schedule_type = schedule_type
job.schedule_times = ",".join(times)
job.schedule_days_of_week = ",".join(day_labels) if day_labels else None
job.schedule_day_of_month = day_of_month if schedule_type == "monthly" else None
db.session.commit()
try:
details = f"type={schedule_type}; times={job.schedule_times}; days={job.schedule_days_of_week}; dom={job.schedule_day_of_month}"
log_admin_event(
"job_schedule_set",
f"Set manual schedule override for job {job.id}",
details=details,
)
except Exception:
pass
flash("Manual schedule override saved.", "success")
return redirect(url_for("main.job_detail", job_id=job_id))
@main_bp.route("/jobs/<int:job_id>")
@login_required
@roles_required("admin", "operator", "viewer")
def job_detail(job_id: int):
job = Job.query.get_or_404(job_id)
first_detected_run_at = (
db.session.query(func.min(JobRun.run_at))
.filter(
JobRun.job_id == job.id,
JobRun.run_at.isnot(None),
JobRun.missed.is_(False),
)
.scalar()
)
# History pagination
try:
page = int(request.args.get("page", "1"))
@ -523,13 +621,24 @@ def job_detail(job_id: int):
can_manage_jobs = current_user.is_authenticated and get_active_role() in ("admin", "operator")
schedule_map = None
schedule_desc = _describe_schedule(job)
if schedule_desc.startswith("No schedule configured"):
schedule_map = _infer_schedule_map_from_runs(job.id)
schedule_desc = _schedule_map_to_desc(schedule_map)
inferred_schedule_map = _infer_schedule_map_from_runs(job.id)
effective_schedule = _get_effective_schedule_for_job(job)
effective_source = effective_schedule.get("source") or "none"
effective_weekly_map = effective_schedule.get("weekly_map") or {i: [] for i in range(7)}
effective_monthly = effective_schedule.get("monthly")
if effective_source == "inferred_weekly":
schedule_desc = _schedule_map_to_desc(effective_weekly_map)
elif effective_source == "inferred_monthly" and effective_monthly:
dom = effective_monthly.get("day_of_month")
mtimes = effective_monthly.get("times") or []
if mtimes:
schedule_desc = f"Inferred monthly on day {dom} at {', '.join(mtimes)}."
else:
schedule_map = _infer_schedule_map_from_runs(job.id)
schedule_desc = f"Inferred monthly on day {dom}."
elif effective_source == "none":
schedule_desc = _schedule_map_to_desc(inferred_schedule_map)
# For convenience, also load customer
customer = None
@ -545,8 +654,12 @@ def job_detail(job_id: int):
"main/job_detail.html",
job=job,
customer=customer,
first_detected_run_at=first_detected_run_at,
schedule_desc=schedule_desc,
schedule_map=schedule_map,
schedule_map=inferred_schedule_map,
effective_schedule_source=effective_source,
effective_weekly_schedule_map=effective_weekly_map,
effective_monthly_schedule=effective_monthly,
history_rows=history_rows,
ticket_open_count=int(ticket_open_count or 0),
ticket_total_count=int(ticket_total_count or 0),

View File

@ -13,12 +13,11 @@ from sqlalchemy import and_, or_, func, text
from .routes_shared import (
_apply_overrides_to_run,
_get_effective_schedule_for_job,
_format_datetime,
_get_ui_timezone,
_get_ui_timezone_name,
_get_or_create_settings,
_infer_schedule_map_from_runs,
_infer_monthly_schedule_from_runs,
_to_amsterdam_date,
main_bp,
roles_required,
@ -35,6 +34,9 @@ from ..models import (
MailMessage,
MailObject,
Override,
Remark,
RemarkJobRun,
RemarkScope,
Ticket,
TicketJobRun,
TicketScope,
@ -44,6 +46,7 @@ from ..ticketing_utils import link_open_internal_tickets_to_run
AUTOTASK_TERMINAL_STATUS_IDS = {5}
BACKUPCHECKS_RESOLVE_MARKER = "[Backupchecks] Marked as resolved in Backupchecks"
RUN_CHECKS_SORT_MODES = {"customer", "status"}
# ---------------------------------------------------------------------------
@ -276,6 +279,148 @@ def _resolve_internal_ticket_for_job(
db.session.add(TicketJobRun(ticket_id=ticket.id, job_run_id=rid, link_source="autotask"))
def _extract_autotask_resolution_text(ticket_payload: dict | None) -> str:
if not isinstance(ticket_payload, dict):
return ""
preferred = [
"resolution",
"resolutionText",
"resolution_text",
"resolutionNote",
"resolutionNotes",
]
for key in preferred:
val = ticket_payload.get(key)
txt = str(val or "").strip()
if txt:
return txt
for key, val in ticket_payload.items():
key_l = str(key or "").strip().lower()
if "resolution" not in key_l:
continue
txt = str(val or "").strip()
if txt:
return txt
return ""
def _maybe_create_autotask_resolution_remark(
    *,
    ticket_payload: dict | None,
    ticket_id: int,
    runs_for_ticket: list[JobRun],
    now: datetime,
) -> None:
    """Persist PSA resolution text as an active internal remark (deduplicated).

    Mirrors the resolution text found on an Autotask ticket payload into an
    internal Remark scoped to the affected job, linking it to every run the
    ticket covers. No-ops when there is no resolution text, when the text
    contains the Backupchecks resolve marker, or when an identical remark
    already exists for this job (and internal ticket, when known).

    Keyword-only args:
        ticket_payload: raw Autotask ticket dict (may be None).
        ticket_id: Autotask ticket id, used in the remark title fallback.
        runs_for_ticket: JobRun rows linked to this ticket; the first run's
            job_id is used to resolve the job — assumes all runs share one
            job (TODO confirm with caller).
        now: timestamp used for the remark's start_date.

    NOTE(review): this adds rows to the session and flushes but never
    commits; the caller owns the transaction boundary.
    """
    if not runs_for_ticket:
        return
    resolution_text = _extract_autotask_resolution_text(ticket_payload)
    if not resolution_text:
        return
    if BACKUPCHECKS_RESOLVE_MARKER in resolution_text:
        # Do not mirror Backupchecks-generated resolve notes back as remarks.
        return
    # Resolve the job from the first linked run.
    job = Job.query.get(runs_for_ticket[0].job_id) if runs_for_ticket else None
    if not job:
        return
    # Prefer the ticket number from the payload; otherwise fall back to the
    # first run carrying a stored autotask_ticket_number.
    ticket_number = ""
    if isinstance(ticket_payload, dict):
        ticket_number = str(
            ticket_payload.get("ticketNumber")
            or ticket_payload.get("number")
            or ticket_payload.get("ticket_number")
            or ""
        ).strip()
    if not ticket_number:
        for rr in runs_for_ticket:
            code = str(getattr(rr, "autotask_ticket_number", "") or "").strip()
            if code:
                ticket_number = code
                break
    # The remark becomes active from the earliest linked run time, if any.
    active_from_dt = None
    try:
        dts = [getattr(x, "run_at", None) for x in runs_for_ticket if getattr(x, "run_at", None)]
        active_from_dt = min(dts) if dts else None
    except Exception:
        active_from_dt = None
    internal_ticket = _ensure_internal_ticket_for_autotask(
        ticket_number=ticket_number,
        job=job,
        run_ids=[int(x.id) for x in runs_for_ticket if getattr(x, "id", None)],
        now=now,
        active_from_dt=active_from_dt,
    )
    # Tickets resolved from inside Backupchecks must not gain mirrored remarks.
    if (getattr(internal_ticket, "resolved_origin", None) or "").strip().lower() == "backupchecks":
        return
    internal_ticket_id = getattr(internal_ticket, "id", None)
    # Dedupe: same source + body + job scope (and internal ticket, when known)
    # means this resolution was already mirrored on a previous poll.
    exists = (
        db.session.query(Remark.id)
        .join(RemarkScope, RemarkScope.remark_id == Remark.id)
        .filter(Remark.source == "autotask_resolution")
        .filter(Remark.body == resolution_text)
        .filter(RemarkScope.job_id == job.id)
    )
    if internal_ticket_id:
        exists = exists.filter(Remark.ticket_id == int(internal_ticket_id))
    if exists.first():
        return
    active_from_date = _to_amsterdam_date(active_from_dt or now) or (active_from_dt or now).date()
    title = (
        f"Autotask resolution ({ticket_number})"
        if ticket_number
        else f"Autotask resolution (ID {int(ticket_id)})"
    )
    remark = Remark(
        title=title,
        body=resolution_text,
        source="autotask_resolution",
        ticket_id=(int(internal_ticket_id) if internal_ticket_id else None),
        active_from_date=active_from_date,
        start_date=now,
        resolved_at=None,
    )
    db.session.add(remark)
    # Flush so remark.id is available for the scope/link rows below.
    db.session.flush()
    db.session.add(
        RemarkScope(
            remark_id=remark.id,
            scope_type="job",
            customer_id=job.customer_id,
            backup_software=job.backup_software,
            backup_type=job.backup_type,
            job_id=job.id,
            job_name_match=job.job_name,
            job_name_match_mode="exact",
        )
    )
    # Link the remark to each concrete run the ticket covers (idempotent).
    for rr in runs_for_ticket:
        rid = int(getattr(rr, "id", 0) or 0)
        if rid <= 0:
            continue
        if not RemarkJobRun.query.filter_by(remark_id=remark.id, job_run_id=rid).first():
            db.session.add(
                RemarkJobRun(
                    remark_id=remark.id,
                    job_run_id=rid,
                    link_source="autotask_resolution",
                )
            )
def _poll_autotask_ticket_states_for_runs(*, run_ids: list[int]) -> None:
"""Phase 2: Read-only PSA-driven ticket completion sync.
@ -570,6 +715,27 @@ def _poll_autotask_ticket_states_for_runs(*, run_ids: list[int]) -> None:
origin="psa",
)
# Mirror Autotask resolution text into active internal remarks for follow-up visibility.
for tid in ticket_ids:
if tid in deleted_map:
continue
runs_for_ticket = ticket_to_runs.get(tid) or []
if not runs_for_ticket:
continue
try:
ticket_payload = client.get_ticket(int(tid))
except Exception:
continue
try:
_maybe_create_autotask_resolution_remark(
ticket_payload=ticket_payload,
ticket_id=int(tid),
runs_for_ticket=runs_for_ticket,
now=now,
)
except Exception:
continue
try:
db.session.commit()
except Exception:
@ -750,8 +916,8 @@ def _compose_autotask_link_existing_note(
# Grace window for matching real runs to an expected schedule slot.
# A run within +/- 1 hour of the inferred schedule time counts as fulfilling the slot.
MISSED_GRACE_WINDOW = timedelta(hours=1)
# A run within +/- 3 hours of the inferred schedule time counts as fulfilling the slot.
MISSED_GRACE_WINDOW = timedelta(hours=3)
def _status_is_success(status: str | None) -> bool:
@ -817,12 +983,10 @@ def _ensure_missed_runs_for_job(job: Job, start_from: date, end_inclusive: date)
return 0
tz = _get_ui_timezone()
schedule_map = _infer_schedule_map_from_runs(job.id) or {}
resolved_schedule = _get_effective_schedule_for_job(job)
schedule_map = resolved_schedule.get("weekly_map") or {i: [] for i in range(7)}
has_weekly_times = any((schedule_map.get(i) or []) for i in range(7))
monthly = None
if not has_weekly_times:
monthly = _infer_monthly_schedule_from_runs(job.id)
monthly = resolved_schedule.get("monthly")
if (not has_weekly_times) and (not monthly):
return 0
@ -2404,7 +2568,7 @@ def api_run_checks_autotask_resolve_note():
ticket_number = str(getattr(run, "autotask_ticket_number", "") or "").strip()
# Build dynamic message based on time entry check
marker = "[Backupchecks] Marked as resolved in Backupchecks"
marker = BACKUPCHECKS_RESOLVE_MARKER
if has_time_entries:
status_note = "(ticket remains open in Autotask due to existing time entries)"
else:

View File

@ -3,9 +3,8 @@ from .routes_shared import (
_apply_overrides_to_run,
_format_datetime,
_get_or_create_settings,
_get_expected_times_for_job_on_date,
_get_ui_timezone,
_infer_monthly_schedule_from_runs,
_infer_schedule_map_from_runs,
)
from sqlalchemy import and_, cast, func, or_, String
@ -382,23 +381,8 @@ def _build_daily_jobs_results(patterns: list[str], page: int) -> dict:
)
_enrich_paging(section, total, current_page, total_pages)
for row in rows:
expected_times = (_infer_schedule_map_from_runs(row.job_id).get(target_date.weekday()) or [])
if not expected_times:
monthly = _infer_monthly_schedule_from_runs(row.job_id)
if monthly:
try:
dom = int(monthly.get("day_of_month") or 0)
except Exception:
dom = 0
mtimes = monthly.get("times") or []
try:
import calendar as _calendar
last_dom = _calendar.monthrange(target_date.year, target_date.month)[1]
except Exception:
last_dom = target_date.day
scheduled_dom = dom if (dom and dom <= last_dom) else last_dom
if target_date.day == scheduled_dom:
expected_times = list(mtimes)
job_obj = Job.query.get(int(row.job_id))
expected_times = _get_expected_times_for_job_on_date(job_obj, target_date)
runs_for_day = (
JobRun.query.filter(
@ -416,7 +400,6 @@ def _build_daily_jobs_results(patterns: list[str], page: int) -> dict:
if run_count > 0:
last_run = runs_for_day[-1]
try:
job_obj = Job.query.get(int(row.job_id))
status_display, _override_applied, _override_level, _ov_id, _ov_reason = _apply_overrides_to_run(job_obj, last_run)
if getattr(last_run, "missed", False):
last_status = status_display or "Missed"

View File

@ -655,7 +655,9 @@ def _infer_schedule_map_from_runs(job_id: int):
Returns dict weekday->sorted list of 'HH:MM' strings in configured UI local time.
Notes:
- Only considers real runs that came from mail reports (mail_message_id is not NULL).
- Considers real runs from:
- mail reports (mail_message_id is not NULL), and
- Cove API imports (source_type == "cove_api").
- Synthetic missed rows never influence schedule inference.
- To reduce noise, a weekday/time bucket must occur at least MIN_OCCURRENCES times.
"""
@ -691,7 +693,8 @@ def _infer_schedule_map_from_runs(job_id: int):
pass
try:
# Only infer schedules from real runs that came from mail reports.
# Only infer schedules from real runs that came from mail reports
# or Cove API imports.
# Synthetic "Missed" rows must never influence schedule inference.
# Limit to the last 90 days so that schedule changes (different day,
# time, or frequency) take effect quickly and do not leave stale slots
@ -703,7 +706,10 @@ def _infer_schedule_map_from_runs(job_id: int):
JobRun.job_id == job_id,
JobRun.run_at.isnot(None),
JobRun.missed.is_(False),
or_(
JobRun.mail_message_id.isnot(None),
JobRun.source_type == "cove_api",
),
JobRun.run_at >= cutoff_utc,
)
.order_by(JobRun.run_at.desc())
@ -782,14 +788,17 @@ def _infer_monthly_schedule_from_runs(job_id: int):
or None if not enough evidence.
Rules:
- Uses only real mail-based runs (mail_message_id is not NULL) and excludes synthetic missed rows.
- Uses only real runs from mail and Cove API imports:
- mail-based runs (mail_message_id is not NULL)
- Cove API runs (source_type == "cove_api")
and excludes synthetic missed rows.
- Requires at least MIN_OCCURRENCES occurrences for the inferred day-of-month.
- Uses a simple cadence heuristic: typical gaps between runs must be >= 20 days to qualify as monthly.
"""
MIN_OCCURRENCES = 3
try:
# Same "real run" rule as weekly inference.
# Same "real run" rule as weekly inference (mail + Cove API).
# 180 days gives ~6 occurrences for a monthly job (enough for
# MIN_OCCURRENCES=3) while still discarding stale schedule data.
cutoff_utc = datetime.utcnow() - timedelta(days=180)
@ -799,7 +808,10 @@ def _infer_monthly_schedule_from_runs(job_id: int):
JobRun.job_id == job_id,
JobRun.run_at.isnot(None),
JobRun.missed.is_(False),
or_(
JobRun.mail_message_id.isnot(None),
JobRun.source_type == "cove_api",
),
JobRun.run_at >= cutoff_utc,
)
.order_by(JobRun.run_at.asc())
@ -899,6 +911,161 @@ def _infer_monthly_schedule_from_runs(job_id: int):
return {"day_of_month": int(best_dom), "times": keep_times}
def _parse_schedule_times_csv(raw: str | None) -> list[str]:
out: list[str] = []
seen: set[str] = set()
for part in str(raw or "").split(","):
token = part.strip()
if not token:
continue
m = re.match(r"^(\d{1,2}):(\d{2})$", token)
if not m:
continue
hh = int(m.group(1))
mm = int(m.group(2))
if hh < 0 or hh > 23 or mm < 0 or mm > 59:
continue
norm = f"{hh:02d}:{mm:02d}"
if norm in seen:
continue
seen.add(norm)
out.append(norm)
return sorted(out)
def _parse_schedule_days_csv(raw: str | None) -> list[int]:
day_map = {
"mon": 0,
"monday": 0,
"tue": 1,
"tues": 1,
"tuesday": 1,
"wed": 2,
"wednesday": 2,
"thu": 3,
"thurs": 3,
"thursday": 3,
"fri": 4,
"friday": 4,
"sat": 5,
"saturday": 5,
"sun": 6,
"sunday": 6,
}
out: list[int] = []
seen: set[int] = set()
for part in str(raw or "").split(","):
token = part.strip().lower()
if not token:
continue
wd = None
if token.isdigit():
v = int(token)
if 0 <= v <= 6:
wd = v
else:
wd = day_map.get(token)
if wd is None or wd in seen:
continue
seen.add(wd)
out.append(wd)
return sorted(out)
def _get_manual_schedule_for_job(job: Job | None):
    """Normalize a job's manual schedule columns into a schedule payload.

    Returns {"mode": "weekly", "weekly_map": {0..6: [HH:MM,...]}} for
    daily/weekly overrides, {"mode": "monthly", "monthly": {...}} for a
    monthly override, or None when no valid override is configured.
    """
    if not job:
        return None
    mode = str(getattr(job, "schedule_type", "") or "").strip().lower()
    if not mode:
        return None
    slot_times = _parse_schedule_times_csv(getattr(job, "schedule_times", None))
    if not slot_times:
        return None
    if mode == "daily":
        # Daily = the same time slots on every weekday.
        return {"mode": "weekly", "weekly_map": {wd: list(slot_times) for wd in range(7)}}
    if mode == "weekly":
        weekly: dict[int, list[str]] = {wd: [] for wd in range(7)}
        for wd in _parse_schedule_days_csv(getattr(job, "schedule_days_of_week", None)):
            weekly[wd] = list(slot_times)
        if not any(weekly[wd] for wd in range(7)):
            return None
        return {"mode": "weekly", "weekly_map": weekly}
    if mode == "monthly":
        try:
            dom = int(getattr(job, "schedule_day_of_month", None) or 0)
        except Exception:
            dom = 0
        if not (1 <= dom <= 31):
            return None
        return {"mode": "monthly", "monthly": {"day_of_month": dom, "times": list(slot_times)}}
    # Unknown schedule_type value.
    return None
def _get_effective_schedule_for_job(job: Job | None):
    """Resolve a job's schedule with precedence: manual, inferred weekly, inferred monthly.

    Returns {"source": ..., "weekly_map": {0..6: [...]}, "monthly": ...}
    where source is one of "manual", "inferred_weekly", "inferred_monthly"
    or "none".
    """
    blank_week = {i: [] for i in range(7)}
    if not job:
        return {"source": "none", "weekly_map": blank_week, "monthly": None}
    manual = _get_manual_schedule_for_job(job)
    if manual:
        # A valid manual override always wins over inference.
        if manual.get("mode") == "weekly":
            return {
                "source": "manual",
                "weekly_map": manual.get("weekly_map") or blank_week,
                "monthly": None,
            }
        return {"source": "manual", "weekly_map": blank_week, "monthly": manual.get("monthly")}
    inferred = _infer_schedule_map_from_runs(job.id) or blank_week
    if any(inferred.get(i) or [] for i in range(7)):
        return {"source": "inferred_weekly", "weekly_map": inferred, "monthly": None}
    monthly = _infer_monthly_schedule_from_runs(job.id)
    if monthly:
        return {"source": "inferred_monthly", "weekly_map": blank_week, "monthly": monthly}
    return {"source": "none", "weekly_map": inferred, "monthly": None}
def _get_expected_times_for_job_on_date(job: Job | None, target_date) -> list[str]:
    """Return expected HH:MM slots for *job* on *target_date*.

    Uses the effective schedule: weekly slots for the date's weekday win;
    otherwise a monthly schedule contributes its times only on the
    scheduled day of month (clamped to the month's last day for short
    months). Returns [] when nothing is expected.
    """
    if not job or not target_date:
        return []
    schedule = _get_effective_schedule_for_job(job)
    weekday_slots = (schedule.get("weekly_map") or {}).get(int(target_date.weekday())) or []
    if weekday_slots:
        return list(weekday_slots)
    monthly = schedule.get("monthly")
    if not monthly:
        return []
    try:
        dom = int(monthly.get("day_of_month") or 0)
    except Exception:
        dom = 0
    monthly_times = monthly.get("times") or []
    if dom <= 0 or not monthly_times:
        return []
    try:
        month_last = calendar.monthrange(target_date.year, target_date.month)[1]
    except Exception:
        month_last = target_date.day
    # Clamp day-of-month 29/30/31 to the month's actual last day.
    effective_dom = dom if dom <= month_last else month_last
    if int(target_date.day) != int(effective_dom):
        return []
    return list(monthly_times)
def _schedule_map_to_desc(schedule_map):
weekday_names = ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"]
any_times = any(schedule_map.get(i) for i in range(7))

View File

@ -29,8 +29,6 @@ def tickets_page():
if tab == "tickets":
query = Ticket.query
joined_scope = False
if active_only:
query = query.filter(Ticket.resolved_at.is_(None))
if q:
like_q = f"%{q}%"
query = (
@ -68,6 +66,8 @@ def tickets_page():
ticket_ids = [t.id for t in tickets_raw]
customer_map = {}
run_count_map = {}
scope_total_map = {}
scope_open_map = {}
if ticket_ids:
try:
@ -113,6 +113,31 @@ def tickets_page():
except Exception:
run_count_map = {}
try:
rows = (
db.session.execute(
text(
"""
SELECT
ticket_id,
COUNT(*) AS total_count,
SUM(CASE WHEN resolved_at IS NULL THEN 1 ELSE 0 END) AS open_count
FROM ticket_scopes
WHERE ticket_id = ANY(:ids)
GROUP BY ticket_id
"""
),
{"ids": ticket_ids},
)
.fetchall()
)
for tid, total_cnt, open_cnt in rows:
scope_total_map[int(tid)] = int(total_cnt or 0)
scope_open_map[int(tid)] = int(open_cnt or 0)
except Exception:
scope_total_map = {}
scope_open_map = {}
for t in tickets_raw:
customers_for_ticket = customer_map.get(t.id) or []
if customers_for_ticket:
@ -141,6 +166,11 @@ def tickets_page():
scope_summary = " / ".join([p for p in parts if p]) or "-"
except Exception:
scope_summary = "-"
total_scopes = int(scope_total_map.get(t.id, 0) or 0)
open_scopes = int(scope_open_map.get(t.id, 0) or 0)
active_effective = (t.resolved_at is None) and (total_scopes == 0 or open_scopes > 0)
if active_only and not active_effective:
continue
tickets.append(
{
@ -150,7 +180,7 @@ def tickets_page():
"active_from_date": str(getattr(t, "active_from_date", "") or ""),
"start_date": _format_datetime(t.start_date),
"resolved_at": _format_datetime(t.resolved_at) if t.resolved_at else "",
"active": t.resolved_at is None,
"active": bool(active_effective),
"customers": customer_display,
"scope_summary": scope_summary,
"linked_runs": run_count_map.get(t.id, 0),

View File

@ -631,6 +631,46 @@ def migrate_remarks_active_from_date() -> None:
print("[migrations] remarks.active_from_date added and backfilled.")
def migrate_remarks_source_and_ticket_id() -> None:
    """Ensure remarks.source and remarks.ticket_id exist.

    Idempotent, best-effort schema migration run at startup:
    - Adds remarks.source (VARCHAR(64)) if missing, backfills NULL/empty
      values with 'manual', then tries to promote it to NOT NULL.
    - Adds remarks.ticket_id (FK to tickets.id) if missing.
    - Creates supporting indexes with IF NOT EXISTS.

    All failures are reported via print and swallowed so application
    startup is never blocked by this migration.
    """
    table = "remarks"
    try:
        engine = db.get_engine()
    except Exception as exc:
        print(f"[migrations] Could not get engine for remarks source/ticket migration: {exc}")
        return
    try:
        # engine.begin() wraps the DDL in a single transaction.
        with engine.begin() as conn:
            if not _column_exists_on_conn(conn, table, "source"):
                conn.execute(text('ALTER TABLE "remarks" ADD COLUMN source VARCHAR(64)'))
                # Backfill existing rows before tightening the constraint.
                conn.execute(
                    text(
                        """
                        UPDATE "remarks"
                        SET source = 'manual'
                        WHERE source IS NULL OR source = '';
                        """
                    )
                )
                # NOT NULL promotion is best-effort (may be unsupported or
                # blocked on some backends).
                try:
                    conn.execute(text('ALTER TABLE "remarks" ALTER COLUMN source SET NOT NULL'))
                except Exception:
                    pass
            if not _column_exists_on_conn(conn, table, "ticket_id"):
                conn.execute(text('ALTER TABLE "remarks" ADD COLUMN ticket_id INTEGER REFERENCES tickets(id)'))
            # Index creation is idempotent via IF NOT EXISTS.
            conn.execute(text('CREATE INDEX IF NOT EXISTS idx_remarks_source ON remarks (source)'))
            conn.execute(text('CREATE INDEX IF NOT EXISTS idx_remarks_ticket_id ON remarks (ticket_id)'))
    except Exception as exc:
        print(f"[migrations] Failed migrate_remarks_source_and_ticket_id: {exc}")
        return
    print("[migrations] migrate_remarks_source_and_ticket_id completed.")
def migrate_overrides_match_columns() -> None:
"""Add match_status / match_error columns to overrides table if missing."""
engine = db.get_engine()
@ -1428,6 +1468,7 @@ def run_migrations() -> None:
migrate_tickets_active_from_date()
migrate_tickets_resolved_origin()
migrate_remarks_active_from_date()
migrate_remarks_source_and_ticket_id()
migrate_overrides_match_columns()
migrate_job_runs_review_tracking()
migrate_job_runs_override_metadata()
@ -2220,6 +2261,8 @@ def migrate_object_persistence_tables() -> None:
id SERIAL PRIMARY KEY,
title VARCHAR(255),
body TEXT NOT NULL,
source VARCHAR(64) NOT NULL DEFAULT 'manual',
ticket_id INTEGER REFERENCES tickets(id),
start_date TIMESTAMP,
resolved_at TIMESTAMP,
created_at TIMESTAMP NOT NULL,

View File

@ -584,6 +584,8 @@ class Remark(db.Model):
id = db.Column(db.Integer, primary_key=True)
title = db.Column(db.String(255))
body = db.Column(db.Text, nullable=False)
source = db.Column(db.String(64), nullable=False, default="manual")
ticket_id = db.Column(db.Integer, db.ForeignKey("tickets.id"), nullable=True)
# Date (Europe/Amsterdam) from which this remark should be considered active
# for the scoped job(s) in Daily Jobs / Job Details views.

View File

@ -22,6 +22,59 @@
<dt class="col-sm-3">Remarks</dt>
<dd class="col-sm-9">{{ remark_open_count }} open / {{ remark_total_count }} total</dd>
<dt class="col-sm-3">First backup detected</dt>
<dd class="col-sm-9">
{% if first_detected_run_at %}
{{ first_detected_run_at|local_datetime }}
{% else %}
Not detected yet
{% endif %}
</dd>
<dt class="col-sm-3">Schedule (effective)</dt>
<dd class="col-sm-9">
{% if effective_schedule_source == 'manual' %}
<span class="badge bg-primary-subtle text-primary-emphasis border border-primary-subtle">Manual override</span>
{% elif effective_schedule_source == 'inferred_weekly' %}
<span class="badge bg-secondary-subtle text-secondary-emphasis border border-secondary-subtle">Inferred (weekly)</span>
{% elif effective_schedule_source == 'inferred_monthly' %}
<span class="badge bg-secondary-subtle text-secondary-emphasis border border-secondary-subtle">Inferred (monthly)</span>
{% else %}
<span class="badge bg-secondary text-white border border-secondary">No schedule</span>
{% endif %}
<div class="mt-2 small">{{ schedule_desc }}</div>
</dd>
{% if effective_weekly_schedule_map and (effective_weekly_schedule_map[0] or effective_weekly_schedule_map[1] or effective_weekly_schedule_map[2] or effective_weekly_schedule_map[3] or effective_weekly_schedule_map[4] or effective_weekly_schedule_map[5] or effective_weekly_schedule_map[6]) %}
<dt class="col-sm-3">Effective schedule</dt>
<dd class="col-sm-9">
<div class="table-responsive">
<table class="table table-sm table-bordered mb-0">
<thead class="table-light">
<tr><th style="width: 120px;">Day</th><th>Times (15 min blocks)</th></tr>
</thead>
<tbody>
<tr><td>Mon</td><td>{{ ', '.join(effective_weekly_schedule_map[0]) if effective_weekly_schedule_map[0] else '—' }}</td></tr>
<tr><td>Tue</td><td>{{ ', '.join(effective_weekly_schedule_map[1]) if effective_weekly_schedule_map[1] else '—' }}</td></tr>
<tr><td>Wed</td><td>{{ ', '.join(effective_weekly_schedule_map[2]) if effective_weekly_schedule_map[2] else '—' }}</td></tr>
<tr><td>Thu</td><td>{{ ', '.join(effective_weekly_schedule_map[3]) if effective_weekly_schedule_map[3] else '—' }}</td></tr>
<tr><td>Fri</td><td>{{ ', '.join(effective_weekly_schedule_map[4]) if effective_weekly_schedule_map[4] else '—' }}</td></tr>
<tr><td>Sat</td><td>{{ ', '.join(effective_weekly_schedule_map[5]) if effective_weekly_schedule_map[5] else '—' }}</td></tr>
<tr><td>Sun</td><td>{{ ', '.join(effective_weekly_schedule_map[6]) if effective_weekly_schedule_map[6] else '—' }}</td></tr>
</tbody>
</table>
</div>
</dd>
{% endif %}
{% if effective_monthly_schedule %}
<dt class="col-sm-3">Effective monthly</dt>
<dd class="col-sm-9">
Day {{ effective_monthly_schedule.day_of_month }} at {{ ', '.join(effective_monthly_schedule.times or []) }}
</dd>
{% endif %}
{% if schedule_map %}
<dt class="col-sm-3">Schedule (inferred)</dt>
<dd class="col-sm-9">
@ -48,6 +101,86 @@
</div>
{% if can_manage_jobs %}
<div class="card mb-3">
<div class="card-header d-flex justify-content-between align-items-center">
<span>Schedule Override</span>
<button
class="btn btn-sm btn-outline-secondary"
type="button"
data-bs-toggle="collapse"
data-bs-target="#scheduleOverridePanel"
aria-expanded="false"
aria-controls="scheduleOverridePanel"
>
Open
</button>
</div>
<div class="collapse" id="scheduleOverridePanel">
<div class="card-body">
<form method="post" action="{{ url_for('main.job_set_schedule', job_id=job.id) }}" class="row g-3">
<div class="col-md-4">
<label for="schedule_type" class="form-label">Type</label>
<select class="form-select" id="schedule_type" name="schedule_type">
<option value="daily" {% if (job.schedule_type or '')|lower == 'daily' %}selected{% endif %}>Daily</option>
<option value="weekly" {% if (job.schedule_type or '')|lower == 'weekly' %}selected{% endif %}>Weekly</option>
<option value="monthly" {% if (job.schedule_type or '')|lower == 'monthly' %}selected{% endif %}>Monthly</option>
</select>
</div>
<div class="col-md-8">
<label for="schedule_times" class="form-label">Times (HH:MM, comma separated)</label>
<input
type="text"
class="form-control"
id="schedule_times"
name="schedule_times"
value="{{ job.schedule_times or '' }}"
placeholder="01:00,13:15"
required
/>
</div>
<div class="col-md-12">
<label class="form-label">Weekdays (for weekly)</label>
<div class="d-flex flex-wrap gap-3">
{% set selected_weekdays = ((job.schedule_days_of_week or '').split(',')) %}
{% for idx, label in [(0, 'Mon'), (1, 'Tue'), (2, 'Wed'), (3, 'Thu'), (4, 'Fri'), (5, 'Sat'), (6, 'Sun')] %}
<div class="form-check form-check-inline">
<input
class="form-check-input"
type="checkbox"
name="schedule_weekdays"
id="schedule_weekday_{{ idx }}"
value="{{ idx }}"
{% if label in selected_weekdays %}checked{% endif %}
/>
<label class="form-check-label" for="schedule_weekday_{{ idx }}">{{ label }}</label>
</div>
{% endfor %}
</div>
</div>
<div class="col-md-4">
<label for="schedule_day_of_month" class="form-label">Day of month (for monthly)</label>
<input
type="number"
class="form-control"
id="schedule_day_of_month"
name="schedule_day_of_month"
min="1"
max="31"
value="{{ job.schedule_day_of_month or '' }}"
/>
</div>
<div class="col-12 d-flex flex-wrap gap-2">
<button type="submit" class="btn btn-primary">Save override</button>
<button type="submit" name="clear_schedule" value="1" class="btn btn-outline-secondary">Use inferred schedule</button>
</div>
<div class="col-12 small text-muted">
Manual schedule override is leading for Expected/Missed and missed-run generation.
</div>
</form>
</div>
</div>
</div>
<div class="d-flex flex-wrap gap-2 mb-3">
<form method="post" action="{{ url_for('main.archive_job', job_id=job.id) }}" class="mb-0" onsubmit="return confirm('Archive this job? No new runs are expected and it will be removed from Daily Jobs and Run Checks.');">
<button type="submit" class="btn btn-outline-secondary">Archive</button>
@ -366,47 +499,54 @@
// Cross-browser copy to clipboard function
function copyToClipboard(text, button) {
// Method 1: Modern Clipboard API (works in most browsers with HTTPS)
if (navigator.clipboard && navigator.clipboard.writeText) {
navigator.clipboard.writeText(text)
var value = (text || "").toString().trim();
if (!value) {
alert("No ticket number available to copy.");
return;
}
if (window.isSecureContext && navigator.clipboard && navigator.clipboard.writeText) {
navigator.clipboard.writeText(value)
.then(function () {
showCopyFeedback(button);
})
.catch(function () {
// Fallback to method 2 if clipboard API fails
fallbackCopy(text, button);
fallbackCopy(value, button);
});
} else {
// Method 2: Legacy execCommand method
fallbackCopy(text, button);
return;
}
fallbackCopy(value, button);
}
function fallbackCopy(text, button) {
var textarea = document.createElement('textarea');
var textarea = document.createElement("textarea");
textarea.value = text;
textarea.style.position = 'fixed';
textarea.style.opacity = '0';
textarea.style.top = '0';
textarea.style.left = '0';
textarea.setAttribute("readonly", "readonly");
textarea.style.position = "fixed";
textarea.style.opacity = "0";
textarea.style.top = "0";
textarea.style.left = "0";
document.body.appendChild(textarea);
textarea.focus();
textarea.select();
textarea.setSelectionRange(0, text.length);
var successful = false;
try {
var successful = document.execCommand('copy');
if (successful) {
showCopyFeedback(button);
} else {
// If execCommand fails, use prompt as last resort
window.prompt('Copy ticket number:', text);
}
successful = document.execCommand("copy");
} catch (err) {
// If all else fails, show prompt
window.prompt('Copy ticket number:', text);
successful = false;
}
document.body.removeChild(textarea);
if (successful) {
showCopyFeedback(button);
return;
}
window.prompt("Copy ticket number:", text);
}
function showCopyFeedback(button) {
@ -502,13 +642,13 @@
Array.prototype.forEach.call(box.querySelectorAll('button[data-action]'), function (btn) {
btn.addEventListener('click', function (ev) {
ev.preventDefault();
ev.stopPropagation();
var action = btn.getAttribute('data-action');
var id = btn.getAttribute('data-id');
if (!action) return;
if (action === 'copy-ticket') {
var code = btn.getAttribute('data-code') || '';
if (!code) return;
var code = (btn.getAttribute('data-code') || '').trim();
copyToClipboard(code, btn);
return;
}

View File

@ -174,11 +174,16 @@
@media (min-width: 1400px) { .modal-xxl { max-width: 1400px; } }
#runChecksModal .modal-content {
height: 90vh;
height: min(90vh, calc(100dvh - 1rem));
max-height: calc(100dvh - 1rem);
display: flex;
flex-direction: column;
}
#runChecksModal .modal-dialog {
margin: 0.5rem auto;
}
#runChecksModal .modal-body {
overflow: hidden;
flex: 1 1 auto;
@ -236,8 +241,40 @@
#runChecksModal.is-cove .rcm-objects-scroll {
max-height: 55vh;
}
</style>
@media (max-width: 991.98px) {
#runChecksModal .modal-dialog {
max-width: calc(100vw - 1rem);
margin: 0.5rem;
}
#runChecksModal .modal-content {
height: calc(100dvh - 1rem);
max-height: calc(100dvh - 1rem);
}
#runChecksModal .modal-body {
overflow: auto;
}
#runChecksModal #rcm_content,
#runChecksModal .rcm-main-row,
#runChecksModal .rcm-main-row > .col-md-3,
#runChecksModal .rcm-detail-col {
height: auto;
min-height: initial;
}
#runChecksModal #rcm_runs_list {
max-height: 28vh;
}
#runChecksModal .rcm-objects-scroll,
#runChecksModal.is-cove .rcm-objects-scroll {
max-height: none;
}
}
</style>
<div class="modal fade" id="runChecksModal" tabindex="-1" aria-labelledby="runChecksModalLabel" aria-hidden="true">
<div class="modal-dialog modal-xl modal-dialog-scrollable modal-xxl">
<div class="modal-content">
@ -563,47 +600,54 @@ function escapeHtml(s) {
// Cross-browser copy to clipboard function
function copyToClipboard(text, button) {
// Method 1: Modern Clipboard API (works in most browsers with HTTPS)
if (navigator.clipboard && navigator.clipboard.writeText) {
navigator.clipboard.writeText(text)
var value = (text || "").toString().trim();
if (!value) {
alert("No ticket number available to copy.");
return;
}
if (window.isSecureContext && navigator.clipboard && navigator.clipboard.writeText) {
navigator.clipboard.writeText(value)
.then(function () {
showCopyFeedback(button);
})
.catch(function () {
// Fallback to method 2 if clipboard API fails
fallbackCopy(text, button);
fallbackCopy(value, button);
});
} else {
// Method 2: Legacy execCommand method
fallbackCopy(text, button);
return;
}
fallbackCopy(value, button);
}
function fallbackCopy(text, button) {
var textarea = document.createElement('textarea');
var textarea = document.createElement("textarea");
textarea.value = text;
textarea.style.position = 'fixed';
textarea.style.opacity = '0';
textarea.style.top = '0';
textarea.style.left = '0';
textarea.setAttribute("readonly", "readonly");
textarea.style.position = "fixed";
textarea.style.opacity = "0";
textarea.style.top = "0";
textarea.style.left = "0";
document.body.appendChild(textarea);
textarea.focus();
textarea.select();
textarea.setSelectionRange(0, text.length);
var successful = false;
try {
var successful = document.execCommand('copy');
if (successful) {
showCopyFeedback(button);
} else {
// If execCommand fails, use prompt as last resort
window.prompt('Copy ticket number:', text);
}
successful = document.execCommand("copy");
} catch (err) {
// If all else fails, show prompt
window.prompt('Copy ticket number:', text);
successful = false;
}
document.body.removeChild(textarea);
if (successful) {
showCopyFeedback(button);
return;
}
window.prompt("Copy ticket number:", text);
}
function showCopyFeedback(button) {
@ -977,12 +1021,18 @@ table.addEventListener('change', function (e) {
html += '<div class="mb-2"><strong>Remarks</strong><div class="mt-1">';
remarks.forEach(function (r) {
var status = r.resolved_at ? 'Resolved' : 'Active';
var source = (r && r.source) ? String(r.source) : 'manual';
var sourceBadge = '';
if (source === 'autotask_resolution') {
sourceBadge = '<span class="ms-2 badge bg-info text-dark">Autotask</span>';
}
html += '<div class="mb-2 border rounded p-2" data-alert-type="remark" data-id="' + r.id + '">' +
'<div class="d-flex align-items-start justify-content-between gap-2">' +
'<div class="flex-grow-1 min-w-0">' +
'<div class="text-truncate">' +
'<span class="me-1" title="Remark">💬</span>' +
'<span class="fw-semibold">Remark</span>' +
sourceBadge +
'<span class="ms-2 badge ' + (r.resolved_at ? 'bg-secondary' : 'bg-warning text-dark') + '">' + status + '</span>' +
'</div>' +
(r.body ? ('<div class="small text-muted mt-1">' + escapeHtml(r.body) + '</div>') : '') +
@ -1001,13 +1051,13 @@ table.addEventListener('change', function (e) {
Array.prototype.forEach.call(box.querySelectorAll('button[data-action]'), function (btn) {
btn.addEventListener('click', function (ev) {
ev.preventDefault();
ev.stopPropagation();
var action = btn.getAttribute('data-action');
var id = btn.getAttribute('data-id');
if (!action) return;
if (action === 'copy-ticket') {
var code = btn.getAttribute('data-code') || '';
if (!code) return;
var code = (btn.getAttribute('data-code') || '').trim();
copyToClipboard(code, btn);
return;
}

View File

@ -2,7 +2,7 @@
**Branch:** `v20260206-10-audit-logging-expansion`
**Datum:** 2026-02-07
**Status:** Deel 1 compleet, deel 2 nog te doen
**Status:** Volledig afgerond op 2026-03-26 (Deel 1 + Deel 2)
---
@ -23,7 +23,9 @@
---
## 🔄 Wat moet nog (Deel 2)
## ✅ Deel 2 afgerond
Alle onderstaande punten zijn uitgevoerd en opgenomen in de codebase en changelog van deze oplevering.
### 1. UI Updates

View File

@ -2,6 +2,48 @@
This file documents all changes made to this project via Claude Code.
## [2026-04-02]
### Added
- Job Details now supports manual schedule overrides (Daily/Weekly/Monthly) via `POST /jobs/<job_id>/schedule`:
- Operators/admins can save a manual schedule or clear it to fall back to inferred schedule.
- Effective schedule source is shown in Job Details (`manual`, `inferred weekly`, `inferred monthly`).
- Job Details now shows `First backup detected` using the earliest non-missed run timestamp for the job, to make the historical reporting horizon visible.
### Fixed
- Increased missed-run grace/tolerance window from `±1 hour` to `±3 hours` to better handle DST and larger execution-time drift:
- Updated `MISSED_GRACE_WINDOW` in `containers/backupchecks/src/backend/app/main/routes_run_checks.py` to `timedelta(hours=3)` for missed-run generation and duplicate/fulfillment checks.
- Updated `MISSED_GRACE_WINDOW` in `containers/backupchecks/src/backend/app/main/routes_daily_jobs.py` to `timedelta(hours=3)` so Daily Jobs Expected/Missed transitions stay aligned with Run Checks logic.
- Effective schedule resolution now prioritizes manual job schedule over inferred schedule for operational views and missed-run generation:
- Daily Jobs, Search and Dashboard expected/missed calculations now use the effective (manual-first) schedule.
- Run Checks missed-run sweep now generates/removes missed slots from the effective schedule instead of inference-only logic.
- Job Details schedule UI polish:
  - `No schedule` badge now uses a dark background with white text for readable contrast on dark-themed pages.
- `Schedule Override` panel is now collapsed by default and can be expanded on demand.
- Schedule inference now also includes Cove API runs (`source_type='cove_api'`) instead of only mail-linked runs, so Cove jobs can get inferred weekly/monthly schedules.
## [2026-03-30]
### Added
- Autotask resolution sync now persists PSA resolution text as internal active remarks for follow-up visibility:
- Added `remarks.source` and `remarks.ticket_id` in models and migrations.
- New source value `autotask_resolution` is used for remarks created from Autotask ticket resolution content.
- Remarks are linked to job scope and all linked runs (`remark_scopes` + `remark_job_runs`) and remain active (not auto-resolved).
- Deduplication prevents creating duplicate remarks for the same job/ticket/resolution text.
### Fixed
- Autotask-to-remark mirroring now skips Backupchecks-generated resolve updates:
- Marker-based skip for `[Backupchecks] Marked as resolved in Backupchecks`.
- Additional guard to skip when the internal ticket resolution origin is `backupchecks`.
- Run Checks remark cards now show an `Autotask` badge for remarks with `source=autotask_resolution`.
## [2026-03-27]
### Fixed
- Run Checks modal (Cove runs) now remains fully usable on smaller resolutions: the modal height is capped to the visible viewport (`100dvh`), mobile/tablet layout can scroll inside the modal body, and rigid full-height column constraints are relaxed under 992px so content and footer stay reachable.
- Tickets pages/API now compute effective Active/Resolved status from both `tickets.resolved_at` and `ticket_scopes` open/closed state; tickets with all scopes resolved no longer remain incorrectly shown as Active.
- Run Checks and Job Detail ticket copy action (`⧉`) was hardened: click handling now stops propagation and the clipboard fallback path is more robust (secure Clipboard API first, then `execCommand`, then prompt).
## [2026-03-26]
### Fixed

View File

@ -60,6 +60,11 @@ Implemented in `backend/app/migrations.py`:
- Adds `report_definitions.report_config` (TEXT) if it does not exist.
- Stores the JSON report definition for the reporting UI (selected columns, chart types, filters) so the same definition can later be reused for PDF export.
- `migrate_remarks_source_and_ticket_id()`
- Adds `remarks.source` (VARCHAR(64), backfilled to `manual`) if it does not exist.
- Adds `remarks.ticket_id` (INTEGER, FK to `tickets.id`) if it does not exist.
- Adds indexes for source and ticket-based filtering (`idx_remarks_source`, `idx_remarks_ticket_id`).
## Future changes
- Every time you introduce a non-trivial schema change, update: