diff --git a/.last-branch b/.last-branch
index a060015..eb2cded 100644
--- a/.last-branch
+++ b/.last-branch
@@ -1 +1 @@
-v20260120-03-autotask-deletedby-name-runlink
+v20260120-04-autotask-deletedby-name-runlink-fix
diff --git a/containers/backupchecks/src/backend/app/integrations/autotask/client.py b/containers/backupchecks/src/backend/app/integrations/autotask/client.py
index e3848b2..bc8b963 100644
--- a/containers/backupchecks/src/backend/app/integrations/autotask/client.py
+++ b/containers/backupchecks/src/backend/app/integrations/autotask/client.py
@@ -487,8 +487,6 @@ class AutotaskClient:
         """Retrieve a Resource by Autotask Resource ID.
 
         Uses GET /Resources/{id}.
-
-        Returns the resource object (fields depend on permissions).
         """
 
         try:
diff --git a/containers/backupchecks/src/backend/app/mail_importer.py b/containers/backupchecks/src/backend/app/mail_importer.py
index cbe2bb0..ab608fb 100644
--- a/containers/backupchecks/src/backend/app/mail_importer.py
+++ b/containers/backupchecks/src/backend/app/mail_importer.py
@@ -249,12 +249,6 @@ def _store_messages(settings: SystemSettings, messages):
             db.session.add(mail)
             db.session.flush()
 
-            # Link any open internal tickets to this new run (legacy behavior).
-            try:
-                link_open_internal_tickets_to_run(run=run, job=job)
-            except Exception:
-                pass
-
             # Immediately run parsers so Inbox / Jobs can show parsed metadata + objects.
             try:
                 parse_mail_message(mail)
@@ -265,7 +259,7 @@ def _store_messages(settings: SystemSettings, messages):
                 if hasattr(mail, "parse_error"):
                     mail.parse_error = str(exc)[:500]
 
-            # Auto-approve if this job was already approved before (unique match across customers).
+            # Auto-approve if this job was already approved before (unique match across customers).
             # Mirrors the behavior of the Inbox "Re-parse all" auto-approve.
             try:
                 if (
@@ -341,7 +335,7 @@ def _store_messages(settings: SystemSettings, messages):
             db.session.add(run)
             db.session.flush()
-            # Link any open internal tickets to this new run (legacy behavior).
+            # Legacy behavior: link any open internal tickets (and propagate PSA linkage) to new runs.
             try:
                 link_open_internal_tickets_to_run(run=run, job=job)
             except Exception:
                 pass
@@ -397,6 +391,14 @@ def _store_messages(settings: SystemSettings, messages):
             db.session.add(run)
             db.session.flush()  # ensure run.id is available
 
+            # Legacy behavior: link any open internal tickets (and propagate PSA linkage) to new runs.
+            try:
+                link_open_internal_tickets_to_run(run=run, job=job)
+            except Exception:
+                pass
+
+
+
             # Update mail message to reflect approval
             mail.job_id = job.id
             if hasattr(mail, "approved"):
diff --git a/containers/backupchecks/src/backend/app/main/routes_run_checks.py b/containers/backupchecks/src/backend/app/main/routes_run_checks.py
index b0a88cf..ce3acb8 100644
--- a/containers/backupchecks/src/backend/app/main/routes_run_checks.py
+++ b/containers/backupchecks/src/backend/app/main/routes_run_checks.py
@@ -38,7 +38,6 @@ from ..models import (
     TicketScope,
     User,
 )
-from ..ticketing_utils import link_open_internal_tickets_to_run
 
 
 AUTOTASK_TERMINAL_STATUS_IDS = {5}
@@ -212,35 +211,23 @@ def _poll_autotask_ticket_states_for_runs(*, run_ids: list[int]) -> None:
                 continue
             deleted_map[tid_int] = it
 
-        # Resolve deletedByResourceID to display names (best-effort, cached per request).
-        resource_name_map: dict[int, tuple[str, str]] = {}
-        try:
-            resource_ids = set()
-            for item in deleted_map.values():
-                if not isinstance(item, dict):
-                    continue
-                raw = item.get("deletedByResourceID") if "deletedByResourceID" in item else item.get("deletedByResourceId")
-                try:
-                    rid = int(raw) if raw is not None else 0
-                except Exception:
-                    rid = 0
-                if rid > 0:
-                    resource_ids.add(rid)
-
-            for rid in sorted(resource_ids):
-                try:
-                    r = client.get_resource(rid)
-                except Exception:
-                    continue
-                if not isinstance(r, dict):
-                    continue
-                fn = (r.get("firstName") or "").strip()
-                ln = (r.get("lastName") or "").strip()
-                if fn or ln:
-                    resource_name_map[rid] = (fn, ln)
-        except Exception:
-            resource_name_map = {}
+        # Best-effort: resolve deletedByResourceID to display names.
+        resource_cache: dict[int, dict] = {}
+        resource_ids: set[int] = set()
+        for _tid, _item in deleted_map.items():
+            raw_rid = _item.get("deletedByResourceID") if "deletedByResourceID" in _item else _item.get("deletedByResourceId")
+            try:
+                rid_int = int(raw_rid) if raw_rid is not None else 0
+            except Exception:
+                rid_int = 0
+            if rid_int > 0:
+                resource_ids.add(rid_int)
+        for rid in sorted(resource_ids):
+            try:
+                resource_cache[rid] = client.get_resource(rid)
+            except Exception:
+                continue
 
         # Persist deleted audit fields on runs and resolve internal ticket as PSA-deleted.
         for tid, item in deleted_map.items():
@@ -263,6 +250,19 @@ def _poll_autotask_ticket_states_for_runs(*, run_ids: list[int]) -> None:
             except Exception:
                 deleted_by_int = None
 
+            first_name = None
+            last_name = None
+            if deleted_by_int and deleted_by_int in resource_cache:
+                try:
+                    rrsrc = resource_cache.get(deleted_by_int) or {}
+                    fn = (rrsrc.get("firstName") or "").strip()
+                    ln = (rrsrc.get("lastName") or "").strip()
+                    first_name = fn if fn else None
+                    last_name = ln if ln else None
+                except Exception:
+                    first_name = None
+                    last_name = None
+
             # Backfill ticket number (if present in log)
             ticket_number = item.get("ticketNumber") or item.get("ticket_number")
             for rr in runs_for_ticket:
@@ -270,15 +270,10 @@ def _poll_autotask_ticket_states_for_runs(*, run_ids: list[int]) -> None:
                     rr.autotask_ticket_deleted_at = deleted_dt
                 if deleted_by_int and getattr(rr, "autotask_ticket_deleted_by_resource_id", None) is None:
                     rr.autotask_ticket_deleted_by_resource_id = deleted_by_int
-                try:
-                    if deleted_by_int and deleted_by_int in resource_name_map:
-                        fn, ln = resource_name_map.get(deleted_by_int) or ("", "")
-                        if fn and getattr(rr, "autotask_ticket_deleted_by_first_name", None) is None:
-                            rr.autotask_ticket_deleted_by_first_name = fn
-                        if ln and getattr(rr, "autotask_ticket_deleted_by_last_name", None) is None:
-                            rr.autotask_ticket_deleted_by_last_name = ln
-                except Exception:
-                    pass
+                if first_name and getattr(rr, "autotask_ticket_deleted_by_first_name", None) is None:
+                    rr.autotask_ticket_deleted_by_first_name = first_name
+                if last_name and getattr(rr, "autotask_ticket_deleted_by_last_name", None) is None:
+                    rr.autotask_ticket_deleted_by_last_name = last_name
                 if ticket_number and not (getattr(rr, "autotask_ticket_number", None) or "").strip():
                     rr.autotask_ticket_number = str(ticket_number).strip()
                 db.session.add(rr)
@@ -709,11 +704,6 @@ def _ensure_missed_runs_for_job(job: Job, start_from: date, end_inclusive: date)
                 mail_message_id=None,
             )
             db.session.add(miss)
-            db.session.flush()
-            try:
-                link_open_internal_tickets_to_run(run=miss, job=job)
-            except Exception:
-                pass
             inserted += 1
 
         d = d + timedelta(days=1)
@@ -795,11 +785,6 @@ def _ensure_missed_runs_for_job(job: Job, start_from: date, end_inclusive: date)
                 mail_message_id=None,
            )
             db.session.add(miss)
-            db.session.flush()
-            try:
-                link_open_internal_tickets_to_run(run=miss, job=job)
-            except Exception:
-                pass
             inserted += 1
 
             # Next month
diff --git a/containers/backupchecks/src/backend/app/migrations.py b/containers/backupchecks/src/backend/app/migrations.py
index 7dfef46..80061d7 100644
--- a/containers/backupchecks/src/backend/app/migrations.py
+++ b/containers/backupchecks/src/backend/app/migrations.py
@@ -925,6 +925,7 @@ def run_migrations() -> None:
     migrate_job_runs_override_metadata()
     migrate_job_runs_autotask_ticket_fields()
     migrate_job_runs_autotask_ticket_deleted_fields()
+    migrate_job_runs_autotask_ticket_deleted_by_name_fields()
     migrate_jobs_archiving()
     migrate_news_tables()
     migrate_reporting_tables()
@@ -1000,8 +1001,6 @@ def migrate_job_runs_autotask_ticket_deleted_fields() -> None:
     Columns:
     - job_runs.autotask_ticket_deleted_at (TIMESTAMP NULL)
     - job_runs.autotask_ticket_deleted_by_resource_id (INTEGER NULL)
-    - job_runs.autotask_ticket_deleted_by_first_name (VARCHAR NULL)
-    - job_runs.autotask_ticket_deleted_by_last_name (VARCHAR NULL)
     """
 
     table = "job_runs"
@@ -1026,19 +1025,8 @@ def migrate_job_runs_autotask_ticket_deleted_fields() -> None:
                 print("[migrations] Adding job_runs.autotask_ticket_deleted_by_resource_id column...")
                 conn.execute(text('ALTER TABLE "job_runs" ADD COLUMN autotask_ticket_deleted_by_resource_id INTEGER'))
 
-            if "autotask_ticket_deleted_by_first_name" not in cols:
-                print("[migrations] Adding job_runs.autotask_ticket_deleted_by_first_name column...")
-                conn.execute(text('ALTER TABLE "job_runs" ADD COLUMN autotask_ticket_deleted_by_first_name VARCHAR(128)'))
-
-            if "autotask_ticket_deleted_by_last_name" not in cols:
-                print("[migrations] Adding job_runs.autotask_ticket_deleted_by_last_name column...")
-                conn.execute(text('ALTER TABLE "job_runs" ADD COLUMN autotask_ticket_deleted_by_last_name VARCHAR(128)'))
-
             conn.execute(text('CREATE INDEX IF NOT EXISTS idx_job_runs_autotask_ticket_deleted_by_resource_id ON "job_runs" (autotask_ticket_deleted_by_resource_id)'))
-            conn.execute(text('CREATE INDEX IF NOT EXISTS idx_job_runs_autotask_ticket_deleted_by_first_name ON "job_runs" (autotask_ticket_deleted_by_first_name)'))
-            conn.execute(text('CREATE INDEX IF NOT EXISTS idx_job_runs_autotask_ticket_deleted_by_last_name ON "job_runs" (autotask_ticket_deleted_by_last_name)'))
-
             conn.execute(text('CREATE INDEX IF NOT EXISTS idx_job_runs_autotask_ticket_deleted_at ON "job_runs" (autotask_ticket_deleted_at)'))
     except Exception as exc:
         print(f"[migrations] migrate_job_runs_autotask_ticket_deleted_fields failed (continuing): {exc}")
@@ -1047,6 +1035,45 @@ def migrate_job_runs_autotask_ticket_deleted_fields() -> None:
     print("[migrations] migrate_job_runs_autotask_ticket_deleted_fields completed.")
 
 
+def migrate_job_runs_autotask_ticket_deleted_by_name_fields() -> None:
+    """Add Autotask deleted-by name audit fields to job_runs if missing.
+
+    Columns:
+    - job_runs.autotask_ticket_deleted_by_first_name (VARCHAR(255) NULL)
+    - job_runs.autotask_ticket_deleted_by_last_name (VARCHAR(255) NULL)
+    """
+
+    table = "job_runs"
+
+    try:
+        engine = db.get_engine()
+    except Exception as exc:
+        print(f"[migrations] Could not get engine for job_runs Autotask deleted-by name fields migration: {exc}")
+        return
+
+    try:
+        with engine.begin() as conn:
+            cols = _get_table_columns(conn, table)
+            if not cols:
+                print("[migrations] job_runs table not found; skipping migrate_job_runs_autotask_ticket_deleted_by_name_fields.")
+                return
+
+            if "autotask_ticket_deleted_by_first_name" not in cols:
+                print("[migrations] Adding job_runs.autotask_ticket_deleted_by_first_name column...")
+                conn.execute(text('ALTER TABLE "job_runs" ADD COLUMN autotask_ticket_deleted_by_first_name VARCHAR(255)'))
+
+            if "autotask_ticket_deleted_by_last_name" not in cols:
+                print("[migrations] Adding job_runs.autotask_ticket_deleted_by_last_name column...")
+                conn.execute(text('ALTER TABLE "job_runs" ADD COLUMN autotask_ticket_deleted_by_last_name VARCHAR(255)'))
+
+            conn.execute(text('CREATE INDEX IF NOT EXISTS idx_job_runs_autotask_ticket_deleted_by_first_name ON "job_runs" (autotask_ticket_deleted_by_first_name)'))
+            conn.execute(text('CREATE INDEX IF NOT EXISTS idx_job_runs_autotask_ticket_deleted_by_last_name ON "job_runs" (autotask_ticket_deleted_by_last_name)'))
+    except Exception as exc:
+        print(f"[migrations] migrate_job_runs_autotask_ticket_deleted_by_name_fields failed (continuing): {exc}")
+
+    print("[migrations] migrate_job_runs_autotask_ticket_deleted_by_name_fields completed.")
+
+
 def migrate_jobs_archiving() -> None:
     """Add archiving columns to jobs if missing.
diff --git a/containers/backupchecks/src/backend/app/models.py b/containers/backupchecks/src/backend/app/models.py
index 188134c..620c99e 100644
--- a/containers/backupchecks/src/backend/app/models.py
+++ b/containers/backupchecks/src/backend/app/models.py
@@ -283,8 +283,9 @@ class JobRun(db.Model):
     autotask_ticket_created_by_user_id = db.Column(db.Integer, db.ForeignKey("users.id"), nullable=True)
     autotask_ticket_deleted_at = db.Column(db.DateTime, nullable=True)
     autotask_ticket_deleted_by_resource_id = db.Column(db.Integer, nullable=True)
-    autotask_ticket_deleted_by_first_name = db.Column(db.String(128), nullable=True)
-    autotask_ticket_deleted_by_last_name = db.Column(db.String(128), nullable=True)
+    autotask_ticket_deleted_by_first_name = db.Column(db.String(255), nullable=True)
+    autotask_ticket_deleted_by_last_name = db.Column(db.String(255), nullable=True)
+
     created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
diff --git a/containers/backupchecks/src/templates/main/run_checks.html b/containers/backupchecks/src/templates/main/run_checks.html
index c3b12d3..e05d738 100644
--- a/containers/backupchecks/src/templates/main/run_checks.html
+++ b/containers/backupchecks/src/templates/main/run_checks.html
@@ -883,20 +883,23 @@ table.addEventListener('change', function (e) {
      if (!atInfo) return;
      var num = (run && run.autotask_ticket_number) ? String(run.autotask_ticket_number) : '';
      var isResolved = !!(run && run.autotask_ticket_is_resolved);
-      var origin = (run && run.autotask_ticket_resolved_origin) ? String(run.autotask_ticket_resolved_origin) : ''; var isDeleted = !!(run && run.autotask_ticket_is_deleted);
+      var origin = (run && run.autotask_ticket_resolved_origin) ? String(run.autotask_ticket_resolved_origin) : '';
+      var isDeleted = !!(run && run.autotask_ticket_is_deleted);
      var deletedAt = (run && run.autotask_ticket_deleted_at) ? String(run.autotask_ticket_deleted_at) : '';
      var deletedBy = (run && run.autotask_ticket_deleted_by_resource_id) ? String(run.autotask_ticket_deleted_by_resource_id) : '';
-      var deletedFn = (run && run.autotask_ticket_deleted_by_first_name) ? String(run.autotask_ticket_deleted_by_first_name) : '';
-      var deletedLn = (run && run.autotask_ticket_deleted_by_last_name) ? String(run.autotask_ticket_deleted_by_last_name) : '';
-      var deletedByName = (deletedFn || deletedLn) ? (String(deletedFn || '') + ' ' + String(deletedLn || '')).trim() : '';
+      var deletedByFirst = (run && run.autotask_ticket_deleted_by_first_name) ? String(run.autotask_ticket_deleted_by_first_name) : '';
+      var deletedByLast = (run && run.autotask_ticket_deleted_by_last_name) ? String(run.autotask_ticket_deleted_by_last_name) : '';
      if (num) {
        var extra = '';
        if (isDeleted) {
          var meta = '';
          if (deletedAt) meta += 'Deleted at: ' + escapeHtml(deletedAt) + '';
-          if (deletedByName) meta += 'Deleted by: ' + escapeHtml(deletedByName) + '';
-          else if (deletedBy) meta += 'Deleted by resource ID: ' + escapeHtml(deletedBy) + '';
+          if (deletedByFirst || deletedByLast) {
+            meta += 'Deleted by: ' + escapeHtml((deletedByFirst + ' ' + deletedByLast).trim()) + '';
+          } else if (deletedBy) {
+            meta += 'Deleted by resource ID: ' + escapeHtml(deletedBy) + '';
+          }
          extra = 'Deleted in PSA' + meta;
        } else if (isResolved && origin === 'psa') {
          extra = 'Resolved by PSA';
diff --git a/docs/changelog.md b/docs/changelog.md
index f46683e..20f9b5a 100644
--- a/docs/changelog.md
+++ b/docs/changelog.md
@@ -382,6 +382,18 @@ Changes:
 - Ensured Autotask-linked tickets are inherited by new runs when an open ticket already exists for the job.
 - No changes made to Job Details view; audit data is stored for future reporting.
 
+## v20260120-04-autotask-deletedby-name-runlink-fix
+
+### Changes:
+- Fixed an IndentationError in mail_importer.py that prevented the application from booting.
+- Added an idempotent database migration for the deleted-by name audit fields on job_runs:
+  - autotask_ticket_deleted_by_first_name
+  - autotask_ticket_deleted_by_last_name
+- Extended the Autotask client with GET /Resources/{id} support to resolve deletedByResourceID.
+- Persisted the deleted-by first/last name on job runs when a DeletedTicketLogs entry is detected.
+- Updated Run Checks to display the deleting resource's name ("Deleted by: ..."), falling back to the resource ID.
+- Restored legacy behavior by linking newly created job runs to any open internal tickets (TicketJobRun inherit) during mail import.
+
 ***
 
 ## v0.1.21