diff --git a/.last-branch b/.last-branch index 2da1383..36e7b9e 100644 --- a/.last-branch +++ b/.last-branch @@ -1 +1 @@ -v20260113-08-vspc-object-linking-normalize +v20260203-01-autotask-resolve-note diff --git a/containers/backupchecks/src/backend/app/integrations/autotask/__init__.py b/containers/backupchecks/src/backend/app/integrations/autotask/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/containers/backupchecks/src/backend/app/integrations/autotask/client.py b/containers/backupchecks/src/backend/app/integrations/autotask/client.py new file mode 100644 index 0000000..36f0af5 --- /dev/null +++ b/containers/backupchecks/src/backend/app/integrations/autotask/client.py @@ -0,0 +1,697 @@ +import json +from dataclasses import dataclass +from typing import Any, Dict, List, Optional + +import requests + + +@dataclass +class AutotaskZoneInfo: + zone_name: str + api_url: str + web_url: Optional[str] = None + ci: Optional[int] = None + + +class AutotaskError(RuntimeError): + def __init__(self, message: str, status_code: Optional[int] = None) -> None: + super().__init__(message) + self.status_code = status_code + + +class AutotaskClient: + def __init__( + self, + username: str, + password: str, + api_integration_code: str, + environment: str = "production", + timeout_seconds: int = 30, + ) -> None: + self.username = username + self.password = password + self.api_integration_code = api_integration_code + self.environment = (environment or "production").strip().lower() + self.timeout_seconds = timeout_seconds + + self._zone_info: Optional[AutotaskZoneInfo] = None + self._zoneinfo_base_used: Optional[str] = None + + def _zoneinfo_bases(self) -> List[str]: + """Return a list of zoneInformation base URLs to try. + + Autotask tenants can behave differently for Sandbox vs Production. + To keep connection testing reliable, we try the expected base first + and fall back to the alternative if needed. 
+ """ + prod = "https://webservices.autotask.net/atservicesrest" + sb = "https://webservices2.autotask.net/atservicesrest" + if self.environment == "sandbox": + return [sb, prod] + return [prod, sb] + + def get_zone_info(self) -> AutotaskZoneInfo: + if self._zone_info is not None: + return self._zone_info + + last_error: Optional[str] = None + data: Optional[Dict[str, Any]] = None + for base in self._zoneinfo_bases(): + url = f"{base.rstrip('/')}/v1.0/zoneInformation" + params = {"user": self.username} + try: + resp = requests.get(url, params=params, timeout=self.timeout_seconds) + except Exception as exc: + last_error = f"ZoneInformation request failed for {base}: {exc}" + continue + + if resp.status_code >= 400: + last_error = f"ZoneInformation request failed for {base} (HTTP {resp.status_code})." + continue + + try: + data = resp.json() + except Exception: + last_error = f"ZoneInformation response from {base} is not valid JSON." + continue + + self._zoneinfo_base_used = base + break + + if data is None: + raise AutotaskError(last_error or "ZoneInformation request failed.") + + zone = AutotaskZoneInfo( + zone_name=str(data.get("zoneName") or ""), + api_url=str(data.get("url") or "").rstrip("/"), + web_url=(str(data.get("webUrl") or "").rstrip("/") or None), + ci=(int(data["ci"]) if str(data.get("ci") or "").isdigit() else None), + ) + + if not zone.api_url: + raise AutotaskError("ZoneInformation did not return an API URL.") + + self._zone_info = zone + return zone + + def _headers(self) -> Dict[str, str]: + # Autotask REST API requires the ApiIntegrationCode header. + # Some tenants/proxies appear picky despite headers being case-insensitive, + # so we include both common casings for maximum compatibility. 
+ return { + "ApiIntegrationCode": self.api_integration_code, + "APIIntegrationcode": self.api_integration_code, + "Content-Type": "application/json", + "Accept": "application/json", + } + + def _request( + self, + method: str, + path: str, + params: Optional[Dict[str, Any]] = None, + json_body: Optional[Dict[str, Any]] = None, + ) -> Any: + zone = self.get_zone_info() + base = zone.api_url.rstrip("/") + url = f"{base}/v1.0/{path.lstrip('/')}" + headers = self._headers() + + def do_request(use_basic_auth: bool, extra_headers: Optional[Dict[str, str]] = None): + h = dict(headers) + if extra_headers: + h.update(extra_headers) + return requests.request( + method=method.upper(), + url=url, + headers=h, + params=params or None, + json=json_body if json_body is not None else None, + auth=(self.username, self.password) if use_basic_auth else None, + timeout=self.timeout_seconds, + ) + + try: + # Primary auth method: HTTP Basic (username + API secret) + resp = do_request(use_basic_auth=True) + + # Compatibility fallback: some environments accept credentials only via headers. + if resp.status_code == 401: + resp = do_request( + use_basic_auth=False, + extra_headers={"UserName": self.username, "Secret": self.password}, + ) + except Exception as exc: + raise AutotaskError(f"Request failed: {exc}") from exc + + if resp.status_code == 401: + zi_base = self._zoneinfo_base_used or "unknown" + raise AutotaskError( + "Authentication failed (HTTP 401). " + "Verify API Username, API Secret, and ApiIntegrationCode. " + f"Environment={self.environment}, ZoneInfoBase={zi_base}, ZoneApiUrl={zone.api_url}." + , + status_code=401, + ) + if resp.status_code == 403: + raise AutotaskError( + "Access forbidden (HTTP 403). 
API user permissions may be insufficient.", + status_code=403, + ) + if resp.status_code == 404: + raise AutotaskError(f"Resource not found (HTTP 404) for path: {path}", status_code=404) + if resp.status_code >= 400: + raise AutotaskError(f"Autotask API error (HTTP {resp.status_code}).", status_code=resp.status_code) + + try: + return resp.json() + except Exception as exc: + raise AutotaskError("Autotask API response is not valid JSON.") from exc + + def _as_items_list(self, payload: Any) -> List[Dict[str, Any]]: + """Normalize common Autotask REST payload shapes to a list of dicts.""" + if payload is None: + return [] + + if isinstance(payload, list): + return [x for x in payload if isinstance(x, dict)] + + if isinstance(payload, dict): + items = payload.get("items") + if isinstance(items, list): + return [x for x in items if isinstance(x, dict)] + + fields = payload.get("fields") + if isinstance(fields, list): + return [x for x in fields if isinstance(x, dict)] + + # Some endpoints may return a single object. + if "id" in payload: + return [payload] + + return [] + + def _get_collection(self, resource_name: str) -> List[Dict[str, Any]]: + """Fetch a reference collection via GET /. + + Note: Not all Autotask entities support /query. Reference data like Queues and + TicketSources is typically retrieved via a simple collection GET. + """ + data = self._request("GET", resource_name) + return self._as_items_list(data) + + def _get_entity_fields(self, entity_name: str) -> List[Dict[str, Any]]: + data = self._request("GET", f"{entity_name}/entityInformation/fields") + return self._as_items_list(data) + + def _call_picklist_values(self, picklist_values_path: str) -> List[Dict[str, Any]]: + # picklistValues path can be returned as a full URL or as a relative path. 
+ path = (picklist_values_path or "").strip() + if not path: + return [] + + # If a full URL is returned, strip everything up to /v1.0/ + if "/v1.0/" in path: + path = path.split("/v1.0/", 1)[1] + # If it includes the base API URL without /v1.0, strip to resource path. + if "/atservicesrest/" in path and "/v1.0/" not in picklist_values_path: + # Fallback: attempt to strip after atservicesrest/ + path = path.split("/atservicesrest/", 1)[1] + if path.startswith("v1.0/"): + path = path.split("v1.0/", 1)[1] + + data = self._request("GET", path) + return self._as_items_list(data) + + def get_queues(self) -> List[Dict[str, Any]]: + """Return Ticket Queue picklist values. + + Autotask does not expose a universal top-level Queues entity in all tenants. + The reliable source is the Tickets.queueID picklist metadata. + """ + return self._get_ticket_picklist_values(field_names=["queueid", "queue"]) + + def get_ticket_sources(self) -> List[Dict[str, Any]]: + """Return Ticket Source picklist values. + + Similar to queues, Ticket Source values are best retrieved via the + Tickets.source picklist metadata to avoid relying on optional entities. + """ + return self._get_ticket_picklist_values(field_names=["source", "sourceid"]) + + def search_companies(self, query: str, limit: int = 25) -> List[Dict[str, Any]]: + """Search Companies by company name. + + Uses the standard REST query endpoint: + GET /Companies/query?search={...} + + Returns a minimal list of dicts with keys: id, companyName, isActive. + """ + + q = (query or "").strip() + if not q: + return [] + + # Keep payload small and predictable. + # Field names in filters are case-insensitive in many tenants, but the docs + # commonly show CompanyName. 
+ search_payload: Dict[str, Any] = { + "filter": [ + {"op": "contains", "field": "CompanyName", "value": q}, + ], + "maxRecords": int(limit) if int(limit) > 0 else 25, + } + + params = {"search": json.dumps(search_payload)} + data = self._request("GET", "Companies/query", params=params) + items = self._as_items_list(data) + + out: List[Dict[str, Any]] = [] + for it in items: + if not isinstance(it, dict): + continue + cid = it.get("id") + name = it.get("companyName") or it.get("CompanyName") or "" + try: + cid_int = int(cid) + except Exception: + continue + out.append( + { + "id": cid_int, + "companyName": str(name), + "isActive": bool(it.get("isActive", True)), + } + ) + + out.sort(key=lambda x: (x.get("companyName") or "").lower()) + return out + + def get_company(self, company_id: int) -> Dict[str, Any]: + """Fetch a single Company by ID.""" + return self._request("GET", f"Companies/{int(company_id)}") + + def _get_ticket_picklist_values(self, field_names: List[str]) -> List[Dict[str, Any]]: + """Retrieve picklist values for a Tickets field. + + Autotask field metadata can vary between tenants/environments. + We first try exact name matches, then fall back to a contains-match + on the metadata field name/label for picklist fields. + """ + + fields = self._get_entity_fields("Tickets") + wanted = {n.strip().lower() for n in (field_names or []) if str(n).strip()} + + def _field_label(f: Dict[str, Any]) -> str: + # Autotask metadata commonly provides either "label" or "displayName". + return str(f.get("label") or f.get("displayName") or "").strip().lower() + + field: Optional[Dict[str, Any]] = None + + # 1) Exact name match + for f in fields: + name = str(f.get("name") or "").strip().lower() + if name in wanted: + field = f + break + + # 2) Fallback: contains match for picklists (handles QueueID vs TicketQueueID etc.) 
+ if field is None and wanted: + candidates: List[Dict[str, Any]] = [] + for f in fields: + if not bool(f.get("isPickList")): + continue + name = str(f.get("name") or "").strip().lower() + label = _field_label(f) + if any(w in name for w in wanted) or any(w in label for w in wanted): + candidates.append(f) + + if candidates: + # Prefer the most specific/shortest name match to avoid overly broad matches. + candidates.sort(key=lambda x: len(str(x.get("name") or ""))) + field = candidates[0] + + if not field: + raise AutotaskError( + "Unable to locate Tickets field metadata for picklist retrieval: " + f"{sorted(wanted)}" + ) + + if not bool(field.get("isPickList")): + raise AutotaskError(f"Tickets.{field.get('name')} is not marked as a picklist in Autotask metadata.") + + picklist_values = field.get("picklistValues") + # Autotask may return picklist values inline (as a list) or as a URL/path. + if isinstance(picklist_values, list): + return [x for x in picklist_values if isinstance(x, dict)] + + if not isinstance(picklist_values, str) or not picklist_values.strip(): + raise AutotaskError(f"Tickets.{field.get('name')} metadata did not include picklist values.") + + return self._call_picklist_values(picklist_values) + + def get_ticket_priorities(self) -> List[Dict[str, Any]]: + """Return Ticket Priority picklist values. + + We intentionally retrieve this from entity metadata to prevent hardcoded priority IDs. 
+ """ + fields = self._get_entity_fields("Tickets") + priority_field: Optional[Dict[str, Any]] = None + + def _field_label(f: Dict[str, Any]) -> str: + return str(f.get("label") or f.get("displayName") or "").strip().lower() + + # Exact match first + for f in fields: + name = str(f.get("name") or "").strip().lower() + if name == "priority": + priority_field = f + break + + # Fallback: contains match (handles variations like TicketPriority) + if priority_field is None: + candidates: List[Dict[str, Any]] = [] + for f in fields: + if not bool(f.get("isPickList")): + continue + name = str(f.get("name") or "").strip().lower() + label = _field_label(f) + if "priority" in name or "priority" in label: + candidates.append(f) + if candidates: + candidates.sort(key=lambda x: len(str(x.get("name") or ""))) + priority_field = candidates[0] + + if not priority_field: + raise AutotaskError("Unable to locate a Tickets priority picklist field in Autotask metadata.") + + if not bool(priority_field.get("isPickList")): + raise AutotaskError("Tickets.priority is not marked as a picklist in Autotask metadata.") + + picklist_values = priority_field.get("picklistValues") + if isinstance(picklist_values, list): + return [x for x in picklist_values if isinstance(x, dict)] + + if not isinstance(picklist_values, str) or not picklist_values.strip(): + raise AutotaskError("Tickets.priority metadata did not include picklist values.") + + return self._call_picklist_values(picklist_values) + + def get_ticket_statuses(self) -> List[Dict[str, Any]]: + """Return Ticket Status picklist values. + + We retrieve this from Tickets field metadata to avoid hardcoded status IDs. + """ + return self._get_ticket_picklist_values(field_names=["status", "statusid"]) + + def create_ticket(self, payload: Dict[str, Any]) -> Dict[str, Any]: + """Create a Ticket in Autotask. + + Uses POST /Tickets. + Returns the created ticket object (as returned by Autotask). 
+ """ + if not isinstance(payload, dict) or not payload: + raise AutotaskError("Ticket payload is empty.") + + data = self._request("POST", "Tickets", json_body=payload) + # Autotask commonly returns only an itemId on create. + # We normalize that into a dict with an "id" key so callers can + # perform a follow-up GET /Tickets/{id} to retrieve the full object + # and the human-facing ticketNumber. + if isinstance(data, dict): + if "item" in data and isinstance(data.get("item"), dict): + return data["item"] + if "items" in data and isinstance(data.get("items"), list) and data.get("items"): + first = data.get("items")[0] + if isinstance(first, dict): + return first + # Autotask create responses often look like: {"itemId": 12345} + item_id = data.get("itemId") + if item_id is None: + item_id = data.get("itemID") + if item_id is not None: + try: + tid = int(item_id) + except Exception: + tid = 0 + if tid > 0: + return {"id": tid} + if "id" in data: + return data + # Fallback: return normalized first item if possible + items = self._as_items_list(data) + if items: + return items[0] + + raise AutotaskError("Autotask did not return a created ticket object.") + + + def get_ticket(self, ticket_id: int) -> Dict[str, Any]: + """Retrieve a Ticket by Autotask Ticket ID. + + Uses GET /Tickets/{id}. + + This is the authoritative retrieval method and is mandatory after creation, + because the create response does not reliably include the human-facing + ticket number. 
+ """ + + try: + tid = int(ticket_id) + except Exception: + raise AutotaskError("Invalid ticket id.") + + if tid <= 0: + raise AutotaskError("Invalid ticket id.") + + data = self._request("GET", f"Tickets/{tid}") + if isinstance(data, dict): + if "item" in data and isinstance(data.get("item"), dict): + return data["item"] + if "items" in data and isinstance(data.get("items"), list) and data.get("items"): + first = data.get("items")[0] + if isinstance(first, dict): + return first + # Some environments return the raw object + if "id" in data or "ticketNumber" in data or "number" in data: + return data + + items = self._as_items_list(data) + if items: + return items[0] + + raise AutotaskError("Autotask did not return a ticket object.") + + + def update_ticket(self, ticket_payload: Dict[str, Any]) -> Dict[str, Any]: + """Update a Ticket via PUT /Tickets. + + Autotask does not support PATCH for Tickets. PUT behaves as a full update. + Callers must construct a valid payload (typically by copying required fields + from a fresh GET /Tickets/{id} response) and changing only intended fields. + """ + + if not isinstance(ticket_payload, dict): + raise AutotaskError("Invalid ticket payload.") + + try: + tid = int(ticket_payload.get("id") or 0) + except Exception: + tid = 0 + if tid <= 0: + raise AutotaskError("Invalid ticket id in payload.") + + data = self._request("PUT", "Tickets", json_body=ticket_payload) + if isinstance(data, dict): + if "item" in data and isinstance(data.get("item"), dict): + return data["item"] + if "items" in data and isinstance(data.get("items"), list) and data.get("items"): + first = data.get("items")[0] + if isinstance(first, dict): + return first + # Some environments return the raw object + if "id" in data: + return data + + items = self._as_items_list(data) + if items: + return items[0] + + # PUT may return an empty body in some tenants; treat as success. 
+ return {"id": tid} + + + def get_resource(self, resource_id: int) -> Dict[str, Any]: + """Retrieve a Resource by Autotask Resource ID. + + Uses GET /Resources/{id}. + """ + + try: + rid = int(resource_id) + except Exception: + raise AutotaskError("Invalid resource id.") + + if rid <= 0: + raise AutotaskError("Invalid resource id.") + + data = self._request("GET", f"Resources/{rid}") + if isinstance(data, dict): + if "item" in data and isinstance(data.get("item"), dict): + return data["item"] + if "items" in data and isinstance(data.get("items"), list) and data.get("items"): + first = data.get("items")[0] + if isinstance(first, dict): + return first + if "id" in data or "firstName" in data or "lastName" in data: + return data + + items = self._as_items_list(data) + if items: + return items[0] + + raise AutotaskError("Autotask did not return a resource object.") + + + def query_deleted_ticket_logs_by_ticket_ids(self, ticket_ids: List[int]) -> List[Dict[str, Any]]: + """Query DeletedTicketLogs for a set of ticket IDs. + + Uses POST /DeletedTicketLogs/query. + + Returns list items including ticketID, ticketNumber, deletedByResourceID, deletedDateTime. + """ + + ids: List[int] = [] + for x in ticket_ids or []: + try: + v = int(x) + except Exception: + continue + if v > 0: + ids.append(v) + + if not ids: + return [] + + # Field name differs across docs/tenants (ticketID vs ticketId). + # Autotask query field matching is case-insensitive in most tenants; we use the common ticketID. + payload = { + "filter": [ + {"op": "in", "field": "ticketID", "value": ids}, + ] + } + + data = self._request("POST", "DeletedTicketLogs/query", json_body=payload) + return self._as_items_list(data) + + def query_tickets_by_ids( + self, + ticket_ids: List[int], + *, + exclude_status_ids: Optional[List[int]] = None, + ) -> List[Dict[str, Any]]: + """Query Tickets by ID, optionally excluding statuses. + + Uses POST /Tickets/query. 
+ + Note: + - This endpoint is not authoritative (tickets can be missing). + - Call get_ticket(id) as a fallback for missing IDs. + """ + + ids: List[int] = [] + for x in ticket_ids or []: + try: + v = int(x) + except Exception: + continue + if v > 0: + ids.append(v) + + if not ids: + return [] + + flt: List[Dict[str, Any]] = [ + { + "op": "in", + "field": "id", + "value": ids, + } + ] + + ex: List[int] = [] + for x in exclude_status_ids or []: + try: + v = int(x) + except Exception: + continue + if v > 0: + ex.append(v) + + if ex: + flt.append( + { + "op": "notIn", + "field": "status", + "value": ex, + } + ) + + data = self._request("POST", "Tickets/query", json_body={"filter": flt}) + return self._as_items_list(data) + + def query_tickets_for_company( + self, + company_id: int, + *, + search: str = "", + exclude_status_ids: Optional[List[int]] = None, + limit: int = 50, + ) -> List[Dict[str, Any]]: + """Query Tickets for a specific company, optionally searching by ticket number or title. + + Uses POST /Tickets/query. + + Note: + - Autotask query operators vary by tenant; we use common operators (eq, contains). + - If the query fails due to operator support, callers should fall back to get_ticket(id). 
+ """ + + try: + cid = int(company_id) + except Exception: + cid = 0 + if cid <= 0: + return [] + + flt: List[Dict[str, Any]] = [ + {"op": "eq", "field": "companyID", "value": cid}, + ] + + ex: List[int] = [] + for x in exclude_status_ids or []: + try: + v = int(x) + except Exception: + continue + if v > 0: + ex.append(v) + if ex: + flt.append({"op": "notIn", "field": "status", "value": ex}) + + q = (search or "").strip() + if q: + # Ticket numbers in Autotask are typically like T20260119.0004 + if q.upper().startswith("T") and any(ch.isdigit() for ch in q): + flt.append({"op": "eq", "field": "ticketNumber", "value": q.strip()}) + else: + # Broad search on title + flt.append({"op": "contains", "field": "title", "value": q}) + + data = self._request("POST", "Tickets/query", json_body={"filter": flt}) + items = self._as_items_list(data) + + # Respect limit if tenant returns more. + if limit and isinstance(limit, int) and limit > 0: + return items[: int(limit)] + return items diff --git a/containers/backupchecks/src/backend/app/mail_importer.py b/containers/backupchecks/src/backend/app/mail_importer.py index d2479c7..ab608fb 100644 --- a/containers/backupchecks/src/backend/app/mail_importer.py +++ b/containers/backupchecks/src/backend/app/mail_importer.py @@ -16,6 +16,7 @@ from .parsers import parse_mail_message from .parsers.veeam import extract_vspc_active_alarms_companies from .email_utils import normalize_from_address, extract_best_html_from_eml, is_effectively_blank_html from .job_matching import find_matching_job +from .ticketing_utils import link_open_internal_tickets_to_run GRAPH_TOKEN_URL_TEMPLATE = "https://login.microsoftonline.com/{tenant_id}/oauth2/v2.0/token" @@ -258,7 +259,7 @@ def _store_messages(settings: SystemSettings, messages): if hasattr(mail, "parse_error"): mail.parse_error = str(exc)[:500] - # Auto-approve if this job was already approved before (unique match across customers). 
+ # Auto-approve if this job was already approved before (unique match across customers). # Mirrors the behavior of the Inbox "Re-parse all" auto-approve. try: if ( @@ -334,6 +335,12 @@ def _store_messages(settings: SystemSettings, messages): db.session.add(run) db.session.flush() + # Legacy behavior: link any open internal tickets (and propagate PSA linkage) to new runs. + try: + link_open_internal_tickets_to_run(run=run, job=job) + except Exception: + pass + auto_approved_runs.append((job.customer_id, job.id, run.id, mail.id)) created_any = True @@ -384,6 +391,14 @@ def _store_messages(settings: SystemSettings, messages): db.session.add(run) db.session.flush() # ensure run.id is available + # Legacy behavior: link any open internal tickets (and propagate PSA linkage) to new runs. + try: + link_open_internal_tickets_to_run(run=run, job=job) + except Exception: + pass + + + # Update mail message to reflect approval mail.job_id = job.id if hasattr(mail, "approved"): diff --git a/containers/backupchecks/src/backend/app/main/routes_api.py b/containers/backupchecks/src/backend/app/main/routes_api.py index 0da7b4f..395968a 100644 --- a/containers/backupchecks/src/backend/app/main/routes_api.py +++ b/containers/backupchecks/src/backend/app/main/routes_api.py @@ -347,6 +347,8 @@ def api_ticket_resolve(ticket_id: int): open_scope = TicketScope.query.filter_by(ticket_id=ticket.id, resolved_at=None).first() if open_scope is None and ticket.resolved_at is None: ticket.resolved_at = now + if getattr(ticket, "resolved_origin", None) is None: + ticket.resolved_origin = "backupchecks" db.session.commit() except Exception as exc: @@ -358,6 +360,8 @@ def api_ticket_resolve(ticket_id: int): # Global resolve (from central ticket list): resolve ticket and all scopes if ticket.resolved_at is None: ticket.resolved_at = now + if getattr(ticket, "resolved_origin", None) is None: + ticket.resolved_origin = "backupchecks" try: # Resolve any still-open scopes diff --git 
a/containers/backupchecks/src/backend/app/main/routes_customers.py b/containers/backupchecks/src/backend/app/main/routes_customers.py index 688ecb0..e54ab83 100644 --- a/containers/backupchecks/src/backend/app/main/routes_customers.py +++ b/containers/backupchecks/src/backend/app/main/routes_customers.py @@ -1,11 +1,48 @@ from .routes_shared import * # noqa: F401,F403 +# Explicit imports for robustness across mixed deployments. +from datetime import datetime + +from ..database import db +from ..models import SystemSettings + + +def _get_or_create_settings_local(): + """Return SystemSettings, creating a default row if missing. + + This module should not depend on star-imported helpers for settings. + Mixed deployments (partial container updates) can otherwise raise a + NameError on /customers when the shared helper is not present. + """ + + settings = SystemSettings.query.first() + if settings is None: + settings = SystemSettings( + auto_import_enabled=False, + auto_import_interval_minutes=15, + auto_import_max_items=50, + manual_import_batch_size=50, + auto_import_cutoff_date=datetime.utcnow().date(), + ingest_eml_retention_days=7, + ) + db.session.add(settings) + db.session.commit() + return settings + @main_bp.route("/customers") @login_required @roles_required("admin", "operator", "viewer") def customers(): items = Customer.query.order_by(Customer.name.asc()).all() + settings = _get_or_create_settings_local() + autotask_enabled = bool(getattr(settings, "autotask_enabled", False)) + autotask_configured = bool( + (getattr(settings, "autotask_api_username", None)) + and (getattr(settings, "autotask_api_password", None)) + and (getattr(settings, "autotask_tracking_identifier", None)) + ) + rows = [] for c in items: # Count jobs linked to this customer @@ -19,6 +56,14 @@ def customers(): "name": c.name, "active": bool(c.active), "job_count": job_count, + "autotask_company_id": getattr(c, "autotask_company_id", None), + "autotask_company_name": getattr(c, 
"autotask_company_name", None), + "autotask_mapping_status": getattr(c, "autotask_mapping_status", None), + "autotask_last_sync_at": ( + getattr(c, "autotask_last_sync_at", None).isoformat(timespec="seconds") + if getattr(c, "autotask_last_sync_at", None) + else None + ), } ) @@ -28,9 +73,259 @@ def customers(): "main/customers.html", customers=rows, can_manage=can_manage, + autotask_enabled=autotask_enabled, + autotask_configured=autotask_configured, ) +def _get_autotask_client_or_raise(): + """Build an AutotaskClient from settings or raise a user-safe exception.""" + settings = _get_or_create_settings_local() + if not bool(getattr(settings, "autotask_enabled", False)): + raise RuntimeError("Autotask integration is disabled.") + if not settings.autotask_api_username or not settings.autotask_api_password or not settings.autotask_tracking_identifier: + raise RuntimeError("Autotask settings incomplete.") + + from ..integrations.autotask.client import AutotaskClient + + return AutotaskClient( + username=settings.autotask_api_username, + password=settings.autotask_api_password, + api_integration_code=settings.autotask_tracking_identifier, + environment=(settings.autotask_environment or "production"), + ) + + +@main_bp.get("/api/autotask/companies/search") +@login_required +@roles_required("admin", "operator") +def api_autotask_companies_search(): + q = (request.args.get("q") or "").strip() + if not q: + return jsonify({"status": "ok", "items": []}) + + try: + client = _get_autotask_client_or_raise() + items = client.search_companies(q, limit=25) + return jsonify({"status": "ok", "items": items}) + except Exception as exc: + return jsonify({"status": "error", "message": str(exc) or "Search failed."}), 400 + + +def _normalize_company_name(company: dict) -> str: + # Autotask REST payload shapes vary between tenants/endpoints. 
@main_bp.get("/api/customers/<int:customer_id>/autotask-mapping")
@login_required
@roles_required("admin", "operator", "viewer")
def api_customer_autotask_mapping_get(customer_id: int):
    """Return the Autotask company mapping stored on a customer.

    Fix: the route previously read ``/api/customers//autotask-mapping`` —
    the ``<int:customer_id>`` converter was missing, so Flask could never
    bind the ``customer_id`` view argument.
    """
    c = Customer.query.get_or_404(customer_id)
    return jsonify(
        {
            "status": "ok",
            "customer": {
                "id": c.id,
                "autotask_company_id": getattr(c, "autotask_company_id", None),
                "autotask_company_name": getattr(c, "autotask_company_name", None),
                "autotask_mapping_status": getattr(c, "autotask_mapping_status", None),
                "autotask_last_sync_at": (
                    getattr(c, "autotask_last_sync_at", None).isoformat(timespec="seconds")
                    if getattr(c, "autotask_last_sync_at", None)
                    else None
                ),
            },
        }
    )


@main_bp.post("/api/customers/<int:customer_id>/autotask-mapping")
@login_required
@roles_required("admin", "operator")
def api_customer_autotask_mapping_set(customer_id: int):
    """Link a customer to an Autotask company by id.

    Validates the company id, fetches the company from Autotask to capture
    its display name, then persists the mapping with status ``ok``.
    Returns HTTP 400 with a message on any validation or API failure.
    """
    c = Customer.query.get_or_404(customer_id)
    payload = request.get_json(silent=True) or {}
    company_id = payload.get("company_id")
    try:
        company_id_int = int(company_id)
    except Exception:
        return jsonify({"status": "error", "message": "Invalid company_id."}), 400

    try:
        client = _get_autotask_client_or_raise()
        company = client.get_company(company_id_int)
        name = _normalize_company_name(company)

        c.autotask_company_id = company_id_int
        c.autotask_company_name = name
        c.autotask_mapping_status = "ok"
        c.autotask_last_sync_at = datetime.utcnow()

        db.session.commit()
        return jsonify({"status": "ok"})
    except Exception as exc:
        db.session.rollback()
        return jsonify({"status": "error", "message": str(exc) or "Failed to set mapping."}), 400


@main_bp.post("/api/customers/<int:customer_id>/autotask-mapping/clear")
@login_required
@roles_required("admin", "operator")
def api_customer_autotask_mapping_clear(customer_id: int):
    """Remove the Autotask company mapping from a customer.

    The last-sync timestamp is still updated so the UI can show when the
    mapping last changed.
    """
    c = Customer.query.get_or_404(customer_id)
    try:
        c.autotask_company_id = None
        c.autotask_company_name = None
        c.autotask_mapping_status = None
        c.autotask_last_sync_at = datetime.utcnow()
        db.session.commit()
        return jsonify({"status": "ok"})
    except Exception as exc:
        db.session.rollback()
        return jsonify({"status": "error", "message": str(exc) or "Failed to clear mapping."}), 400


@main_bp.post("/api/customers/<int:customer_id>/autotask-mapping/refresh")
@login_required
@roles_required("admin", "operator")
def api_customer_autotask_mapping_refresh(customer_id: int):
    """Re-validate a single customer's Autotask mapping against the PSA.

    Outcomes (stored in ``autotask_mapping_status``):
      - ``ok``      — company found, name unchanged
      - ``renamed`` — company found, display name differs from stored name
      - ``invalid`` — Autotask returned 404 (company deleted/missing)
      - ``missing`` — any other Autotask error (treated as temporary)
    All PSA-error paths return HTTP 200 with the derived status so the UI
    can render the state; only unexpected local errors return 400.
    """
    from ..integrations.autotask.client import AutotaskError

    c = Customer.query.get_or_404(customer_id)
    company_id = getattr(c, "autotask_company_id", None)
    if not company_id:
        # Nothing mapped: report "no status" rather than an error.
        return jsonify({"status": "ok", "mapping_status": None})

    try:
        client = _get_autotask_client_or_raise()
        company = client.get_company(int(company_id))
        name = _normalize_company_name(company)

        prev = (getattr(c, "autotask_company_name", None) or "").strip()
        if prev and name and prev != name:
            c.autotask_company_name = name
            c.autotask_mapping_status = "renamed"
        else:
            c.autotask_company_name = name
            c.autotask_mapping_status = "ok"
        c.autotask_last_sync_at = datetime.utcnow()

        db.session.commit()
        return jsonify({"status": "ok", "mapping_status": c.autotask_mapping_status, "company_name": c.autotask_company_name})
    except AutotaskError as exc:
        try:
            code = getattr(exc, "status_code", None)
        except Exception:
            code = None

        # 404 -> deleted/missing company in Autotask
        if code == 404:
            try:
                c.autotask_mapping_status = "invalid"
                c.autotask_last_sync_at = datetime.utcnow()
                db.session.commit()
            except Exception:
                db.session.rollback()
            return jsonify({"status": "ok", "mapping_status": "invalid"})

        # Other errors: keep mapping but mark as missing (temporary/unreachable)
        try:
            c.autotask_mapping_status = "missing"
            c.autotask_last_sync_at = datetime.utcnow()
            db.session.commit()
        except Exception:
            db.session.rollback()
        return jsonify({"status": "ok", "mapping_status": "missing", "message": str(exc)})
    except Exception as exc:
        db.session.rollback()
        return jsonify({"status": "error", "message": str(exc) or "Refresh failed."}), 400


@main_bp.post("/api/customers/autotask-mapping/refresh-all")
@login_required
@roles_required("admin", "operator")
def api_customers_autotask_mapping_refresh_all():
    """Refresh mapping status for all customers that have an Autotask company ID.

    Iterates every mapped customer, classifying each as ok/renamed/invalid/
    missing using the same rules as the single-customer refresh, and commits
    all changes in one transaction.
    """

    from ..integrations.autotask.client import AutotaskError

    customers = Customer.query.filter(Customer.autotask_company_id.isnot(None)).all()
    if not customers:
        return jsonify({"status": "ok", "refreshed": 0, "counts": {"ok": 0, "renamed": 0, "missing": 0, "invalid": 0}})

    try:
        client = _get_autotask_client_or_raise()
    except Exception as exc:
        return jsonify({"status": "error", "message": str(exc) or "Autotask is not configured."}), 400

    counts = {"ok": 0, "renamed": 0, "missing": 0, "invalid": 0}
    refreshed = 0

    for c in customers:
        company_id = getattr(c, "autotask_company_id", None)
        if not company_id:
            continue
        try:
            company = client.get_company(int(company_id))
            name = _normalize_company_name(company)

            prev = (getattr(c, "autotask_company_name", None) or "").strip()
            if prev and name and prev != name:
                c.autotask_company_name = name
                c.autotask_mapping_status = "renamed"
                counts["renamed"] += 1
            else:
                c.autotask_company_name = name
                c.autotask_mapping_status = "ok"
                counts["ok"] += 1
            c.autotask_last_sync_at = datetime.utcnow()
            refreshed += 1
        except AutotaskError as exc:
            try:
                code = getattr(exc, "status_code", None)
            except Exception:
                code = None

            if code == 404:
                c.autotask_mapping_status = "invalid"
                counts["invalid"] += 1
            else:
                c.autotask_mapping_status = "missing"
                counts["missing"] += 1
            c.autotask_last_sync_at = datetime.utcnow()
            refreshed += 1
        except Exception:
            # Non-PSA failure for this customer: mark missing and continue.
            c.autotask_mapping_status = "missing"
            c.autotask_last_sync_at = datetime.utcnow()
            counts["missing"] += 1
            refreshed += 1

    try:
        db.session.commit()
        return jsonify({"status": "ok", "refreshed": refreshed, "counts": counts})
    except Exception as exc:
        db.session.rollback()
        return jsonify({"status": "error", "message": str(exc) or "Failed to refresh all mappings."}), 400
db.session.flush() + db.session.flush() # Commit any mapping updates so they are visible immediately in the UI. try: @@ -507,7 +513,7 @@ def inbox_message_approve_vspc_companies(message_id: int): auto_approve=True, ) db.session.add(job) - db.session.flush() + db.session.flush() if not first_job: first_job = job @@ -523,6 +529,7 @@ def inbox_message_approve_vspc_companies(message_id: int): # De-duplicate: do not create multiple runs for the same (mail_message_id, job_id). run = JobRun.query.filter(JobRun.job_id == job.id, JobRun.mail_message_id == msg.id).first() + created = False if run: skipped_existing += 1 else: @@ -535,9 +542,17 @@ def inbox_message_approve_vspc_companies(message_id: int): ) if hasattr(run, "remark"): run.remark = getattr(msg, "overall_message", None) - db.session.add(run) - db.session.flush() + created = True + + # Ensure we have IDs before linking tickets or persisting objects. + db.session.flush() + try: + link_open_internal_tickets_to_run(run=run, job=job) + except Exception: + pass + + if created: created_runs.append(run) # Persist objects for reporting (idempotent upsert; safe to repeat). 
@@ -683,7 +698,12 @@ def inbox_message_approve_vspc_companies(message_id: int): if hasattr(run2, "remark"): run2.remark = getattr(other, "overall_message", None) db.session.add(run2) - db.session.flush() + + db.session.flush() + try: + link_open_internal_tickets_to_run(run=run2, job=job2) + except Exception: + pass # Persist objects per company try: @@ -1049,7 +1069,12 @@ def inbox_reparse_all(): run.storage_free_percent = msg.storage_free_percent db.session.add(run) + db.session.flush() + try: + link_open_internal_tickets_to_run(run=run, job=job) + except Exception: + pass auto_approved_runs.append((job.customer_id, job.id, run.id, msg.id)) created_any = True @@ -1110,6 +1135,10 @@ def inbox_reparse_all(): db.session.add(run) db.session.flush() # ensure run.id is available + try: + link_open_internal_tickets_to_run(run=run, job=job) + except Exception: + pass auto_approved_runs.append((job.customer_id, job.id, run.id, msg.id)) msg.job_id = job.id @@ -1209,6 +1238,10 @@ def inbox_reparse_all(): db.session.add(run) db.session.flush() + try: + link_open_internal_tickets_to_run(run=run, job=job) + except Exception: + pass auto_approved_runs.append((job.customer_id, job.id, run.id, msg.id)) msg.job_id = job.id @@ -1287,4 +1320,4 @@ def inbox_reparse_all(): "info", ) - return redirect(url_for("main.inbox")) + return redirect(url_for("main.inbox")) \ No newline at end of file diff --git a/containers/backupchecks/src/backend/app/main/routes_run_checks.py b/containers/backupchecks/src/backend/app/main/routes_run_checks.py index b073af2..6e67d72 100644 --- a/containers/backupchecks/src/backend/app/main/routes_run_checks.py +++ b/containers/backupchecks/src/backend/app/main/routes_run_checks.py @@ -4,7 +4,8 @@ import calendar from datetime import date, datetime, time, timedelta, timezone -from flask import jsonify, render_template, request +from flask import jsonify, render_template, request, url_for +from urllib.parse import urljoin from flask_login import current_user, 
login_required from sqlalchemy import and_, or_, func, text @@ -32,9 +33,527 @@ from ..models import ( MailMessage, MailObject, Override, + Ticket, + TicketJobRun, + TicketScope, User, ) + +AUTOTASK_TERMINAL_STATUS_IDS = {5} + + +def _ensure_internal_ticket_for_autotask( + *, + ticket_number: str, + job: Job | None, + run_ids: list[int], + now: datetime, + active_from_dt: datetime | None, +) -> Ticket | None: + """Best-effort: ensure an internal Ticket exists and is linked to the provided runs.""" + + code = (ticket_number or "").strip().upper() + if not code: + return None + + ticket = Ticket.query.filter(Ticket.ticket_code == code).first() + + if ticket is None: + # Align with manual ticket creation: active_from_date must be <= the run date + # so legacy ticket visibility works for historical runs. + base_dt = active_from_dt or now + active_from = _to_amsterdam_date(base_dt) or base_dt.date() + ticket = Ticket( + ticket_code=code, + description="", + active_from_date=active_from, + start_date=now, + ) + db.session.add(ticket) + db.session.flush() + + # Ensure job scope exists (for Daily Jobs / Job Details filtering), best-effort. + if job is not None and getattr(job, "id", None): + try: + existing = TicketScope.query.filter_by(ticket_id=ticket.id, scope_type="job", job_id=job.id).first() + if existing is None: + db.session.add( + TicketScope( + ticket_id=ticket.id, + scope_type="job", + customer_id=job.customer_id, + backup_software=job.backup_software, + backup_type=job.backup_type, + job_id=job.id, + job_name_match=job.job_name, + job_name_match_mode="exact", + ) + ) + except Exception: + pass + + # Ensure run links. 
+ for rid in run_ids or []: + if rid <= 0: + continue + if not TicketJobRun.query.filter_by(ticket_id=ticket.id, job_run_id=rid).first(): + db.session.add(TicketJobRun(ticket_id=ticket.id, job_run_id=rid, link_source="autotask")) + + return ticket + + +def _resolve_internal_ticket_for_job( + *, + ticket: Ticket, + job: Job | None, + run_ids: list[int], + now: datetime, + origin: str = "psa", +) -> None: + """Resolve the ticket (and its job scope) as PSA-driven, best-effort.""" + + if ticket.resolved_at is None: + ticket.resolved_at = now + if not (getattr(ticket, "resolved_origin", None) or "").strip(): + ticket.resolved_origin = origin + + # Resolve all still-open scopes. + try: + TicketScope.query.filter_by(ticket_id=ticket.id, resolved_at=None).update({"resolved_at": now}) + except Exception: + pass + + # Ensure job scope exists and is resolved. + if job is not None and getattr(job, "id", None): + try: + scope = TicketScope.query.filter_by(ticket_id=ticket.id, scope_type="job", job_id=job.id).first() + if scope is None: + scope = TicketScope( + ticket_id=ticket.id, + scope_type="job", + customer_id=job.customer_id, + backup_software=job.backup_software, + backup_type=job.backup_type, + job_id=job.id, + job_name_match=job.job_name, + job_name_match_mode="exact", + resolved_at=now, + ) + db.session.add(scope) + else: + if scope.resolved_at is None: + scope.resolved_at = now + except Exception: + pass + + # Keep audit links to runs. + for rid in run_ids or []: + if rid <= 0: + continue + if not TicketJobRun.query.filter_by(ticket_id=ticket.id, job_run_id=rid).first(): + db.session.add(TicketJobRun(ticket_id=ticket.id, job_run_id=rid, link_source="autotask")) + + +def _poll_autotask_ticket_states_for_runs(*, run_ids: list[int]) -> None: + """Phase 2: Read-only PSA-driven ticket completion sync. + + Best-effort: never blocks page load. 
+ """ + + if not run_ids: + return + + settings = _get_or_create_settings() + if not bool(getattr(settings, "autotask_enabled", False)): + return + + # Build ticket id -> run ids mapping. + runs = JobRun.query.filter(JobRun.id.in_(run_ids)).all() + ticket_to_runs: dict[int, list[JobRun]] = {} + for r in runs: + tid = getattr(r, "autotask_ticket_id", None) + try: + tid_int = int(tid) if tid is not None else 0 + except Exception: + tid_int = 0 + if tid_int <= 0: + continue + ticket_to_runs.setdefault(tid_int, []).append(r) + + if not ticket_to_runs: + return + + try: + client = _build_autotask_client_from_settings() + except Exception: + return + + now = datetime.utcnow() + ticket_ids = sorted(ticket_to_runs.keys()) + + # Deleted tickets: check DeletedTicketLogs first (authoritative). + deleted_map: dict[int, dict] = {} + try: + deleted_items = client.query_deleted_ticket_logs_by_ticket_ids(ticket_ids) + except Exception: + deleted_items = [] + + for it in deleted_items or []: + if not isinstance(it, dict): + continue + raw_tid = it.get("ticketID") if "ticketID" in it else it.get("ticketId") + try: + tid_int = int(raw_tid) if raw_tid is not None else 0 + except Exception: + tid_int = 0 + if tid_int <= 0: + continue + deleted_map[tid_int] = it + + # Best-effort: resolve deletedByResourceID to display names. + resource_cache: dict[int, dict] = {} + resource_ids: set[int] = set() + for _tid, _item in deleted_map.items(): + raw_rid = _item.get("deletedByResourceID") if "deletedByResourceID" in _item else _item.get("deletedByResourceId") + try: + rid_int = int(raw_rid) if raw_rid is not None else 0 + except Exception: + rid_int = 0 + if rid_int > 0: + resource_ids.add(rid_int) + + for rid in sorted(resource_ids): + try: + resource_cache[rid] = client.get_resource(rid) + except Exception: + continue + + # Persist deleted audit fields on runs and resolve internal ticket as PSA-deleted. 
+ for tid, item in deleted_map.items(): + runs_for_ticket = ticket_to_runs.get(tid) or [] + if not runs_for_ticket: + continue + deleted_by = item.get("deletedByResourceID") if "deletedByResourceID" in item else item.get("deletedByResourceId") + deleted_dt_raw = item.get("deletedDateTime") or item.get("deletedDatetime") or item.get("deletedAt") + deleted_dt = None + if deleted_dt_raw: + try: + s = str(deleted_dt_raw).replace("Z", "+00:00") + deleted_dt = datetime.fromisoformat(s) + if deleted_dt.tzinfo is not None: + deleted_dt = deleted_dt.astimezone(timezone.utc).replace(tzinfo=None) + except Exception: + deleted_dt = None + try: + deleted_by_int = int(deleted_by) if deleted_by is not None else None + except Exception: + deleted_by_int = None + + first_name = None + last_name = None + if deleted_by_int and deleted_by_int in resource_cache: + try: + rrsrc = resource_cache.get(deleted_by_int) or {} + fn = (rrsrc.get("firstName") or "").strip() + ln = (rrsrc.get("lastName") or "").strip() + first_name = fn if fn else None + last_name = ln if ln else None + except Exception: + first_name = None + last_name = None + + # Backfill ticket number (if present in log) + ticket_number = item.get("ticketNumber") or item.get("ticket_number") + for rr in runs_for_ticket: + if deleted_dt and getattr(rr, "autotask_ticket_deleted_at", None) is None: + rr.autotask_ticket_deleted_at = deleted_dt + if deleted_by_int and getattr(rr, "autotask_ticket_deleted_by_resource_id", None) is None: + rr.autotask_ticket_deleted_by_resource_id = deleted_by_int + if first_name and getattr(rr, "autotask_ticket_deleted_by_first_name", None) is None: + rr.autotask_ticket_deleted_by_first_name = first_name + if last_name and getattr(rr, "autotask_ticket_deleted_by_last_name", None) is None: + rr.autotask_ticket_deleted_by_last_name = last_name + if ticket_number and not (getattr(rr, "autotask_ticket_number", None) or "").strip(): + rr.autotask_ticket_number = str(ticket_number).strip() + 
db.session.add(rr) + + # Resolve internal ticket with origin psa_deleted (best-effort) + tn = "" + if ticket_number: + tn = str(ticket_number).strip() + if not tn: + for rr in runs_for_ticket: + if (getattr(rr, "autotask_ticket_number", None) or "").strip(): + tn = rr.autotask_ticket_number.strip() + break + job = Job.query.get(runs_for_ticket[0].job_id) if runs_for_ticket else None + active_from_dt = None + try: + dts = [getattr(x, "run_at", None) for x in runs_for_ticket if getattr(x, "run_at", None)] + active_from_dt = min(dts) if dts else None + except Exception: + active_from_dt = None + internal_ticket = _ensure_internal_ticket_for_autotask( + ticket_number=tn, + job=job, + run_ids=[int(x.id) for x in runs_for_ticket if getattr(x, "id", None)], + now=now, + active_from_dt=active_from_dt, + ) + if internal_ticket is not None: + _resolve_internal_ticket_for_job( + ticket=internal_ticket, + job=job, + run_ids=[int(x.id) for x in runs_for_ticket if getattr(x, "id", None)], + now=deleted_dt or now, + origin="psa_deleted", + ) + + # Optimization: query non-terminal tickets first; fallback to GET by id for missing. + try: + active_items = client.query_tickets_by_ids(ticket_ids, exclude_status_ids=sorted(AUTOTASK_TERMINAL_STATUS_IDS)) + except Exception: + active_items = [] + + active_map: dict[int, dict] = {} + for it in active_items or []: + try: + iid = int(it.get("id") or 0) + except Exception: + iid = 0 + if iid > 0: + active_map[iid] = it + + missing_ids = [tid for tid in ticket_ids if tid not in active_map and tid not in deleted_map] + + # Process active tickets: backfill ticket numbers + ensure internal ticket link. + try: + for tid, item in active_map.items(): + runs_for_ticket = ticket_to_runs.get(tid) or [] + ticket_number = None + if isinstance(item, dict): + ticket_number = item.get("ticketNumber") or item.get("number") or item.get("ticket_number") + # Backfill missing stored ticket number. 
+ if ticket_number: + for rr in runs_for_ticket: + if not (getattr(rr, "autotask_ticket_number", None) or "").strip(): + rr.autotask_ticket_number = str(ticket_number).strip() + db.session.add(rr) + + # Ensure internal ticket exists and is linked. + tn = (str(ticket_number).strip() if ticket_number else "") + if not tn: + # Try from DB + for rr in runs_for_ticket: + if (getattr(rr, "autotask_ticket_number", None) or "").strip(): + tn = rr.autotask_ticket_number.strip() + break + + job = Job.query.get(runs_for_ticket[0].job_id) if runs_for_ticket else None + active_from_dt = None + try: + dts = [getattr(x, 'run_at', None) for x in runs_for_ticket if getattr(x, 'run_at', None)] + active_from_dt = min(dts) if dts else None + except Exception: + active_from_dt = None + _ensure_internal_ticket_for_autotask( + ticket_number=tn, + job=job, + run_ids=[int(x.id) for x in runs_for_ticket if getattr(x, "id", None)], + now=now, + active_from_dt=active_from_dt, + ) + except Exception: + # Continue to missing-id fallback. + pass + + # Fallback for missing ids (could be terminal, deleted, or query omission). + for tid in missing_ids: + try: + t = client.get_ticket(tid) + except Exception: + continue + + status_id = None + if isinstance(t, dict): + status_id = t.get("status") or t.get("statusId") or t.get("statusID") + try: + status_int = int(status_id) if status_id is not None else 0 + except Exception: + status_int = 0 + + ticket_number = None + if isinstance(t, dict): + ticket_number = t.get("ticketNumber") or t.get("number") or t.get("ticket_number") + + runs_for_ticket = ticket_to_runs.get(tid) or [] + # Backfill stored ticket number if missing. 
+ if ticket_number: + for rr in runs_for_ticket: + if not (getattr(rr, "autotask_ticket_number", None) or "").strip(): + rr.autotask_ticket_number = str(ticket_number).strip() + db.session.add(rr) + + job = Job.query.get(runs_for_ticket[0].job_id) if runs_for_ticket else None + + active_from_dt = None + try: + dts = [getattr(x, 'run_at', None) for x in runs_for_ticket if getattr(x, 'run_at', None)] + active_from_dt = min(dts) if dts else None + except Exception: + active_from_dt = None + + tn = (str(ticket_number).strip() if ticket_number else "") + if not tn: + for rr in runs_for_ticket: + if (getattr(rr, "autotask_ticket_number", None) or "").strip(): + tn = rr.autotask_ticket_number.strip() + break + + internal_ticket = _ensure_internal_ticket_for_autotask( + ticket_number=tn, + job=job, + run_ids=[int(x.id) for x in runs_for_ticket if getattr(x, "id", None)], + now=now, + active_from_dt=active_from_dt, + ) + + # If terminal in PSA: resolve internally. + resolved_at = None + try: + if isinstance(t, dict): + resolved_at_raw = t.get("resolvedDateTime") or t.get("completedDate") or t.get("completedDateTime") + else: + resolved_at_raw = None + if resolved_at_raw: + s_dt = str(resolved_at_raw).replace("Z", "+00:00") + resolved_at = datetime.fromisoformat(s_dt) + if resolved_at.tzinfo is not None: + resolved_at = resolved_at.astimezone(timezone.utc).replace(tzinfo=None) + except Exception: + resolved_at = None + + is_terminal = False + if status_int in AUTOTASK_TERMINAL_STATUS_IDS: + is_terminal = True + if resolved_at is not None: + is_terminal = True + + if internal_ticket is not None and is_terminal: + _resolve_internal_ticket_for_job( + ticket=internal_ticket, + job=job, + run_ids=[int(x.id) for x in runs_for_ticket if getattr(x, "id", None)], + now=resolved_at or now, + origin="psa", + ) + + try: + db.session.commit() + except Exception: + db.session.rollback() + + +def _build_autotask_client_from_settings(): + """Build an AutotaskClient from stored settings or 
raise a user-safe exception.""" + settings = _get_or_create_settings() + if not getattr(settings, "autotask_enabled", False): + raise RuntimeError("Autotask integration is disabled.") + + required = [ + getattr(settings, "autotask_environment", None), + getattr(settings, "autotask_api_username", None), + getattr(settings, "autotask_api_password", None), + getattr(settings, "autotask_tracking_identifier", None), + ] + if any(not (x and str(x).strip()) for x in required): + raise RuntimeError("Autotask settings incomplete.") + + from ..integrations.autotask.client import AutotaskClient + + return AutotaskClient( + username=settings.autotask_api_username, + password=settings.autotask_api_password, + api_integration_code=settings.autotask_tracking_identifier, + environment=settings.autotask_environment, + ) + + +def _determine_autotask_severity(status_text: str | None) -> str: + s = (status_text or "").strip().lower() + if "warning" in s: + return "warning" + if "error" in s or "fail" in s: + return "error" + if "missed" in s: + return "error" + return "warning" + + +def _compose_autotask_ticket_description( + *, + settings, + job: Job, + run: JobRun, + status_display: str, + overall_message: str, + objects_payload: list[dict[str, str]], +) -> str: + tz_name = _get_ui_timezone_name() or "Europe/Amsterdam" + run_dt = run.run_at + run_at_str = _format_datetime(run_dt) if run_dt else "-" + + base_url = (getattr(settings, "autotask_base_url", None) or "").strip() + job_rel = url_for("main.job_detail", job_id=job.id) + # Link to Job Details with a hint for the specific run. 
+ job_link = urljoin(base_url.rstrip("/") + "/", job_rel.lstrip("/")) + if run.id: + job_link = f"{job_link}?run_id={int(run.id)}" + + lines: list[str] = [] + lines.append(f"Customer: {job.customer.name if job.customer else ''}") + lines.append(f"Job: {job.job_name or ''}") + lines.append(f"Backup: {job.backup_software or ''} / {job.backup_type or ''}") + lines.append(f"Run at ({tz_name}): {run_at_str}") + lines.append(f"Status: {status_display or ''}") + lines.append("") + + overall_message = (overall_message or "").strip() + if overall_message: + lines.append("Summary:") + lines.append(overall_message) + lines.append("") + lines.append("Multiple objects reported messages. See Backupchecks for full details.") + else: + # Fallback to object-level messages with a hard limit. + limit = 10 + shown = 0 + total = 0 + for o in objects_payload or []: + name = (o.get("name") or "").strip() + err = (o.get("error_message") or "").strip() + st = (o.get("status") or "").strip() + if not name: + continue + if not err and not st: + continue + total += 1 + if shown >= limit: + continue + msg = err or st + lines.append(f"- {name}: {msg}") + shown += 1 + + if total == 0: + lines.append("No detailed object messages available. See Backupchecks for full details.") + elif total > shown: + lines.append(f"And {int(total - shown)} additional objects reported similar messages.") + + lines.append("") + lines.append(f"Backupchecks details: {job_link}") + return "\n".join(lines).strip() + "\n" + # Grace window for matching real runs to an expected schedule slot. # A run within +/- 1 hour of the inferred schedule time counts as fulfilling the slot. MISSED_GRACE_WINDOW = timedelta(hours=1) @@ -338,6 +857,15 @@ def run_checks_page(): # Don't block the page if missed-run generation fails. pass + # Phase 2 (read-only PSA driven): sync internal ticket resolved state based on PSA ticket status. + # Best-effort: never blocks page load. 
+ try: + run_q = JobRun.query.filter(JobRun.reviewed_at.is_(None), JobRun.autotask_ticket_id.isnot(None)) + run_ids = [int(x) for (x,) in run_q.with_entities(JobRun.id).limit(800).all()] + _poll_autotask_ticket_states_for_runs(run_ids=run_ids) + except Exception: + pass + # Aggregated per-job rows base = ( db.session.query( @@ -595,11 +1123,15 @@ def run_checks_page(): } ) + settings = _get_or_create_settings() + autotask_enabled = bool(getattr(settings, "autotask_enabled", False)) + return render_template( "main/run_checks.html", rows=payload, is_admin=(get_active_role() == "admin"), include_reviewed=include_reviewed, + autotask_enabled=autotask_enabled, ) @@ -627,6 +1159,20 @@ def run_checks_details(): runs = q.order_by(func.coalesce(JobRun.run_at, JobRun.created_at).desc(), JobRun.id.desc()).limit(400).all() + # Prefetch internal ticket resolution info for Autotask-linked runs (Phase 2 UI). + autotask_codes = set() + for _r in runs: + code = (getattr(_r, "autotask_ticket_number", None) or "").strip() + if code: + autotask_codes.add(code) + ticket_by_code = {} + if autotask_codes: + try: + for _t in Ticket.query.filter(Ticket.ticket_code.in_(list(autotask_codes))).all(): + ticket_by_code[_t.ticket_code] = _t + except Exception: + ticket_by_code = {} + runs_payload = [] for run in runs: msg = MailMessage.query.get(run.mail_message_id) if run.mail_message_id else None @@ -732,6 +1278,20 @@ def run_checks_details(): except Exception: pass + # Autotask ticket resolution info (derived from internal Ticket) + at_resolved = False + at_resolved_origin = "" + at_resolved_at = "" + try: + _code = (getattr(run, "autotask_ticket_number", None) or "").strip() + if _code and _code in ticket_by_code: + _t = ticket_by_code[_code] + at_resolved = getattr(_t, "resolved_at", None) is not None + at_resolved_origin = (getattr(_t, "resolved_origin", None) or "") + at_resolved_at = _format_datetime(getattr(_t, "resolved_at", None)) if getattr(_t, "resolved_at", None) else "" + except 
Exception: + pass + status_display = run.status or "-" try: status_display, _, _, _ov_id, _ov_reason = _apply_overrides_to_run(job, run) @@ -753,6 +1313,16 @@ def run_checks_details(): "mail": mail_meta, "body_html": body_html, "objects": objects_payload, + "autotask_ticket_id": getattr(run, "autotask_ticket_id", None), + "autotask_ticket_number": getattr(run, "autotask_ticket_number", None) or "", + "autotask_ticket_is_resolved": bool(at_resolved), + "autotask_ticket_resolved_origin": at_resolved_origin, + "autotask_ticket_resolved_at": at_resolved_at, + "autotask_ticket_is_deleted": bool(getattr(run, "autotask_ticket_deleted_at", None)), + "autotask_ticket_deleted_at": _format_datetime(getattr(run, "autotask_ticket_deleted_at", None)) if getattr(run, "autotask_ticket_deleted_at", None) else "", + "autotask_ticket_deleted_by_resource_id": getattr(run, "autotask_ticket_deleted_by_resource_id", None), + "autotask_ticket_deleted_by_first_name": getattr(run, "autotask_ticket_deleted_by_first_name", None) or "", + "autotask_ticket_deleted_by_last_name": getattr(run, "autotask_ticket_deleted_by_last_name", None) or "", } ) @@ -770,6 +1340,562 @@ def run_checks_details(): return jsonify({"status": "ok", "job": job_payload, "runs": runs_payload}) +@main_bp.post("/api/run-checks/autotask-ticket") +@login_required +@roles_required("admin", "operator") +def api_run_checks_create_autotask_ticket(): + """Create an Autotask ticket for a specific run. + + Enforces: exactly one ticket per run. + """ + data = request.get_json(silent=True) or {} + try: + run_id = int(data.get("run_id") or 0) + except Exception: + run_id = 0 + + if run_id <= 0: + return jsonify({"status": "error", "message": "Invalid parameters."}), 400 + + run = JobRun.query.get(run_id) + if not run: + return jsonify({"status": "error", "message": "Run not found."}), 404 + + # If a ticket is already linked we normally prevent duplicate creation. + # Exception: if the linked ticket is resolved (e.g. 
resolved by PSA), allow creating a new ticket. + if getattr(run, "autotask_ticket_id", None): + already_resolved = False + try: + code = (getattr(run, "autotask_ticket_number", None) or "").strip() + if code: + t = Ticket.query.filter_by(ticket_code=code).first() + already_resolved = bool(getattr(t, "resolved_at", None)) if t else False + except Exception: + already_resolved = False + if not already_resolved: + return jsonify( + { + "status": "ok", + "ticket_id": int(run.autotask_ticket_id), + "ticket_number": getattr(run, "autotask_ticket_number", None) or "", + "already_exists": True, + } + ) + # resolved -> continue, create a new Autotask ticket and overwrite current linkage. + + job = Job.query.get(run.job_id) + if not job: + return jsonify({"status": "error", "message": "Job not found."}), 404 + + customer = Customer.query.get(job.customer_id) if getattr(job, "customer_id", None) else None + if not customer: + return jsonify({"status": "error", "message": "Customer not found."}), 404 + + if not getattr(customer, "autotask_company_id", None): + return jsonify({"status": "error", "message": "Customer has no Autotask company mapping."}), 400 + + if (getattr(customer, "autotask_mapping_status", None) or "").strip().lower() not in ("ok", "renamed"): + return jsonify({"status": "error", "message": "Autotask company mapping is not valid."}), 400 + + settings = _get_or_create_settings() + + base_url = (getattr(settings, "autotask_base_url", None) or "").strip() + if not base_url: + return jsonify({"status": "error", "message": "Autotask Base URL is not configured."}), 400 + + # Required ticket defaults + if not getattr(settings, "autotask_default_queue_id", None): + return jsonify({"status": "error", "message": "Autotask default queue is not configured."}), 400 + if not getattr(settings, "autotask_default_ticket_source_id", None): + return jsonify({"status": "error", "message": "Autotask default ticket source is not configured."}), 400 + if not getattr(settings, 
"autotask_default_ticket_status", None): + return jsonify({"status": "error", "message": "Autotask default ticket status is not configured."}), 400 + + # Determine display status (including overrides) for consistent subject/priority mapping. + status_display = run.status or "-" + try: + status_display, _, _, _ov_id, _ov_reason = _apply_overrides_to_run(job, run) + except Exception: + status_display = run.status or "-" + + severity = _determine_autotask_severity(status_display) + priority_id = None + if severity == "warning": + priority_id = getattr(settings, "autotask_priority_warning", None) + else: + priority_id = getattr(settings, "autotask_priority_error", None) + + # Load mail + objects for ticket composition. + msg = MailMessage.query.get(run.mail_message_id) if run.mail_message_id else None + overall_message = (getattr(msg, "overall_message", None) or "") if msg else "" + + objects_payload: list[dict[str, str]] = [] + try: + objs = run.objects.order_by(JobObject.object_name.asc()).all() + except Exception: + objs = list(run.objects or []) + for o in objs or []: + objects_payload.append( + { + "name": getattr(o, "object_name", "") or "", + "type": getattr(o, "object_type", "") or "", + "status": getattr(o, "status", "") or "", + "error_message": getattr(o, "error_message", "") or "", + } + ) + + if (not objects_payload) and msg: + try: + mos = MailObject.query.filter_by(mail_message_id=msg.id).order_by(MailObject.object_name.asc()).all() + except Exception: + mos = [] + for mo in mos or []: + objects_payload.append( + { + "name": getattr(mo, "object_name", "") or "", + "type": getattr(mo, "object_type", "") or "", + "status": getattr(mo, "status", "") or "", + "error_message": getattr(mo, "error_message", "") or "", + } + ) + + subject = f"[Backupchecks] {customer.name} - {job.job_name or ''} - {status_display}" + description = _compose_autotask_ticket_description( + settings=settings, + job=job, + run=run, + status_display=status_display, + 
overall_message=overall_message, + objects_payload=objects_payload, + ) + + payload = { + "companyID": int(customer.autotask_company_id), + "title": subject, + "description": description, + "queueID": int(settings.autotask_default_queue_id), + "source": int(settings.autotask_default_ticket_source_id), + "status": int(settings.autotask_default_ticket_status), + } + if priority_id: + payload["priority"] = int(priority_id) + + try: + client = _build_autotask_client_from_settings() + created = client.create_ticket(payload) + except Exception as exc: + return jsonify({"status": "error", "message": f"Autotask ticket creation failed: {exc}"}), 400 + + ticket_id = created.get("id") if isinstance(created, dict) else None + ticket_number = None + if isinstance(created, dict): + ticket_number = created.get("ticketNumber") or created.get("number") or created.get("ticket_number") + + if not ticket_id: + return jsonify({"status": "error", "message": "Autotask did not return a ticket id."}), 400 + + # Mandatory post-create retrieval: create response does not reliably include Ticket Number. + ticket_number_str = (str(ticket_number).strip() if ticket_number is not None else "").strip() + try: + if not ticket_number_str: + fetched = client.get_ticket(int(ticket_id)) + if isinstance(fetched, dict): + ticket_number_str = ( + str(fetched.get("ticketNumber") or fetched.get("number") or fetched.get("ticket_number") or "").strip() + ) + except Exception: + ticket_number_str = ticket_number_str + + now = datetime.utcnow() + + try: + run.autotask_ticket_id = int(ticket_id) + except Exception: + run.autotask_ticket_id = None + + run.autotask_ticket_number = (ticket_number_str or "") or None + run.autotask_ticket_created_at = now + run.autotask_ticket_created_by_user_id = current_user.id + + # Propagate linkage to all active (unreviewed) runs of the same job. 
+ active_runs: list[JobRun] = [] + try: + active_runs = JobRun.query.filter(JobRun.job_id == job.id, JobRun.reviewed_at.is_(None)).all() + except Exception: + active_runs = [run] + + run_ids: list[int] = [] + for rr in active_runs or []: + if getattr(rr, "id", None): + run_ids.append(int(rr.id)) + if getattr(rr, "autotask_ticket_id", None) is None: + rr.autotask_ticket_id = int(ticket_id) + if ticket_number_str and not (getattr(rr, "autotask_ticket_number", None) or "").strip(): + rr.autotask_ticket_number = ticket_number_str + db.session.add(rr) + + # Create/repair internal Ticket + TicketJobRun links (legacy UI compatibility). + if ticket_number_str: + _ensure_internal_ticket_for_autotask(ticket_number=ticket_number_str, job=job, run_ids=run_ids, now=now, active_from_dt=getattr(run, 'run_at', None) or now) + + try: + db.session.add(run) + db.session.commit() + except Exception as exc: + db.session.rollback() + return jsonify({"status": "error", "message": f"Failed to store ticket reference: {exc}"}), 500 + + # If Ticket Number is still unknown, surface that explicitly (ticket id is still stored). + if not (run.autotask_ticket_number or "").strip(): + return jsonify( + { + "status": "ok", + "ticket_id": int(run.autotask_ticket_id) if run.autotask_ticket_id else None, + "ticket_number": "", + "already_exists": False, + "warning": "Ticket created, but ticket number could not be retrieved.", + } + ) + + return jsonify( + { + "status": "ok", + "ticket_id": int(run.autotask_ticket_id) if run.autotask_ticket_id else None, + "ticket_number": run.autotask_ticket_number or "", + "already_exists": False, + } + ) + + + + +@main_bp.get("/api/run-checks/autotask-existing-tickets") +@login_required +@roles_required("admin", "operator") +def api_run_checks_autotask_existing_tickets(): + """List open (non-terminal) Autotask tickets for the selected run's customer. + + Phase 2.2: used by the Run Checks modal to link an existing PSA ticket. 
+ """ + + try: + run_id = int(request.args.get("run_id") or 0) + except Exception: + run_id = 0 + + q = (request.args.get("q") or "").strip() + + if run_id <= 0: + return jsonify({"status": "error", "message": "Invalid parameters."}), 400 + + run = JobRun.query.get(run_id) + if not run: + return jsonify({"status": "error", "message": "Run not found."}), 404 + + job = Job.query.get(run.job_id) + if not job: + return jsonify({"status": "error", "message": "Job not found."}), 404 + + customer = Customer.query.get(job.customer_id) if getattr(job, "customer_id", None) else None + if not customer: + return jsonify({"status": "error", "message": "Customer not found."}), 404 + + if not getattr(customer, "autotask_company_id", None): + return jsonify({"status": "error", "message": "Customer has no Autotask company mapping."}), 400 + + if (getattr(customer, "autotask_mapping_status", None) or "").strip().lower() not in ("ok", "renamed"): + return jsonify({"status": "error", "message": "Autotask company mapping is not valid."}), 400 + + settings = _get_or_create_settings() + + # Map status ID -> label from cached settings (kept in sync by Settings page). + status_map = {} + try: + import json as _json + + raw = getattr(settings, "autotask_cached_ticket_statuses_json", None) + if raw: + for x in (_json.loads(raw) or []): + if isinstance(x, dict) and "value" in x: + status_map[str(x.get("value"))] = str(x.get("label") or "") + except Exception: + status_map = {} + + try: + client = _build_autotask_client_from_settings() + + # Ensure we have a status map; if empty, fetch and cache once. 
+        if not status_map:
+            try:
+                import json as _json
+
+                statuses = client.get_ticket_statuses()
+                settings.autotask_cached_ticket_statuses_json = _json.dumps([{"id": int(x["value"]), "name": str(x.get("label") or "")} for x in (statuses or []) if isinstance(x, dict) and str(x.get("value", "")).lstrip("-").isdigit()])
+                settings.autotask_reference_last_sync_at = datetime.utcnow()
+                db.session.commit()
+                for x in (statuses or []):
+                    if isinstance(x, dict) and "value" in x:
+                        status_map[str(x.get("value"))] = str(x.get("label") or "")
+            except Exception:
+                # Best-effort; list will still work without labels.
+                pass
+
+        tickets = client.query_tickets_for_company(
+            int(customer.autotask_company_id),
+            search=q,
+            exclude_status_ids=sorted(AUTOTASK_TERMINAL_STATUS_IDS),
+            limit=75,
+        )
+    except Exception as exc:
+        return jsonify({"status": "error", "message": f"Autotask ticket lookup failed: {exc}"}), 400
+
+    items = []
+    for t in tickets or []:
+        if not isinstance(t, dict):
+            continue
+        tid = t.get("id")
+        tnum = (t.get("ticketNumber") or t.get("number") or "")
+        title = (t.get("title") or "")
+        st = t.get("status")
+        try:
+            st_int = int(st) if st is not None else None
+        except Exception:
+            st_int = None
+        st_label = status_map.get(str(st_int)) if st_int is not None else ""
+        items.append(
+            {
+                "id": tid,
+                "ticketNumber": str(tnum or ""),
+                "title": str(title or ""),
+                "status": st_int,
+                "statusLabel": st_label or "",
+            }
+        )
+
+    # Sort: newest-ish first. Autotask query ordering isn't guaranteed, so we provide a stable sort.
+    items.sort(key=lambda x: (x.get("ticketNumber") or ""), reverse=True)
+
+    return jsonify({"status": "ok", "items": items})
+
+
+@main_bp.post("/api/run-checks/autotask-link-existing-ticket")
+@login_required
+@roles_required("admin", "operator")
+def api_run_checks_autotask_link_existing_ticket():
+    """Link an existing Autotask ticket to the selected run (and propagate to all active runs of the job).
+
+    Phase 2.2: used by the Run Checks modal.
+ """ + + data = request.get_json(silent=True) or {} + + try: + run_id = int(data.get("run_id") or 0) + except Exception: + run_id = 0 + + try: + ticket_id = int(data.get("ticket_id") or 0) + except Exception: + ticket_id = 0 + + if run_id <= 0 or ticket_id <= 0: + return jsonify({"status": "error", "message": "Invalid parameters."}), 400 + + run = JobRun.query.get(run_id) + if not run: + return jsonify({"status": "error", "message": "Run not found."}), 404 + + # Do not overwrite an existing link unless the current one is resolved/deleted. + if getattr(run, "autotask_ticket_id", None): + return jsonify({"status": "error", "message": "Run already has an Autotask ticket linked."}), 400 + + job = Job.query.get(run.job_id) + if not job: + return jsonify({"status": "error", "message": "Job not found."}), 404 + + customer = Customer.query.get(job.customer_id) if getattr(job, "customer_id", None) else None + if not customer: + return jsonify({"status": "error", "message": "Customer not found."}), 404 + + if not getattr(customer, "autotask_company_id", None): + return jsonify({"status": "error", "message": "Customer has no Autotask company mapping."}), 400 + + if (getattr(customer, "autotask_mapping_status", None) or "").strip().lower() not in ("ok", "renamed"): + return jsonify({"status": "error", "message": "Autotask company mapping is not valid."}), 400 + + try: + client = _build_autotask_client_from_settings() + t = client.get_ticket(ticket_id) + except Exception as exc: + return jsonify({"status": "error", "message": f"Autotask ticket retrieval failed: {exc}"}), 400 + + if not isinstance(t, dict): + return jsonify({"status": "error", "message": "Autotask did not return a ticket object."}), 400 + + # Enforce company scope. 
+ try: + t_company = int(t.get("companyID") or 0) + except Exception: + t_company = 0 + + if t_company != int(customer.autotask_company_id): + return jsonify({"status": "error", "message": "Selected ticket does not belong to the mapped Autotask company."}), 400 + + tnum = (t.get("ticketNumber") or t.get("number") or "") + tnum = str(tnum or "").strip() + if not tnum: + return jsonify({"status": "error", "message": "Autotask ticket does not have a ticket number."}), 400 + + # Block terminal tickets from being linked (Phase 2.2 only lists open tickets, but enforce server-side). + try: + st = int(t.get("status")) if t.get("status") is not None else 0 + except Exception: + st = 0 + if st in AUTOTASK_TERMINAL_STATUS_IDS: + return jsonify({"status": "error", "message": "Cannot link a terminal/completed Autotask ticket."}), 400 + + now = datetime.utcnow() + + run.autotask_ticket_id = int(ticket_id) + run.autotask_ticket_number = tnum + run.autotask_ticket_created_at = now + run.autotask_ticket_created_by_user_id = current_user.id + + # Propagate linkage to all active (unreviewed) runs of the same job. + active_runs = ( + JobRun.query.filter(JobRun.job_id == job.id, JobRun.reviewed_at.is_(None)).order_by(JobRun.id.asc()).all() + ) + run_ids = [] + for rr in active_runs or []: + if getattr(rr, "id", None) is None: + continue + rr.autotask_ticket_id = int(ticket_id) + rr.autotask_ticket_number = tnum + if getattr(rr, "autotask_ticket_created_at", None) is None: + rr.autotask_ticket_created_at = now + if getattr(rr, "autotask_ticket_created_by_user_id", None) is None: + rr.autotask_ticket_created_by_user_id = current_user.id + run_ids.append(int(rr.id)) + + # Ensure internal Ticket + TicketJobRun linkage for legacy ticket behavior. 
+ internal_ticket = None + try: + internal_ticket = _ensure_internal_ticket_for_autotask( + ticket_number=tnum, + job=job, + run_ids=run_ids, + now=now, + active_from_dt=now, + ) + except Exception: + internal_ticket = None + + try: + db.session.commit() + except Exception: + db.session.rollback() + return jsonify({"status": "error", "message": "Failed to persist Autotask ticket link."}), 500 + + return jsonify( + { + "status": "ok", + "ticket_id": int(ticket_id), + "ticket_number": tnum, + "internal_ticket_id": int(getattr(internal_ticket, "id", 0) or 0) if internal_ticket else 0, + } + ) + + +@main_bp.post("/api/run-checks/autotask-resolve-note") +@login_required +@roles_required("admin", "operator") +def api_run_checks_autotask_resolve_note(): + """Post a 'should be resolved' update to an existing Autotask ticket. + + This first-step implementation does NOT close the ticket in Autotask. + It updates the Ticket description via PUT /Tickets (TicketNotes create is + not reliably supported across tenants). 
+ """ + + data = request.get_json(silent=True) or {} + + try: + run_id = int(data.get("run_id") or 0) + except Exception: + run_id = 0 + + if run_id <= 0: + return jsonify({"status": "error", "message": "Invalid parameters."}), 400 + + run = JobRun.query.get(run_id) + if not run: + return jsonify({"status": "error", "message": "Run not found."}), 404 + + if not getattr(run, "autotask_ticket_id", None): + return jsonify({"status": "error", "message": "Run has no Autotask ticket linked."}), 400 + + try: + ticket_id = int(run.autotask_ticket_id) + except Exception: + ticket_id = 0 + + if ticket_id <= 0: + return jsonify({"status": "error", "message": "Run has an invalid Autotask ticket id."}), 400 + + try: + client = _build_autotask_client_from_settings() + t = client.get_ticket(ticket_id) + except Exception as exc: + return jsonify({"status": "error", "message": f"Autotask ticket retrieval failed: {exc}"}), 400 + + if not isinstance(t, dict): + return jsonify({"status": "error", "message": "Autotask did not return a ticket object."}), 400 + + # Build an update payload based on known required fields from Postman validation. + required_fields = ["id", "companyID", "queueID", "title", "priority", "status", "dueDateTime"] + missing = [f for f in required_fields if t.get(f) in (None, "")] + if missing: + return jsonify( + { + "status": "error", + "message": "Cannot safely update Autotask ticket because required fields are missing: " + + ", ".join(missing), + } + ), 400 + + existing_desc = str(t.get("description") or "") + + now = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%SZ") + actor = (getattr(current_user, "email", None) or getattr(current_user, "username", None) or "operator") + marker = "[Backupchecks] Marked as resolved in Backupchecks" + note = f"\n\n{marker} (ticket remains open in Autotask). {now} by {actor}" + + # Avoid runaway growth if the same action is clicked multiple times. 
+ if marker in existing_desc: + new_desc = existing_desc + else: + new_desc = (existing_desc or "") + note + + payload = { + "id": int(t.get("id")), + "companyID": t.get("companyID"), + "queueID": t.get("queueID"), + "title": t.get("title"), + "priority": t.get("priority"), + "status": t.get("status"), + "dueDateTime": t.get("dueDateTime"), + "description": new_desc, + } + + try: + client.update_ticket(payload) + except Exception as exc: + return jsonify({"status": "error", "message": f"Autotask ticket update failed: {exc}"}), 400 + + return jsonify({"status": "ok"}) + + @main_bp.post("/api/run-checks/mark-reviewed") @login_required @roles_required("admin", "operator") diff --git a/containers/backupchecks/src/backend/app/main/routes_settings.py b/containers/backupchecks/src/backend/app/main/routes_settings.py index 7018135..96dba4d 100644 --- a/containers/backupchecks/src/backend/app/main/routes_settings.py +++ b/containers/backupchecks/src/backend/app/main/routes_settings.py @@ -1,5 +1,7 @@ from .routes_shared import * # noqa: F401,F403 from .routes_shared import _get_database_size_bytes, _get_or_create_settings, _format_bytes, _get_free_disk_bytes, _log_admin_event +import json +from datetime import datetime @main_bp.route("/settings/jobs/delete-all", methods=["POST"]) @login_required @@ -405,6 +407,8 @@ def settings(): section = (request.args.get("section") or "general").strip().lower() or "general" if request.method == "POST": + autotask_form_touched = any(str(k).startswith("autotask_") for k in (request.form or {}).keys()) + # NOTE: The Settings UI has multiple tabs with separate forms. # Only update values that are present in the submitted form, to avoid # clearing unrelated settings when saving from another tab. 
@@ -430,6 +434,61 @@ def settings(): if "ui_timezone" in request.form: settings.ui_timezone = (request.form.get("ui_timezone") or "").strip() or "Europe/Amsterdam" + # Autotask integration + if "autotask_enabled" in request.form: + settings.autotask_enabled = bool(request.form.get("autotask_enabled")) + + if "autotask_environment" in request.form: + env_val = (request.form.get("autotask_environment") or "").strip().lower() + if env_val in ("sandbox", "production"): + settings.autotask_environment = env_val + else: + settings.autotask_environment = None + + if "autotask_api_username" in request.form: + settings.autotask_api_username = (request.form.get("autotask_api_username") or "").strip() or None + + if "autotask_api_password" in request.form: + pw = (request.form.get("autotask_api_password") or "").strip() + if pw: + settings.autotask_api_password = pw + + if "autotask_tracking_identifier" in request.form: + settings.autotask_tracking_identifier = (request.form.get("autotask_tracking_identifier") or "").strip() or None + + if "autotask_base_url" in request.form: + settings.autotask_base_url = (request.form.get("autotask_base_url") or "").strip() or None + + if "autotask_default_queue_id" in request.form: + try: + settings.autotask_default_queue_id = int(request.form.get("autotask_default_queue_id") or 0) or None + except (ValueError, TypeError): + pass + + if "autotask_default_ticket_source_id" in request.form: + try: + settings.autotask_default_ticket_source_id = int(request.form.get("autotask_default_ticket_source_id") or 0) or None + except (ValueError, TypeError): + pass + + if "autotask_default_ticket_status" in request.form: + try: + settings.autotask_default_ticket_status = int(request.form.get("autotask_default_ticket_status") or 0) or None + except (ValueError, TypeError): + pass + + if "autotask_priority_warning" in request.form: + try: + settings.autotask_priority_warning = int(request.form.get("autotask_priority_warning") or 0) or None + except 
(ValueError, TypeError): + pass + + if "autotask_priority_error" in request.form: + try: + settings.autotask_priority_error = int(request.form.get("autotask_priority_error") or 0) or None + except (ValueError, TypeError): + pass + # Daily Jobs if "daily_jobs_start_date" in request.form: daily_jobs_start_date_str = (request.form.get("daily_jobs_start_date") or "").strip() @@ -506,6 +565,48 @@ def settings(): db.session.commit() flash("Settings have been saved.", "success") + # Autotask ticket defaults depend on reference data (queues, sources, statuses, priorities). + # When the Autotask integration is (re)configured, auto-refresh the cached reference data + # once so the dropdowns become usable immediately. + try: + if ( + autotask_form_touched + and bool(getattr(settings, "autotask_enabled", False)) + and bool(getattr(settings, "autotask_api_username", None)) + and bool(getattr(settings, "autotask_api_password", None)) + and bool(getattr(settings, "autotask_tracking_identifier", None)) + ): + missing_cache = ( + not bool(getattr(settings, "autotask_cached_queues_json", None)) + or not bool(getattr(settings, "autotask_cached_ticket_sources_json", None)) + or not bool(getattr(settings, "autotask_cached_ticket_statuses_json", None)) + or not bool(getattr(settings, "autotask_cached_priorities_json", None)) + ) + + if missing_cache: + queues, sources, statuses, pr_out = _refresh_autotask_reference_data(settings) + db.session.commit() + flash( + f"Autotask reference data refreshed. Queues: {len(queues)}. Ticket Sources: {len(sources)}. Ticket Statuses: {len(statuses)}. 
Priorities: {len(pr_out)}.", + "success", + ) + _log_admin_event( + "autotask_reference_data_auto_refreshed", + "Autotask reference data auto-refreshed after settings save.", + details=json.dumps({"queues": len(queues or []), "ticket_sources": len(sources or []), "ticket_statuses": len(statuses or []), "priorities": len(pr_out)}), + ) + except Exception as exc: + try: + db.session.rollback() + except Exception: + pass + flash(f"Autotask reference data refresh failed: {exc}", "warning") + _log_admin_event( + "autotask_reference_data_auto_refresh_failed", + "Autotask reference data auto-refresh failed after settings save.", + details=json.dumps({"error": str(exc)}), + ) + # If EML storage has been turned off, clear any stored blobs immediately. try: if getattr(settings, "ingest_eml_retention_days", 7) == 0: @@ -537,6 +638,7 @@ def settings(): free_disk_warning = free_disk_bytes < two_gb has_client_secret = bool(settings.graph_client_secret) + has_autotask_password = bool(getattr(settings, "autotask_api_password", None)) # Common UI timezones (IANA names) tz_options = [ @@ -595,6 +697,37 @@ def settings(): except Exception: admin_users_count = 0 + # Autotask cached reference data for dropdowns + autotask_queues = [] + autotask_ticket_sources = [] + autotask_priorities = [] + autotask_ticket_statuses = [] + autotask_last_sync_at = getattr(settings, "autotask_reference_last_sync_at", None) + + try: + if getattr(settings, "autotask_cached_queues_json", None): + autotask_queues = json.loads(settings.autotask_cached_queues_json) or [] + except Exception: + autotask_queues = [] + + try: + if getattr(settings, "autotask_cached_ticket_sources_json", None): + autotask_ticket_sources = json.loads(settings.autotask_cached_ticket_sources_json) or [] + except Exception: + autotask_ticket_sources = [] + + try: + if getattr(settings, "autotask_cached_priorities_json", None): + autotask_priorities = json.loads(settings.autotask_cached_priorities_json) or [] + except Exception: + 
autotask_priorities = [] + + try: + if getattr(settings, "autotask_cached_ticket_statuses_json", None): + autotask_ticket_statuses = json.loads(settings.autotask_cached_ticket_statuses_json) or [] + except Exception: + autotask_ticket_statuses = [] + return render_template( "main/settings.html", settings=settings, @@ -602,10 +735,16 @@ def settings(): free_disk_human=free_disk_human, free_disk_warning=free_disk_warning, has_client_secret=has_client_secret, + has_autotask_password=has_autotask_password, tz_options=tz_options, users=users, admin_users_count=admin_users_count, section=section, + autotask_queues=autotask_queues, + autotask_ticket_sources=autotask_ticket_sources, + autotask_priorities=autotask_priorities, + autotask_ticket_statuses=autotask_ticket_statuses, + autotask_last_sync_at=autotask_last_sync_at, news_admin_items=news_admin_items, news_admin_stats=news_admin_stats, ) @@ -1172,3 +1311,147 @@ def settings_folders(): except Exception: pass return jsonify({"status": "error", "message": str(exc) or "Failed to load folders."}), 500 + + +@main_bp.route("/settings/autotask/test-connection", methods=["POST"]) +@login_required +@roles_required("admin") +def settings_autotask_test_connection(): + settings = _get_or_create_settings() + + if not settings.autotask_api_username or not settings.autotask_api_password or not settings.autotask_tracking_identifier: + flash("Autotask settings incomplete. 
Provide username, password and tracking identifier first.", "warning") + return redirect(url_for("main.settings", section="integrations")) + + try: + from ..integrations.autotask.client import AutotaskClient + client = AutotaskClient( + username=settings.autotask_api_username, + password=settings.autotask_api_password, + api_integration_code=settings.autotask_tracking_identifier, + environment=(settings.autotask_environment or "production"), + ) + zone = client.get_zone_info() + # Lightweight authenticated calls to validate credentials and basic API access + _ = client.get_queues() + _ = client.get_ticket_sources() + flash(f"Autotask connection OK. Zone: {zone.zone_name or 'unknown'}.", "success") + _log_admin_event( + "autotask_test_connection", + "Autotask test connection succeeded.", + details=json.dumps({"zone": zone.zone_name, "api_url": zone.api_url}), + ) + except Exception as exc: + flash(f"Autotask connection failed: {exc}", "danger") + _log_admin_event( + "autotask_test_connection_failed", + "Autotask test connection failed.", + details=json.dumps({"error": str(exc)}), + ) + + return redirect(url_for("main.settings", section="integrations")) + + +def _refresh_autotask_reference_data(settings): + """Refresh and persist Autotask reference data used for ticket default dropdowns.""" + from ..integrations.autotask.client import AutotaskClient + + client = AutotaskClient( + username=settings.autotask_api_username, + password=settings.autotask_api_password, + api_integration_code=settings.autotask_tracking_identifier, + environment=(settings.autotask_environment or "production"), + ) + + queues = client.get_queues() + sources = client.get_ticket_sources() + priorities = client.get_ticket_priorities() + statuses = client.get_ticket_statuses() + + # Store a minimal subset for dropdowns (id + name/label) + # Note: Some "reference" values are exposed as picklists (value/label) + # instead of entity collections (id/name). We normalize both shapes. 
+ def _norm(items): + out = [] + for it in items or []: + if not isinstance(it, dict): + continue + _id = it.get("id") + if _id is None: + _id = it.get("value") + + name = ( + it.get("name") + or it.get("label") + or it.get("queueName") + or it.get("sourceName") + or it.get("description") + or "" + ) + try: + _id_int = int(_id) + except Exception: + continue + out.append({"id": _id_int, "name": str(name)}) + # Sort by name for stable dropdowns + out.sort(key=lambda x: (x.get("name") or "").lower()) + return out + + settings.autotask_cached_queues_json = json.dumps(_norm(queues)) + settings.autotask_cached_ticket_sources_json = json.dumps(_norm(sources)) + settings.autotask_cached_ticket_statuses_json = json.dumps(_norm(statuses)) + + # Priorities are returned as picklist values (value/label) + pr_out = [] + for it in priorities or []: + if not isinstance(it, dict): + continue + if it.get("isActive") is False: + continue + val = it.get("value") + label = it.get("label") or it.get("name") or "" + try: + val_int = int(val) + except Exception: + continue + pr_out.append({"id": val_int, "name": str(label)}) + pr_out.sort(key=lambda x: (x.get("name") or "").lower()) + + settings.autotask_cached_priorities_json = json.dumps(pr_out) + settings.autotask_reference_last_sync_at = datetime.utcnow() + + return queues, sources, statuses, pr_out + + +@main_bp.route("/settings/autotask/refresh-reference-data", methods=["POST"]) +@login_required +@roles_required("admin") +def settings_autotask_refresh_reference_data(): + settings = _get_or_create_settings() + + if not settings.autotask_api_username or not settings.autotask_api_password or not settings.autotask_tracking_identifier: + flash("Autotask settings incomplete. 
Provide username, password and tracking identifier first.", "warning") + return redirect(url_for("main.settings", section="integrations")) + + try: + queues, sources, statuses, pr_out = _refresh_autotask_reference_data(settings) + db.session.commit() + + flash( + f"Autotask reference data refreshed. Queues: {len(queues)}. Ticket Sources: {len(sources)}. Ticket Statuses: {len(statuses)}. Priorities: {len(pr_out)}.", + "success", + ) + _log_admin_event( + "autotask_refresh_reference_data", + "Autotask reference data refreshed.", + details=json.dumps({"queues": len(queues or []), "ticket_sources": len(sources or []), "ticket_statuses": len(statuses or []), "priorities": len(pr_out)}), + ) + except Exception as exc: + flash(f"Failed to refresh Autotask reference data: {exc}", "danger") + _log_admin_event( + "autotask_refresh_reference_data_failed", + "Autotask reference data refresh failed.", + details=json.dumps({"error": str(exc)}), + ) + + return redirect(url_for("main.settings", section="integrations")) diff --git a/containers/backupchecks/src/backend/app/migrations.py b/containers/backupchecks/src/backend/app/migrations.py index 334be39..80061d7 100644 --- a/containers/backupchecks/src/backend/app/migrations.py +++ b/containers/backupchecks/src/backend/app/migrations.py @@ -22,6 +22,51 @@ def _is_column_nullable(table_name: str, column_name: str) -> bool: return False +def _column_exists_on_conn(conn, table_name: str, column_name: str) -> bool: + """Return True if the given column exists using the provided connection. + + This helper is useful inside engine.begin() blocks so we can check + column existence without creating a new inspector/connection. 
+ """ + result = conn.execute( + text( + """ + SELECT 1 + FROM information_schema.columns + WHERE table_name = :table + AND column_name = :column + LIMIT 1 + """ + ), + {"table": table_name, "column": column_name}, + ) + return result.first() is not None + + +def _get_table_columns(conn, table_name: str) -> set[str]: + """Return a set of column names for the given table using the provided connection. + + This helper is designed for use inside engine.begin() blocks so that any + errors are properly rolled back before the connection is returned to the pool. + + If the table does not exist (or cannot be inspected), an empty set is returned. + """ + try: + result = conn.execute( + text( + """ + SELECT column_name + FROM information_schema.columns + WHERE table_name = :table + """ + ), + {"table": table_name}, + ) + return {row[0] for row in result.fetchall()} + except Exception: + return set() + + def migrate_add_username_to_users() -> None: """Ensure users.username column exists and is NOT NULL and UNIQUE. 
@@ -127,6 +172,84 @@ def migrate_system_settings_ui_timezone() -> None: except Exception as exc: print(f"[migrations] Failed to migrate system_settings.ui_timezone: {exc}") +def migrate_system_settings_autotask_integration() -> None: + """Add Autotask integration columns to system_settings if missing.""" + + table = "system_settings" + + columns = [ + ("autotask_enabled", "BOOLEAN NOT NULL DEFAULT FALSE"), + ("autotask_environment", "VARCHAR(32) NULL"), + ("autotask_api_username", "VARCHAR(255) NULL"), + ("autotask_api_password", "VARCHAR(255) NULL"), + ("autotask_tracking_identifier", "VARCHAR(255) NULL"), + ("autotask_base_url", "VARCHAR(512) NULL"), + ("autotask_default_queue_id", "INTEGER NULL"), + ("autotask_default_ticket_source_id", "INTEGER NULL"), + ("autotask_default_ticket_status", "INTEGER NULL"), + ("autotask_priority_warning", "INTEGER NULL"), + ("autotask_priority_error", "INTEGER NULL"), + ("autotask_cached_queues_json", "TEXT NULL"), + ("autotask_cached_ticket_sources_json", "TEXT NULL"), + ("autotask_cached_priorities_json", "TEXT NULL"), + ("autotask_cached_ticket_statuses_json", "TEXT NULL"), + ("autotask_reference_last_sync_at", "TIMESTAMP NULL"), + ] + + try: + engine = db.get_engine() + except Exception as exc: + print(f"[migrations] Could not get engine for system_settings autotask migration: {exc}") + return + + try: + with engine.begin() as conn: + for column, ddl in columns: + if _column_exists_on_conn(conn, table, column): + continue + conn.execute(text(f'ALTER TABLE "{table}" ADD COLUMN {column} {ddl}')) + print("[migrations] migrate_system_settings_autotask_integration completed.") + except Exception as exc: + print(f"[migrations] Failed to migrate system_settings autotask integration columns: {exc}") + + +def migrate_customers_autotask_company_mapping() -> None: + """Add Autotask company mapping columns to customers if missing. 
+ + Columns: + - autotask_company_id (INTEGER NULL) + - autotask_company_name (VARCHAR(255) NULL) + - autotask_mapping_status (VARCHAR(20) NULL) + - autotask_last_sync_at (TIMESTAMP NULL) + """ + + table = "customers" + columns = [ + ("autotask_company_id", "INTEGER NULL"), + ("autotask_company_name", "VARCHAR(255) NULL"), + ("autotask_mapping_status", "VARCHAR(20) NULL"), + ("autotask_last_sync_at", "TIMESTAMP NULL"), + ] + + try: + engine = db.get_engine() + except Exception as exc: + print(f"[migrations] Could not get engine for customers autotask mapping migration: {exc}") + return + + try: + with engine.begin() as conn: + for column, ddl in columns: + if _column_exists_on_conn(conn, table, column): + continue + conn.execute(text(f'ALTER TABLE "{table}" ADD COLUMN {column} {ddl}')) + print("[migrations] migrate_customers_autotask_company_mapping completed.") + except Exception as exc: + print(f"[migrations] Failed to migrate customers autotask company mapping columns: {exc}") + + + + def migrate_mail_messages_columns() -> None: @@ -779,6 +902,8 @@ def run_migrations() -> None: migrate_system_settings_auto_import_cutoff_date() migrate_system_settings_daily_jobs_start_date() migrate_system_settings_ui_timezone() + migrate_system_settings_autotask_integration() + migrate_customers_autotask_company_mapping() migrate_mail_messages_columns() migrate_mail_messages_parse_columns() migrate_mail_messages_approval_columns() @@ -793,10 +918,14 @@ def run_migrations() -> None: migrate_feedback_tables() migrate_feedback_replies_table() migrate_tickets_active_from_date() + migrate_tickets_resolved_origin() migrate_remarks_active_from_date() migrate_overrides_match_columns() migrate_job_runs_review_tracking() migrate_job_runs_override_metadata() + migrate_job_runs_autotask_ticket_fields() + migrate_job_runs_autotask_ticket_deleted_fields() + migrate_job_runs_autotask_ticket_deleted_by_name_fields() migrate_jobs_archiving() migrate_news_tables() migrate_reporting_tables() @@ 
-804,6 +933,147 @@ def run_migrations() -> None: print("[migrations] All migrations completed.") +def migrate_job_runs_autotask_ticket_fields() -> None: + """Add Autotask ticket linkage fields to job_runs if missing. + + Columns: + - job_runs.autotask_ticket_id (INTEGER NULL) + - job_runs.autotask_ticket_number (VARCHAR(64) NULL) + - job_runs.autotask_ticket_created_at (TIMESTAMP NULL) + - job_runs.autotask_ticket_created_by_user_id (INTEGER NULL, FK users.id) + """ + + table = "job_runs" + try: + engine = db.get_engine() + except Exception as exc: + print(f"[migrations] Could not get engine for job_runs Autotask ticket migration: {exc}") + return + + try: + with engine.begin() as conn: + cols = _get_table_columns(conn, table) + if not cols: + print("[migrations] job_runs table not found; skipping migrate_job_runs_autotask_ticket_fields.") + return + + if "autotask_ticket_id" not in cols: + print("[migrations] Adding job_runs.autotask_ticket_id column...") + conn.execute(text('ALTER TABLE "job_runs" ADD COLUMN autotask_ticket_id INTEGER')) + + if "autotask_ticket_number" not in cols: + print("[migrations] Adding job_runs.autotask_ticket_number column...") + conn.execute(text('ALTER TABLE "job_runs" ADD COLUMN autotask_ticket_number VARCHAR(64)')) + + if "autotask_ticket_created_at" not in cols: + print("[migrations] Adding job_runs.autotask_ticket_created_at column...") + conn.execute(text('ALTER TABLE "job_runs" ADD COLUMN autotask_ticket_created_at TIMESTAMP')) + + if "autotask_ticket_created_by_user_id" not in cols: + print("[migrations] Adding job_runs.autotask_ticket_created_by_user_id column...") + conn.execute(text('ALTER TABLE "job_runs" ADD COLUMN autotask_ticket_created_by_user_id INTEGER')) + + try: + conn.execute( + text( + 'ALTER TABLE "job_runs" ' + 'ADD CONSTRAINT job_runs_autotask_ticket_created_by_user_id_fkey ' + 'FOREIGN KEY (autotask_ticket_created_by_user_id) REFERENCES users(id) ' + 'ON DELETE SET NULL' + ) + ) + except Exception as exc: + 
print( + f"[migrations] Could not add FK job_runs_autotask_ticket_created_by_user_id -> users.id (continuing): {exc}" + ) + + conn.execute(text('CREATE INDEX IF NOT EXISTS idx_job_runs_autotask_ticket_id ON "job_runs" (autotask_ticket_id)')) + except Exception as exc: + print(f"[migrations] migrate_job_runs_autotask_ticket_fields failed (continuing): {exc}") + return + + print("[migrations] migrate_job_runs_autotask_ticket_fields completed.") + + +def migrate_job_runs_autotask_ticket_deleted_fields() -> None: + """Add Autotask deleted ticket audit fields to job_runs if missing. + + Columns: + - job_runs.autotask_ticket_deleted_at (TIMESTAMP NULL) + - job_runs.autotask_ticket_deleted_by_resource_id (INTEGER NULL) + """ + + table = "job_runs" + try: + engine = db.get_engine() + except Exception as exc: + print(f"[migrations] Could not get engine for job_runs Autotask ticket deleted fields migration: {exc}") + return + + try: + with engine.begin() as conn: + cols = _get_table_columns(conn, table) + if not cols: + print("[migrations] job_runs table not found; skipping migrate_job_runs_autotask_ticket_deleted_fields.") + return + + if "autotask_ticket_deleted_at" not in cols: + print("[migrations] Adding job_runs.autotask_ticket_deleted_at column...") + conn.execute(text('ALTER TABLE "job_runs" ADD COLUMN autotask_ticket_deleted_at TIMESTAMP')) + + if "autotask_ticket_deleted_by_resource_id" not in cols: + print("[migrations] Adding job_runs.autotask_ticket_deleted_by_resource_id column...") + conn.execute(text('ALTER TABLE "job_runs" ADD COLUMN autotask_ticket_deleted_by_resource_id INTEGER')) + + conn.execute(text('CREATE INDEX IF NOT EXISTS idx_job_runs_autotask_ticket_deleted_by_resource_id ON "job_runs" (autotask_ticket_deleted_by_resource_id)')) + + conn.execute(text('CREATE INDEX IF NOT EXISTS idx_job_runs_autotask_ticket_deleted_at ON "job_runs" (autotask_ticket_deleted_at)')) + except Exception as exc: + print(f"[migrations] 
migrate_job_runs_autotask_ticket_deleted_fields failed (continuing): {exc}") + return + + print("[migrations] migrate_job_runs_autotask_ticket_deleted_fields completed.") + + +def migrate_job_runs_autotask_ticket_deleted_by_name_fields() -> None: + """Add Autotask deleted-by name audit fields to job_runs if missing. + + Columns: + - job_runs.autotask_ticket_deleted_by_first_name (VARCHAR(255) NULL) + - job_runs.autotask_ticket_deleted_by_last_name (VARCHAR(255) NULL) + """ + + table = "job_runs" + + try: + engine = db.get_engine() + except Exception as exc: + print(f"[migrations] Could not get engine for job_runs Autotask deleted-by name fields migration: {exc}") + return + + try: + with engine.begin() as conn: + cols = _get_table_columns(conn, table) + if not cols: + print("[migrations] job_runs table not found; skipping migrate_job_runs_autotask_ticket_deleted_by_name_fields.") + return + + if "autotask_ticket_deleted_by_first_name" not in cols: + print("[migrations] Adding job_runs.autotask_ticket_deleted_by_first_name column...") + conn.execute(text('ALTER TABLE "job_runs" ADD COLUMN autotask_ticket_deleted_by_first_name VARCHAR(255)')) + + if "autotask_ticket_deleted_by_last_name" not in cols: + print("[migrations] Adding job_runs.autotask_ticket_deleted_by_last_name column...") + conn.execute(text('ALTER TABLE "job_runs" ADD COLUMN autotask_ticket_deleted_by_last_name VARCHAR(255)')) + + conn.execute(text('CREATE INDEX IF NOT EXISTS idx_job_runs_autotask_ticket_deleted_by_first_name ON "job_runs" (autotask_ticket_deleted_by_first_name)')) + conn.execute(text('CREATE INDEX IF NOT EXISTS idx_job_runs_autotask_ticket_deleted_by_last_name ON "job_runs" (autotask_ticket_deleted_by_last_name)')) + except Exception as exc: + print(f"[migrations] migrate_job_runs_autotask_ticket_deleted_by_name_fields failed (continuing): {exc}") + + print("[migrations] migrate_job_runs_autotask_ticket_deleted_by_name_fields completed.") + + def migrate_jobs_archiving() -> None: 
"""Add archiving columns to jobs if missing. @@ -1090,6 +1360,34 @@ def migrate_tickets_active_from_date() -> None: + +def migrate_tickets_resolved_origin() -> None: + """Add tickets.resolved_origin column if missing. + + Used to show whether a ticket was resolved by PSA polling or manually inside Backupchecks. + """ + + table = "tickets" + try: + engine = db.get_engine() + except Exception as exc: + print(f"[migrations] Could not get engine for tickets resolved_origin migration: {exc}") + return + + try: + with engine.begin() as conn: + cols = _get_table_columns(conn, table) + if not cols: + print("[migrations] tickets table not found; skipping migrate_tickets_resolved_origin.") + return + if "resolved_origin" not in cols: + print("[migrations] Adding tickets.resolved_origin column...") + conn.execute(text('ALTER TABLE "tickets" ADD COLUMN resolved_origin VARCHAR(32)')) + except Exception as exc: + print(f"[migrations] tickets resolved_origin migration failed (continuing): {exc}") + + print("[migrations] migrate_tickets_resolved_origin completed.") + def migrate_mail_messages_overall_message() -> None: """Add overall_message column to mail_messages if missing.""" table = "mail_messages" diff --git a/containers/backupchecks/src/backend/app/models.py b/containers/backupchecks/src/backend/app/models.py index 3d23da6..620c99e 100644 --- a/containers/backupchecks/src/backend/app/models.py +++ b/containers/backupchecks/src/backend/app/models.py @@ -107,6 +107,28 @@ class SystemSettings(db.Model): # UI display timezone (IANA name). Used for rendering times in the web interface. 
ui_timezone = db.Column(db.String(64), nullable=False, default="Europe/Amsterdam") + + # Autotask integration settings + autotask_enabled = db.Column(db.Boolean, nullable=False, default=False) + autotask_environment = db.Column(db.String(32), nullable=True) # sandbox | production + autotask_api_username = db.Column(db.String(255), nullable=True) + autotask_api_password = db.Column(db.String(255), nullable=True) + autotask_tracking_identifier = db.Column(db.String(255), nullable=True) + autotask_base_url = db.Column(db.String(512), nullable=True) # Backupchecks base URL for deep links + + # Autotask defaults (IDs are leading) + autotask_default_queue_id = db.Column(db.Integer, nullable=True) + autotask_default_ticket_source_id = db.Column(db.Integer, nullable=True) + autotask_default_ticket_status = db.Column(db.Integer, nullable=True) + autotask_priority_warning = db.Column(db.Integer, nullable=True) + autotask_priority_error = db.Column(db.Integer, nullable=True) + + # Cached reference data (for dropdowns) + autotask_cached_queues_json = db.Column(db.Text, nullable=True) + autotask_cached_ticket_sources_json = db.Column(db.Text, nullable=True) + autotask_cached_priorities_json = db.Column(db.Text, nullable=True) + autotask_cached_ticket_statuses_json = db.Column(db.Text, nullable=True) + autotask_reference_last_sync_at = db.Column(db.DateTime, nullable=True) created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False) updated_at = db.Column( db.DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False @@ -132,6 +154,14 @@ class Customer(db.Model): name = db.Column(db.String(255), unique=True, nullable=False) active = db.Column(db.Boolean, nullable=False, default=True) + # Autotask company mapping (Phase 3) + # Company ID is leading; name is cached for UI display. 
+ autotask_company_id = db.Column(db.Integer, nullable=True) + autotask_company_name = db.Column(db.String(255), nullable=True) + # Mapping status: ok | renamed | missing | invalid + autotask_mapping_status = db.Column(db.String(20), nullable=True) + autotask_last_sync_at = db.Column(db.DateTime, nullable=True) + created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False) updated_at = db.Column( db.DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False @@ -246,6 +276,17 @@ class JobRun(db.Model): reviewed_at = db.Column(db.DateTime, nullable=True) reviewed_by_user_id = db.Column(db.Integer, db.ForeignKey("users.id"), nullable=True) + # Autotask integration (Phase 4: ticket creation from Run Checks) + autotask_ticket_id = db.Column(db.Integer, nullable=True) + autotask_ticket_number = db.Column(db.String(64), nullable=True) + autotask_ticket_created_at = db.Column(db.DateTime, nullable=True) + autotask_ticket_created_by_user_id = db.Column(db.Integer, db.ForeignKey("users.id"), nullable=True) + autotask_ticket_deleted_at = db.Column(db.DateTime, nullable=True) + autotask_ticket_deleted_by_resource_id = db.Column(db.Integer, nullable=True) + autotask_ticket_deleted_by_first_name = db.Column(db.String(255), nullable=True) + autotask_ticket_deleted_by_last_name = db.Column(db.String(255), nullable=True) + + created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False) updated_at = db.Column( @@ -259,6 +300,8 @@ class JobRun(db.Model): reviewed_by = db.relationship("User", foreign_keys=[reviewed_by_user_id]) + autotask_ticket_created_by = db.relationship("User", foreign_keys=[autotask_ticket_created_by_user_id]) + class JobRunReviewEvent(db.Model): __tablename__ = "job_run_review_events" @@ -383,6 +426,8 @@ class Ticket(db.Model): # Audit timestamp: when the ticket was created (UTC, naive) start_date = db.Column(db.DateTime, nullable=False) resolved_at = db.Column(db.DateTime) + # Resolution origin for audit/UI: psa | 
backupchecks + resolved_origin = db.Column(db.String(32)) created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False) updated_at = db.Column(db.DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False) @@ -652,4 +697,4 @@ class ReportObjectSummary(db.Model): report = db.relationship( "ReportDefinition", backref=db.backref("object_summaries", lazy="dynamic", cascade="all, delete-orphan"), - ) \ No newline at end of file + ) diff --git a/containers/backupchecks/src/backend/app/ticketing_utils.py b/containers/backupchecks/src/backend/app/ticketing_utils.py new file mode 100644 index 0000000..23557c6 --- /dev/null +++ b/containers/backupchecks/src/backend/app/ticketing_utils.py @@ -0,0 +1,235 @@ +from __future__ import annotations + +from datetime import datetime, date, timezone +from typing import Iterable, Optional + +from zoneinfo import ZoneInfo + +from flask import current_app + +from sqlalchemy import text + +from .database import db +from .models import Job, JobRun, SystemSettings, Ticket, TicketJobRun, TicketScope + + +def _get_ui_timezone_name() -> str: + """Return the configured UI timezone name (IANA), with a safe fallback. + + NOTE: This must not import from any routes_* modules to avoid circular imports. 
+ """ + + try: + settings = SystemSettings.query.first() + name = (getattr(settings, "ui_timezone", None) or "").strip() + if name: + return name + except Exception: + pass + + try: + return (current_app.config.get("TIMEZONE") or "Europe/Amsterdam").strip() + except Exception: + return "Europe/Amsterdam" + + +def _to_ui_date(dt_utc_naive: datetime | None) -> date | None: + """Convert a naive UTC datetime to a UI-local date.""" + if not dt_utc_naive: + return None + + try: + tz = ZoneInfo(_get_ui_timezone_name()) + except Exception: + tz = None + + if not tz: + return dt_utc_naive.date() + + try: + if dt_utc_naive.tzinfo is None: + dt_utc = dt_utc_naive.replace(tzinfo=timezone.utc) + else: + dt_utc = dt_utc_naive.astimezone(timezone.utc) + return dt_utc.astimezone(tz).date() + except Exception: + return dt_utc_naive.date() + + +def ensure_internal_ticket_for_job( + *, + ticket_code: str, + title: Optional[str], + description: str, + job: Job, + active_from_dt: Optional[datetime], + start_dt: Optional[datetime] = None, +) -> Ticket: + """Create/reuse an internal Ticket and ensure a job scope exists. + + This mirrors the legacy manual ticket workflow but allows arbitrary ticket codes + (e.g. Autotask ticket numbers). 
+ """ + + now = datetime.utcnow() + start_dt = start_dt or now + + code = (ticket_code or "").strip().upper() + if not code: + raise ValueError("ticket_code is required") + + ticket = Ticket.query.filter_by(ticket_code=code).first() + if not ticket: + ticket = Ticket( + ticket_code=code, + title=title, + description=description, + active_from_date=_to_ui_date(active_from_dt) or _to_ui_date(start_dt) or start_dt.date(), + start_date=start_dt, + resolved_at=None, + ) + db.session.add(ticket) + db.session.flush() + + # Ensure an open job scope exists + scope = TicketScope.query.filter_by(ticket_id=ticket.id, scope_type="job", job_id=job.id).first() + if not scope: + scope = TicketScope( + ticket_id=ticket.id, + scope_type="job", + customer_id=job.customer_id, + backup_software=job.backup_software, + backup_type=job.backup_type, + job_id=job.id, + job_name_match=job.job_name, + job_name_match_mode="exact", + resolved_at=None, + ) + db.session.add(scope) + else: + # Re-open and refresh scope metadata (legacy behavior) + scope.resolved_at = None + scope.customer_id = job.customer_id + scope.backup_software = job.backup_software + scope.backup_type = job.backup_type + scope.job_name_match = job.job_name + scope.job_name_match_mode = "exact" + + return ticket + + +def ensure_ticket_jobrun_links( + *, + ticket_id: int, + run_ids: Iterable[int], + link_source: str, +) -> None: + """Idempotently ensure TicketJobRun links exist for all provided run IDs.""" + + run_ids_list = [int(x) for x in (run_ids or []) if x is not None] + if not run_ids_list: + return + + existing = set() + try: + rows = ( + db.session.execute( + text( + """ + SELECT job_run_id + FROM ticket_job_runs + WHERE ticket_id = :ticket_id + AND job_run_id = ANY(:run_ids) + """ + ), + {"ticket_id": int(ticket_id), "run_ids": run_ids_list}, + ) + .fetchall() + ) + existing = {int(rid) for (rid,) in rows if rid is not None} + except Exception: + existing = set() + + for rid in run_ids_list: + if rid in existing: + 
continue + db.session.add(TicketJobRun(ticket_id=int(ticket_id), job_run_id=int(rid), link_source=link_source)) + + +def link_open_internal_tickets_to_run(*, run: JobRun, job: Job) -> None: + """When a new run is created, link any currently open internal tickets for the job. + + This restores legacy behavior where a ticket stays visible for new runs until resolved. + Additionally (best-effort), if the job already has Autotask linkage on previous runs, + propagate that to the new run so PSA polling remains consistent. + """ + + if not run or not getattr(run, "id", None) or not job or not getattr(job, "id", None): + return + + ui_tz = _get_ui_timezone_name() + run_date = _to_ui_date(getattr(run, "run_at", None)) or _to_ui_date(datetime.utcnow()) + + # Find open tickets scoped to this job for the run date window. + # This matches the logic used by Job Details and Run Checks indicators. + rows = [] + try: + rows = ( + db.session.execute( + text( + """ + SELECT t.id, t.ticket_code + FROM tickets t + JOIN ticket_scopes ts ON ts.ticket_id = t.id + WHERE ts.job_id = :job_id + AND t.active_from_date <= :run_date + AND ( + COALESCE(ts.resolved_at, t.resolved_at) IS NULL + OR ((COALESCE(ts.resolved_at, t.resolved_at) AT TIME ZONE 'UTC' AT TIME ZONE :ui_tz)::date) >= :run_date + ) + ORDER BY t.start_date DESC, t.id DESC + """ + ), + {"job_id": int(job.id), "run_date": run_date, "ui_tz": ui_tz}, + ) + .fetchall() + ) + except Exception: + rows = [] + + if not rows: + return + + # Link all open tickets to this run (idempotent) + for tid, _code in rows: + if not TicketJobRun.query.filter_by(ticket_id=int(tid), job_run_id=int(run.id)).first(): + db.session.add(TicketJobRun(ticket_id=int(tid), job_run_id=int(run.id), link_source="inherit")) + + # Best-effort: propagate Autotask linkage if present on prior runs for the same ticket code. + # This allows new runs to keep the PSA ticket reference without requiring UI changes. 
+ try: + if getattr(run, "autotask_ticket_id", None): + return + except Exception: + pass + + try: + # Use the newest ticket code to find a matching prior Autotask-linked run. + newest_code = (rows[0][1] or "").strip() + if not newest_code: + return + + prior = ( + JobRun.query.filter(JobRun.job_id == job.id) + .filter(JobRun.autotask_ticket_id.isnot(None)) + .filter(JobRun.autotask_ticket_number == newest_code) + .order_by(JobRun.id.desc()) + .first() + ) + if prior and getattr(prior, "autotask_ticket_id", None): + run.autotask_ticket_id = prior.autotask_ticket_id + run.autotask_ticket_number = prior.autotask_ticket_number + run.autotask_ticket_created_at = getattr(prior, "autotask_ticket_created_at", None) + run.autotask_ticket_created_by_user_id = getattr(prior, "autotask_ticket_created_by_user_id", None) + except Exception: + return diff --git a/containers/backupchecks/src/templates/main/customers.html b/containers/backupchecks/src/templates/main/customers.html index 153a546..024fb51 100644 --- a/containers/backupchecks/src/templates/main/customers.html +++ b/containers/backupchecks/src/templates/main/customers.html @@ -19,6 +19,11 @@ Export CSV + + {% if autotask_enabled and autotask_configured %} + + + {% endif %} {% endif %} @@ -29,6 +34,8 @@ Customer Active Number of jobs + Autotask company + Autotask mapping {% if can_manage %} Actions {% endif %} @@ -46,6 +53,7 @@ Inactive {% endif %} + {% if c.job_count > 0 %} {{ c.job_count }} @@ -53,6 +61,36 @@ 0 {% endif %} + + + {% if c.autotask_company_id %} + {{ c.autotask_company_name or 'Unknown' }} +
ID: {{ c.autotask_company_id }}
+ {% else %} + Not mapped + {% endif %} + + + + {% set st = (c.autotask_mapping_status or '').lower() %} + {% if not c.autotask_company_id %} + Not mapped + {% elif st == 'ok' %} + OK + {% elif st == 'renamed' %} + Renamed + {% elif st == 'missing' %} + Missing + {% elif st == 'invalid' %} + Invalid + {% else %} + Unknown + {% endif %} + + {% if c.autotask_last_sync_at %} +
Checked: {{ c.autotask_last_sync_at }}
+ {% endif %} + {% if can_manage %} @@ -82,7 +124,7 @@ {% endfor %} {% else %} - + No customers found. @@ -130,6 +172,36 @@ Active + +
+ +
Autotask mapping
+ {% if autotask_enabled and autotask_configured %} +
+
Current mapping
+
Not mapped
+
+
+ +
+ + +
+ +
+ +
+ + + +
+ +
+ {% else %} +
+ Autotask integration is not available. Enable and configure it in Settings → Extensions & Integrations → Autotask. +
+ {% endif %}