Auto-commit local changes before build (2026-01-19 11:11:08)
This commit is contained in:
parent
f8a57efee0
commit
82bdebb721
@ -1 +1 @@
|
||||
v20260116-12-autotask-ticket-sync-circular-import-fix
|
||||
v20260119-01-restoredto-v20260115-12-autotask-customers-refreshall-mappings
|
||||
|
||||
@ -1,16 +1,10 @@
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import uuid
|
||||
from dataclasses import dataclass
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
import requests
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass
|
||||
class AutotaskZoneInfo:
|
||||
zone_name: str
|
||||
@ -43,29 +37,6 @@ class AutotaskClient:
|
||||
self._zone_info: Optional[AutotaskZoneInfo] = None
|
||||
self._zoneinfo_base_used: Optional[str] = None
|
||||
|
||||
def _debug_enabled(self) -> bool:
|
||||
"""Return True when verbose Autotask integration logging is enabled.
|
||||
|
||||
This is intentionally controlled via an environment variable to avoid
|
||||
writing sensitive payloads to logs by default.
|
||||
"""
|
||||
return str(os.getenv("BACKUPCHECKS_AUTOTASK_DEBUG", "")).strip().lower() in {
|
||||
"1",
|
||||
"true",
|
||||
"yes",
|
||||
"on",
|
||||
}
|
||||
|
||||
def _safe_json_preview(self, data: Any, max_chars: int = 1200) -> str:
|
||||
"""Serialize JSON-like data for logging, truncating large payloads."""
|
||||
try:
|
||||
s = json.dumps(data, ensure_ascii=False, default=str)
|
||||
except Exception:
|
||||
s = str(data)
|
||||
if len(s) > max_chars:
|
||||
return s[:max_chars] + "…"
|
||||
return s
|
||||
|
||||
def _zoneinfo_bases(self) -> List[str]:
|
||||
"""Return a list of zoneInformation base URLs to try.
|
||||
|
||||
@ -134,20 +105,13 @@ class AutotaskClient:
|
||||
"Accept": "application/json",
|
||||
}
|
||||
|
||||
def _request_raw(
|
||||
self,
|
||||
method: str,
|
||||
path: str,
|
||||
params: Optional[Dict[str, Any]] = None,
|
||||
json_body: Optional[Dict[str, Any]] = None,
|
||||
) -> requests.Response:
|
||||
"""Perform an Autotask REST API request and return the raw response."""
|
||||
def _request(self, method: str, path: str, params: Optional[Dict[str, Any]] = None) -> Any:
|
||||
zone = self.get_zone_info()
|
||||
base = zone.api_url.rstrip("/")
|
||||
url = f"{base}/v1.0/{path.lstrip('/')}"
|
||||
headers = self._headers()
|
||||
|
||||
def do_request(use_basic_auth: bool, extra_headers: Optional[Dict[str, str]] = None) -> requests.Response:
|
||||
def do_request(use_basic_auth: bool, extra_headers: Optional[Dict[str, str]] = None):
|
||||
h = dict(headers)
|
||||
if extra_headers:
|
||||
h.update(extra_headers)
|
||||
@ -156,7 +120,6 @@ class AutotaskClient:
|
||||
url=url,
|
||||
headers=h,
|
||||
params=params or None,
|
||||
json=json_body if json_body is not None else None,
|
||||
auth=(self.username, self.password) if use_basic_auth else None,
|
||||
timeout=self.timeout_seconds,
|
||||
)
|
||||
@ -179,7 +142,8 @@ class AutotaskClient:
|
||||
raise AutotaskError(
|
||||
"Authentication failed (HTTP 401). "
|
||||
"Verify API Username, API Secret, and ApiIntegrationCode. "
|
||||
f"Environment={self.environment}, ZoneInfoBase={zi_base}, ZoneApiUrl={zone.api_url}.",
|
||||
f"Environment={self.environment}, ZoneInfoBase={zi_base}, ZoneApiUrl={zone.api_url}."
|
||||
,
|
||||
status_code=401,
|
||||
)
|
||||
if resp.status_code == 403:
|
||||
@ -192,19 +156,6 @@ class AutotaskClient:
|
||||
if resp.status_code >= 400:
|
||||
raise AutotaskError(f"Autotask API error (HTTP {resp.status_code}).", status_code=resp.status_code)
|
||||
|
||||
return resp
|
||||
|
||||
def _request(
|
||||
self,
|
||||
method: str,
|
||||
path: str,
|
||||
params: Optional[Dict[str, Any]] = None,
|
||||
json_body: Optional[Dict[str, Any]] = None,
|
||||
) -> Any:
|
||||
resp = self._request_raw(method=method, path=path, params=params, json_body=json_body)
|
||||
if not (resp.content or b""):
|
||||
return {}
|
||||
|
||||
try:
|
||||
return resp.json()
|
||||
except Exception as exc:
|
||||
@ -438,347 +389,3 @@ class AutotaskClient:
|
||||
raise AutotaskError("Tickets.priority metadata did not include picklist values.")
|
||||
|
||||
return self._call_picklist_values(picklist_values)
|
||||
|
||||
def get_ticket_statuses(self) -> List[Dict[str, Any]]:
|
||||
"""Return Ticket Status picklist values.
|
||||
|
||||
We retrieve this from Tickets field metadata to avoid hardcoded status IDs.
|
||||
"""
|
||||
return self._get_ticket_picklist_values(field_names=["status", "statusid"])
|
||||
|
||||
def get_ticket(self, ticket_id: int) -> Dict[str, Any]:
|
||||
"""Fetch a Ticket by ID via GET /Tickets/<id>."""
|
||||
if not isinstance(ticket_id, int) or ticket_id <= 0:
|
||||
raise AutotaskError("Invalid Autotask ticket id.")
|
||||
data = self._request("GET", f"Tickets/{ticket_id}")
|
||||
# Autotask commonly wraps single-entity GET results in an "item" object.
|
||||
# Normalize to the entity dict so callers can access fields like "id" and
|
||||
# "ticketNumber" without having to unwrap.
|
||||
if isinstance(data, dict) and data:
|
||||
if isinstance(data.get("item"), dict) and data.get("item"):
|
||||
return data["item"]
|
||||
|
||||
# Some endpoints/tenants may return a list even for a single ID.
|
||||
if isinstance(data.get("items"), list) and data.get("items"):
|
||||
first = data.get("items")[0]
|
||||
if isinstance(first, dict) and first:
|
||||
return first
|
||||
|
||||
return data
|
||||
raise AutotaskError("Autotask did not return a ticket object.")
|
||||
|
||||
def query_tickets_by_ids(
|
||||
self,
|
||||
ticket_ids: List[int],
|
||||
*,
|
||||
max_records_per_query: int = 200,
|
||||
corr_id: Optional[str] = None,
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""Fetch multiple Tickets by id.
|
||||
|
||||
Preferred path:
|
||||
- Use GET Tickets/query with an 'in' filter over id.
|
||||
|
||||
Fallback path:
|
||||
- If the tenant does not support 'in' queries, fetch tickets individually
|
||||
via GET Tickets/<id>.
|
||||
|
||||
Returns a list of ticket objects (dicts) for tickets that exist.
|
||||
"""
|
||||
|
||||
# Normalize ids
|
||||
ids: List[int] = []
|
||||
for x in ticket_ids or []:
|
||||
try:
|
||||
xi = int(x)
|
||||
except Exception:
|
||||
continue
|
||||
if xi > 0:
|
||||
ids.append(xi)
|
||||
|
||||
# De-duplicate while preserving order
|
||||
seen = set()
|
||||
dedup: List[int] = []
|
||||
for xi in ids:
|
||||
if xi in seen:
|
||||
continue
|
||||
seen.add(xi)
|
||||
dedup.append(xi)
|
||||
|
||||
if not dedup:
|
||||
return []
|
||||
|
||||
corr = corr_id or uuid.uuid4().hex[:10]
|
||||
|
||||
def _chunk(lst: List[int], n: int) -> List[List[int]]:
|
||||
return [lst[i : i + n] for i in range(0, len(lst), n)]
|
||||
|
||||
out: List[Dict[str, Any]] = []
|
||||
|
||||
# Try query with op=in first (chunked)
|
||||
try:
|
||||
for chunk in _chunk(dedup, max(1, int(max_records_per_query))):
|
||||
search_payload: Dict[str, Any] = {
|
||||
"filter": [
|
||||
{"op": "in", "field": "id", "value": chunk},
|
||||
],
|
||||
"maxRecords": len(chunk),
|
||||
}
|
||||
params = {"search": json.dumps(search_payload)}
|
||||
if self._debug_enabled():
|
||||
logger.info(
|
||||
"[autotask][%s] Tickets/query ids payload=%s",
|
||||
corr,
|
||||
self._safe_json_preview(search_payload, max_chars=1200),
|
||||
)
|
||||
data = self._request("GET", "Tickets/query", params=params)
|
||||
items = self._as_items_list(data)
|
||||
for it in items:
|
||||
if isinstance(it, dict) and it:
|
||||
out.append(it)
|
||||
return out
|
||||
except AutotaskError as exc:
|
||||
# Common tenant behavior: reject op=in with HTTP 400.
|
||||
if self._debug_enabled():
|
||||
logger.info(
|
||||
"[autotask][%s] Tickets/query ids op=in failed; falling back to per-ticket GET. error=%s",
|
||||
corr,
|
||||
str(exc),
|
||||
)
|
||||
except Exception as exc:
|
||||
if self._debug_enabled():
|
||||
logger.info(
|
||||
"[autotask][%s] Tickets/query ids unexpected error; falling back. error=%s",
|
||||
corr,
|
||||
str(exc),
|
||||
)
|
||||
|
||||
# Fallback: individual GET calls (best-effort)
|
||||
for tid in dedup:
|
||||
try:
|
||||
t = self.get_ticket(int(tid))
|
||||
if isinstance(t, dict) and t:
|
||||
out.append(t)
|
||||
except AutotaskError as exc:
|
||||
# 404 -> deleted/missing ticket, ignore
|
||||
if getattr(exc, "status_code", None) == 404:
|
||||
continue
|
||||
# Any other error: continue best-effort
|
||||
continue
|
||||
except Exception:
|
||||
continue
|
||||
|
||||
return out
|
||||
|
||||
|
||||
def _lookup_created_ticket_id(
|
||||
self,
|
||||
tracking_identifier: str,
|
||||
company_id: Optional[int] = None,
|
||||
corr_id: Optional[str] = None,
|
||||
) -> Optional[int]:
|
||||
"""Lookup the most recently created ticket by tracking identifier.
|
||||
|
||||
Some Autotask tenants return an empty body and omit Location headers on
|
||||
successful POST /Tickets calls. In that case, we must lookup the created
|
||||
ticket deterministically via query.
|
||||
|
||||
We prefer filtering by CompanyID when available to reduce ambiguity.
|
||||
"""
|
||||
|
||||
tid = (tracking_identifier or "").strip()
|
||||
if not tid:
|
||||
return None
|
||||
|
||||
filters: List[Dict[str, Any]] = [
|
||||
{"op": "eq", "field": "TrackingIdentifier", "value": tid},
|
||||
]
|
||||
if isinstance(company_id, int) and company_id > 0:
|
||||
filters.append({"op": "eq", "field": "CompanyID", "value": int(company_id)})
|
||||
|
||||
# Order by createDate desc when supported; fall back to id desc.
|
||||
search_payload: Dict[str, Any] = {
|
||||
"filter": filters,
|
||||
"maxRecords": 1,
|
||||
"orderby": [
|
||||
{"field": "createDate", "direction": "desc"},
|
||||
{"field": "id", "direction": "desc"},
|
||||
],
|
||||
}
|
||||
|
||||
params = {"search": json.dumps(search_payload)}
|
||||
if self._debug_enabled():
|
||||
logger.info(
|
||||
"[autotask][%s] Tickets/query lookup payload=%s",
|
||||
corr_id or "-",
|
||||
self._safe_json_preview(search_payload, max_chars=1200),
|
||||
)
|
||||
|
||||
data = self._request("GET", "Tickets/query", params=params)
|
||||
items = self._as_items_list(data)
|
||||
if self._debug_enabled():
|
||||
logger.info(
|
||||
"[autotask][%s] Tickets/query lookup result_count=%s keys=%s",
|
||||
corr_id or "-",
|
||||
len(items),
|
||||
(sorted(list(items[0].keys())) if items and isinstance(items[0], dict) else None),
|
||||
)
|
||||
if not items:
|
||||
return None
|
||||
|
||||
first = items[0]
|
||||
if isinstance(first, dict) and str(first.get("id") or "").isdigit():
|
||||
return int(first["id"])
|
||||
return None
|
||||
|
||||
def create_ticket(self, payload: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Create a Ticket in Autotask.
|
||||
|
||||
Uses POST /Tickets.
|
||||
Returns the created ticket object (as returned by Autotask).
|
||||
"""
|
||||
if not isinstance(payload, dict) or not payload:
|
||||
raise AutotaskError("Ticket payload is empty.")
|
||||
|
||||
corr_id = uuid.uuid4().hex[:10]
|
||||
|
||||
if self._debug_enabled():
|
||||
# Avoid dumping full descriptions by default, but include key routing fields.
|
||||
payload_keys = sorted(list(payload.keys()))
|
||||
logger.info(
|
||||
"[autotask][%s] POST /Tickets payload_keys=%s companyID=%s queueID=%s source=%s status=%s priority=%s trackingIdentifier=%s",
|
||||
corr_id,
|
||||
payload_keys,
|
||||
payload.get("companyID") or payload.get("CompanyID") or payload.get("companyId"),
|
||||
payload.get("queueID") or payload.get("QueueID") or payload.get("queueId"),
|
||||
payload.get("source") or payload.get("Source") or payload.get("sourceId") or payload.get("sourceID"),
|
||||
payload.get("status") or payload.get("Status") or payload.get("statusId") or payload.get("statusID"),
|
||||
payload.get("priority") or payload.get("Priority"),
|
||||
payload.get("trackingIdentifier") or payload.get("TrackingIdentifier"),
|
||||
)
|
||||
|
||||
resp = self._request_raw("POST", "Tickets", json_body=payload)
|
||||
|
||||
if self._debug_enabled():
|
||||
location = (resp.headers.get("Location") or resp.headers.get("location") or "").strip()
|
||||
logger.info(
|
||||
"[autotask][%s] POST /Tickets response http=%s content_type=%s content_length=%s location=%s",
|
||||
corr_id,
|
||||
resp.status_code,
|
||||
(resp.headers.get("Content-Type") or resp.headers.get("content-type") or ""),
|
||||
(len(resp.content or b"") if resp is not None else None),
|
||||
location or None,
|
||||
)
|
||||
|
||||
data: Any = {}
|
||||
if resp.content:
|
||||
try:
|
||||
data = resp.json()
|
||||
except Exception:
|
||||
# Some tenants return an empty body or a non-JSON body on successful POST.
|
||||
data = {}
|
||||
if self._debug_enabled():
|
||||
# Log a short preview of the raw body to understand tenant behaviour.
|
||||
try:
|
||||
body_preview = (resp.text or "")[:600]
|
||||
except Exception:
|
||||
body_preview = ""
|
||||
logger.info(
|
||||
"[autotask][%s] POST /Tickets non-JSON body preview=%s",
|
||||
corr_id,
|
||||
body_preview,
|
||||
)
|
||||
|
||||
if self._debug_enabled():
|
||||
logger.info(
|
||||
"[autotask][%s] POST /Tickets parsed_json_type=%s json_preview=%s",
|
||||
corr_id,
|
||||
type(data).__name__,
|
||||
self._safe_json_preview(data, max_chars=1200),
|
||||
)
|
||||
|
||||
ticket_id: Optional[int] = None
|
||||
|
||||
# Autotask may return a lightweight create result like {"itemId": 12345}.
|
||||
if isinstance(data, dict):
|
||||
for key in ("itemId", "itemID", "id", "ticketId", "ticketID"):
|
||||
if key in data and str(data.get(key) or "").isdigit():
|
||||
ticket_id = int(data[key])
|
||||
break
|
||||
|
||||
# Some variants wrap the created entity.
|
||||
if ticket_id is None and "item" in data and isinstance(data.get("item"), dict):
|
||||
item = data.get("item")
|
||||
if "id" in item and str(item.get("id") or "").isdigit():
|
||||
ticket_id = int(item["id"])
|
||||
else:
|
||||
return item
|
||||
|
||||
if ticket_id is None and "items" in data and isinstance(data.get("items"), list) and data.get("items"):
|
||||
first = data.get("items")[0]
|
||||
if isinstance(first, dict):
|
||||
if "id" in first and str(first.get("id") or "").isdigit():
|
||||
ticket_id = int(first["id"])
|
||||
else:
|
||||
return first
|
||||
|
||||
# Location header often contains the created entity URL.
|
||||
if ticket_id is None:
|
||||
location = (resp.headers.get("Location") or resp.headers.get("location") or "").strip()
|
||||
if location:
|
||||
try:
|
||||
last = location.rstrip("/").split("/")[-1]
|
||||
if last.isdigit():
|
||||
ticket_id = int(last)
|
||||
except Exception:
|
||||
ticket_id = None
|
||||
|
||||
if self._debug_enabled():
|
||||
logger.info(
|
||||
"[autotask][%s] POST /Tickets extracted_ticket_id=%s",
|
||||
corr_id,
|
||||
ticket_id,
|
||||
)
|
||||
|
||||
# If we have an ID, fetch the full ticket object so callers can reliably access ticketNumber etc.
|
||||
if ticket_id is not None:
|
||||
return self.get_ticket(ticket_id)
|
||||
|
||||
# Deterministic fallback: query by tracking identifier (+ company) if present.
|
||||
tracking_identifier = (
|
||||
payload.get("trackingIdentifier")
|
||||
or payload.get("TrackingIdentifier")
|
||||
or ""
|
||||
)
|
||||
company_id: Optional[int] = None
|
||||
for ck in ("companyID", "companyId", "CompanyID"):
|
||||
if str(payload.get(ck) or "").isdigit():
|
||||
company_id = int(payload[ck])
|
||||
break
|
||||
|
||||
if self._debug_enabled():
|
||||
logger.info(
|
||||
"[autotask][%s] fallback lookup by TrackingIdentifier=%s companyID=%s",
|
||||
corr_id,
|
||||
str(tracking_identifier),
|
||||
company_id,
|
||||
)
|
||||
|
||||
looked_up_id = self._lookup_created_ticket_id(
|
||||
str(tracking_identifier),
|
||||
company_id=company_id,
|
||||
corr_id=corr_id,
|
||||
)
|
||||
if looked_up_id is not None:
|
||||
return self.get_ticket(looked_up_id)
|
||||
|
||||
# Last-resort fallback: normalize first item if possible.
|
||||
items = self._as_items_list(data)
|
||||
if items:
|
||||
return items[0]
|
||||
|
||||
raise AutotaskError(
|
||||
"Autotask did not return a ticket id. "
|
||||
"Ticket creation may still have succeeded. "
|
||||
f"(HTTP {resp.status_code}, Correlation={corr_id})."
|
||||
)
|
||||
|
||||
@ -16,7 +16,6 @@ from .parsers import parse_mail_message
|
||||
from .parsers.veeam import extract_vspc_active_alarms_companies
|
||||
from .email_utils import normalize_from_address, extract_best_html_from_eml, is_effectively_blank_html
|
||||
from .job_matching import find_matching_job
|
||||
from .ticketing_utils import link_open_internal_tickets_to_run
|
||||
|
||||
|
||||
GRAPH_TOKEN_URL_TEMPLATE = "https://login.microsoftonline.com/{tenant_id}/oauth2/v2.0/token"
|
||||
@ -335,12 +334,6 @@ def _store_messages(settings: SystemSettings, messages):
|
||||
db.session.add(run)
|
||||
db.session.flush()
|
||||
|
||||
# Legacy ticket behavior: inherit any open internal tickets for this job.
|
||||
try:
|
||||
link_open_internal_tickets_to_run(run=run, job=job)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
auto_approved_runs.append((job.customer_id, job.id, run.id, mail.id))
|
||||
created_any = True
|
||||
|
||||
@ -391,12 +384,6 @@ def _store_messages(settings: SystemSettings, messages):
|
||||
db.session.add(run)
|
||||
db.session.flush() # ensure run.id is available
|
||||
|
||||
# Legacy ticket behavior: inherit any open internal tickets for this job.
|
||||
try:
|
||||
link_open_internal_tickets_to_run(run=run, job=job)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# Update mail message to reflect approval
|
||||
mail.job_id = job.id
|
||||
if hasattr(mail, "approved"):
|
||||
|
||||
@ -4,7 +4,6 @@ from .routes_shared import _format_datetime, _log_admin_event, _send_mail_messag
|
||||
from ..email_utils import extract_best_html_from_eml, is_effectively_blank_html
|
||||
from ..parsers.veeam import extract_vspc_active_alarms_companies
|
||||
from ..models import MailObject
|
||||
from ..ticketing_utils import link_open_internal_tickets_to_run
|
||||
|
||||
import time
|
||||
import re
|
||||
@ -296,13 +295,6 @@ def inbox_message_approve(message_id: int):
|
||||
run.storage_free_percent = msg.storage_free_percent
|
||||
db.session.add(run)
|
||||
|
||||
# Legacy ticket behavior: inherit any open internal tickets for this job.
|
||||
try:
|
||||
db.session.flush() # ensure run.id is available
|
||||
link_open_internal_tickets_to_run(run=run, job=job)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# Update mail message to reflect approval
|
||||
msg.job_id = job.id
|
||||
if hasattr(msg, "approved"):
|
||||
@ -546,12 +538,6 @@ def inbox_message_approve_vspc_companies(message_id: int):
|
||||
|
||||
db.session.add(run)
|
||||
db.session.flush()
|
||||
|
||||
# Legacy ticket behavior: inherit any open internal tickets for this job.
|
||||
try:
|
||||
link_open_internal_tickets_to_run(run=run, job=job)
|
||||
except Exception:
|
||||
pass
|
||||
created_runs.append(run)
|
||||
|
||||
# Persist objects for reporting (idempotent upsert; safe to repeat).
|
||||
@ -699,12 +685,6 @@ def inbox_message_approve_vspc_companies(message_id: int):
|
||||
db.session.add(run2)
|
||||
db.session.flush()
|
||||
|
||||
# Legacy ticket behavior: inherit any open internal tickets for this job.
|
||||
try:
|
||||
link_open_internal_tickets_to_run(run=run2, job=job2)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# Persist objects per company
|
||||
try:
|
||||
persist_objects_for_approved_run_filtered(
|
||||
@ -1070,12 +1050,6 @@ def inbox_reparse_all():
|
||||
|
||||
db.session.add(run)
|
||||
db.session.flush()
|
||||
|
||||
# Legacy ticket behavior: inherit any open internal tickets for this job.
|
||||
try:
|
||||
link_open_internal_tickets_to_run(run=run, job=job)
|
||||
except Exception:
|
||||
pass
|
||||
auto_approved_runs.append((job.customer_id, job.id, run.id, msg.id))
|
||||
created_any = True
|
||||
|
||||
@ -1136,12 +1110,6 @@ def inbox_reparse_all():
|
||||
|
||||
db.session.add(run)
|
||||
db.session.flush() # ensure run.id is available
|
||||
|
||||
# Legacy ticket behavior: inherit any open internal tickets for this job.
|
||||
try:
|
||||
link_open_internal_tickets_to_run(run=run, job=job)
|
||||
except Exception:
|
||||
pass
|
||||
auto_approved_runs.append((job.customer_id, job.id, run.id, msg.id))
|
||||
|
||||
msg.job_id = job.id
|
||||
@ -1241,12 +1209,6 @@ def inbox_reparse_all():
|
||||
|
||||
db.session.add(run)
|
||||
db.session.flush()
|
||||
|
||||
# Legacy ticket behavior: inherit any open internal tickets for this job.
|
||||
try:
|
||||
link_open_internal_tickets_to_run(run=run, job=job)
|
||||
except Exception:
|
||||
pass
|
||||
auto_approved_runs.append((job.customer_id, job.id, run.id, msg.id))
|
||||
|
||||
msg.job_id = job.id
|
||||
|
||||
@ -4,8 +4,7 @@ import calendar
|
||||
|
||||
from datetime import date, datetime, time, timedelta, timezone
|
||||
|
||||
from flask import jsonify, render_template, request, url_for
|
||||
from urllib.parse import urljoin
|
||||
from flask import jsonify, render_template, request
|
||||
from flask_login import current_user, login_required
|
||||
from sqlalchemy import and_, or_, func, text
|
||||
|
||||
@ -32,119 +31,10 @@ from ..models import (
|
||||
JobRunReviewEvent,
|
||||
MailMessage,
|
||||
MailObject,
|
||||
Ticket,
|
||||
TicketJobRun,
|
||||
TicketScope,
|
||||
Override,
|
||||
User,
|
||||
)
|
||||
|
||||
from ..ticketing_utils import (
|
||||
ensure_internal_ticket_for_job,
|
||||
ensure_ticket_jobrun_links,
|
||||
link_open_internal_tickets_to_run,
|
||||
)
|
||||
|
||||
|
||||
def _build_autotask_client_from_settings():
|
||||
"""Build an AutotaskClient from stored settings or raise a user-safe exception."""
|
||||
settings = _get_or_create_settings()
|
||||
if not getattr(settings, "autotask_enabled", False):
|
||||
raise RuntimeError("Autotask integration is disabled.")
|
||||
|
||||
required = [
|
||||
getattr(settings, "autotask_environment", None),
|
||||
getattr(settings, "autotask_api_username", None),
|
||||
getattr(settings, "autotask_api_password", None),
|
||||
getattr(settings, "autotask_tracking_identifier", None),
|
||||
]
|
||||
if any(not (x and str(x).strip()) for x in required):
|
||||
raise RuntimeError("Autotask settings incomplete.")
|
||||
|
||||
from ..integrations.autotask.client import AutotaskClient
|
||||
|
||||
return AutotaskClient(
|
||||
username=settings.autotask_api_username,
|
||||
password=settings.autotask_api_password,
|
||||
api_integration_code=settings.autotask_tracking_identifier,
|
||||
environment=settings.autotask_environment,
|
||||
)
|
||||
|
||||
|
||||
def _determine_autotask_severity(status_text: str | None) -> str:
|
||||
s = (status_text or "").strip().lower()
|
||||
if "warning" in s:
|
||||
return "warning"
|
||||
if "error" in s or "fail" in s:
|
||||
return "error"
|
||||
if "missed" in s:
|
||||
return "error"
|
||||
return "warning"
|
||||
|
||||
|
||||
def _compose_autotask_ticket_description(
|
||||
*,
|
||||
settings,
|
||||
job: Job,
|
||||
run: JobRun,
|
||||
status_display: str,
|
||||
overall_message: str,
|
||||
objects_payload: list[dict[str, str]],
|
||||
) -> str:
|
||||
tz_name = _get_ui_timezone_name() or "Europe/Amsterdam"
|
||||
run_dt = run.run_at
|
||||
run_at_str = _format_datetime(run_dt) if run_dt else "-"
|
||||
|
||||
base_url = (getattr(settings, "autotask_base_url", None) or "").strip()
|
||||
job_rel = url_for("main.job_detail", job_id=job.id)
|
||||
# Link to Job Details with a hint for the specific run.
|
||||
job_link = urljoin(base_url.rstrip("/") + "/", job_rel.lstrip("/"))
|
||||
if run.id:
|
||||
job_link = f"{job_link}?run_id={int(run.id)}"
|
||||
|
||||
lines: list[str] = []
|
||||
lines.append(f"Customer: {job.customer.name if job.customer else ''}")
|
||||
lines.append(f"Job: {job.job_name or ''}")
|
||||
lines.append(f"Backup: {job.backup_software or ''} / {job.backup_type or ''}")
|
||||
lines.append(f"Run at ({tz_name}): {run_at_str}")
|
||||
lines.append(f"Status: {status_display or ''}")
|
||||
lines.append("")
|
||||
|
||||
overall_message = (overall_message or "").strip()
|
||||
if overall_message:
|
||||
lines.append("Summary:")
|
||||
lines.append(overall_message)
|
||||
lines.append("")
|
||||
lines.append("Multiple objects reported messages. See Backupchecks for full details.")
|
||||
else:
|
||||
# Fallback to object-level messages with a hard limit.
|
||||
limit = 10
|
||||
shown = 0
|
||||
total = 0
|
||||
for o in objects_payload or []:
|
||||
name = (o.get("name") or "").strip()
|
||||
err = (o.get("error_message") or "").strip()
|
||||
st = (o.get("status") or "").strip()
|
||||
if not name:
|
||||
continue
|
||||
if not err and not st:
|
||||
continue
|
||||
total += 1
|
||||
if shown >= limit:
|
||||
continue
|
||||
msg = err or st
|
||||
lines.append(f"- {name}: {msg}")
|
||||
shown += 1
|
||||
|
||||
if total == 0:
|
||||
lines.append("No detailed object messages available. See Backupchecks for full details.")
|
||||
elif total > shown:
|
||||
lines.append(f"And {int(total - shown)} additional objects reported similar messages.")
|
||||
|
||||
lines.append("")
|
||||
lines.append(f"Backupchecks details: {job_link}")
|
||||
return "\n".join(lines).strip() + "\n"
|
||||
|
||||
# Grace window for matching real runs to an expected schedule slot.
|
||||
# A run within +/- 1 hour of the inferred schedule time counts as fulfilling the slot.
|
||||
MISSED_GRACE_WINDOW = timedelta(hours=1)
|
||||
@ -316,11 +206,6 @@ def _ensure_missed_runs_for_job(job: Job, start_from: date, end_inclusive: date)
|
||||
mail_message_id=None,
|
||||
)
|
||||
db.session.add(miss)
|
||||
try:
|
||||
db.session.flush() # ensure miss.id is available
|
||||
link_open_internal_tickets_to_run(run=miss, job=job)
|
||||
except Exception:
|
||||
pass
|
||||
inserted += 1
|
||||
|
||||
d = d + timedelta(days=1)
|
||||
@ -402,11 +287,6 @@ def _ensure_missed_runs_for_job(job: Job, start_from: date, end_inclusive: date)
|
||||
mail_message_id=None,
|
||||
)
|
||||
db.session.add(miss)
|
||||
try:
|
||||
db.session.flush() # ensure miss.id is available
|
||||
link_open_internal_tickets_to_run(run=miss, job=job)
|
||||
except Exception:
|
||||
pass
|
||||
inserted += 1
|
||||
|
||||
# Next month
|
||||
@ -873,8 +753,6 @@ def run_checks_details():
|
||||
"mail": mail_meta,
|
||||
"body_html": body_html,
|
||||
"objects": objects_payload,
|
||||
"autotask_ticket_id": getattr(run, "autotask_ticket_id", None),
|
||||
"autotask_ticket_number": getattr(run, "autotask_ticket_number", None) or "",
|
||||
}
|
||||
)
|
||||
|
||||
@ -892,468 +770,6 @@ def run_checks_details():
|
||||
return jsonify({"status": "ok", "job": job_payload, "runs": runs_payload})
|
||||
|
||||
|
||||
|
||||
|
||||
@main_bp.get("/api/run-checks/autotask-ticket-poll")
|
||||
@login_required
|
||||
@roles_required("admin", "operator")
|
||||
def api_run_checks_autotask_ticket_poll():
|
||||
"""Poll Autotask ticket state for Run Checks.
|
||||
|
||||
Notes:
|
||||
- This endpoint does not mutate Autotask.
|
||||
- As part of the legacy ticket workflow restoration, it may backfill
|
||||
missing local metadata (ticket numbers) and internal Ticket/TicketJobRun
|
||||
relations when those are absent.
|
||||
"""
|
||||
|
||||
include_reviewed = False
|
||||
if get_active_role() == "admin":
|
||||
include_reviewed = request.args.get("include_reviewed", "0") in ("1", "true", "yes", "on")
|
||||
|
||||
# Only consider recently relevant runs to keep the payload small.
|
||||
# We intentionally avoid unbounded history polling.
|
||||
days = 60
|
||||
try:
|
||||
days = int(request.args.get("days", "60"))
|
||||
except Exception:
|
||||
days = 60
|
||||
if days < 1:
|
||||
days = 1
|
||||
if days > 180:
|
||||
days = 180
|
||||
|
||||
now_utc = datetime.utcnow().replace(tzinfo=None)
|
||||
window_start = now_utc - timedelta(days=days)
|
||||
|
||||
q = JobRun.query.filter(JobRun.autotask_ticket_id.isnot(None))
|
||||
if not include_reviewed:
|
||||
q = q.filter(JobRun.reviewed_at.is_(None))
|
||||
|
||||
# Only poll runs in our time window.
|
||||
q = q.filter(func.coalesce(JobRun.run_at, JobRun.created_at) >= window_start)
|
||||
|
||||
runs = (
|
||||
q.order_by(func.coalesce(JobRun.run_at, JobRun.created_at).desc(), JobRun.id.desc())
|
||||
.limit(400)
|
||||
.all()
|
||||
)
|
||||
|
||||
ticket_ids = []
|
||||
seen = set()
|
||||
for r in runs:
|
||||
tid = getattr(r, "autotask_ticket_id", None)
|
||||
try:
|
||||
tid_int = int(tid)
|
||||
except Exception:
|
||||
continue
|
||||
if tid_int <= 0 or tid_int in seen:
|
||||
continue
|
||||
seen.add(tid_int)
|
||||
ticket_ids.append(tid_int)
|
||||
|
||||
if not ticket_ids:
|
||||
return jsonify({"status": "ok", "tickets": []})
|
||||
|
||||
# If integration is disabled, do not fail the page.
|
||||
settings = _get_or_create_settings()
|
||||
if not getattr(settings, "autotask_enabled", False):
|
||||
return jsonify({"status": "ok", "tickets": [], "autotask_enabled": False})
|
||||
|
||||
try:
|
||||
client = _build_autotask_client_from_settings()
|
||||
except Exception as exc:
|
||||
return jsonify({"status": "ok", "tickets": [], "autotask_enabled": True, "message": str(exc)})
|
||||
|
||||
corr_id = datetime.utcnow().strftime("rcpoll-%Y%m%d%H%M%S")
|
||||
|
||||
# Query tickets in Autotask (best-effort)
|
||||
tickets = []
|
||||
try:
|
||||
tickets = client.query_tickets_by_ids(ticket_ids, corr_id=corr_id)
|
||||
except Exception:
|
||||
tickets = []
|
||||
|
||||
# Build a minimal payload for UI use.
|
||||
out = []
|
||||
for t in tickets or []:
|
||||
if not isinstance(t, dict):
|
||||
continue
|
||||
tid = t.get("id")
|
||||
try:
|
||||
tid_int = int(tid)
|
||||
except Exception:
|
||||
continue
|
||||
|
||||
out.append(
|
||||
{
|
||||
"id": tid_int,
|
||||
"ticketNumber": (t.get("ticketNumber") or t.get("TicketNumber") or "") or "",
|
||||
"status": t.get("status"),
|
||||
"statusName": (t.get("statusName") or t.get("StatusName") or "") or "",
|
||||
"title": (t.get("title") or t.get("Title") or "") or "",
|
||||
"lastActivityDate": (t.get("lastActivityDate") or t.get("LastActivityDate") or t.get("lastActivity") or "") or "",
|
||||
}
|
||||
)
|
||||
|
||||
# Backfill local ticket numbers and internal Ticket/TicketJobRun records when missing.
|
||||
# This is intentionally best-effort and must not break the Run Checks page.
|
||||
try:
|
||||
id_to_number = {}
|
||||
id_to_title = {}
|
||||
for item in out:
|
||||
tid = item.get("id")
|
||||
num = (item.get("ticketNumber") or "").strip()
|
||||
if tid and num:
|
||||
id_to_number[int(tid)] = num
|
||||
id_to_title[int(tid)] = (item.get("title") or "").strip() or None
|
||||
|
||||
if id_to_number:
|
||||
# Update JobRun.autotask_ticket_number if empty, and ensure internal ticket workflow exists.
|
||||
jobs_seen = set()
|
||||
for r in runs:
|
||||
try:
|
||||
tid = int(getattr(r, "autotask_ticket_id", None) or 0)
|
||||
except Exception:
|
||||
tid = 0
|
||||
if tid <= 0 or tid not in id_to_number:
|
||||
continue
|
||||
|
||||
number = id_to_number.get(tid)
|
||||
if number and not ((getattr(r, "autotask_ticket_number", None) or "").strip()):
|
||||
r.autotask_ticket_number = number
|
||||
db.session.add(r)
|
||||
|
||||
# Ensure internal Ticket + scope + links exist (per job)
|
||||
if r.job_id and int(r.job_id) not in jobs_seen:
|
||||
jobs_seen.add(int(r.job_id))
|
||||
job = Job.query.get(r.job_id)
|
||||
if not job:
|
||||
continue
|
||||
|
||||
ticket = ensure_internal_ticket_for_job(
|
||||
ticket_code=number,
|
||||
title=id_to_title.get(tid),
|
||||
description=f"Autotask ticket {number}",
|
||||
job=job,
|
||||
active_from_dt=getattr(r, "run_at", None),
|
||||
start_dt=datetime.utcnow(),
|
||||
)
|
||||
|
||||
# Link all currently active (unreviewed) runs for this job.
|
||||
run_ids = [
|
||||
int(x)
|
||||
for (x,) in (
|
||||
JobRun.query.filter(JobRun.job_id == job.id)
|
||||
.filter(JobRun.reviewed_at.is_(None))
|
||||
.with_entities(JobRun.id)
|
||||
.all()
|
||||
)
|
||||
if x is not None
|
||||
]
|
||||
ensure_ticket_jobrun_links(ticket_id=ticket.id, run_ids=run_ids, link_source="autotask")
|
||||
|
||||
db.session.commit()
|
||||
except Exception:
|
||||
try:
|
||||
db.session.rollback()
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
return jsonify({"status": "ok", "tickets": out, "autotask_enabled": True})
|
||||
@main_bp.post("/api/run-checks/autotask-ticket")
|
||||
@login_required
|
||||
@roles_required("admin", "operator")
|
||||
def api_run_checks_create_autotask_ticket():
|
||||
"""Create an Autotask ticket for a specific run.
|
||||
|
||||
Enforces: exactly one ticket per run.
|
||||
"""
|
||||
data = request.get_json(silent=True) or {}
|
||||
try:
|
||||
run_id = int(data.get("run_id") or 0)
|
||||
except Exception:
|
||||
run_id = 0
|
||||
|
||||
if run_id <= 0:
|
||||
return jsonify({"status": "error", "message": "Invalid parameters."}), 400
|
||||
|
||||
run = JobRun.query.get(run_id)
|
||||
if not run:
|
||||
return jsonify({"status": "error", "message": "Run not found."}), 404
|
||||
|
||||
# Idempotent behavior:
|
||||
# If the run already has an Autotask ticket linked, we still continue so we can:
|
||||
# - propagate the linkage to all active (non-reviewed) runs of the same job
|
||||
# - synchronize the internal Ticket + TicketJobRun records (used by Tickets/Remarks + Job Details)
|
||||
already_exists = False
|
||||
existing_ticket_id = getattr(run, "autotask_ticket_id", None)
|
||||
existing_ticket_number = (getattr(run, "autotask_ticket_number", None) or "").strip() or None
|
||||
if existing_ticket_id:
|
||||
already_exists = True
|
||||
|
||||
job = Job.query.get(run.job_id)
|
||||
if not job:
|
||||
return jsonify({"status": "error", "message": "Job not found."}), 404
|
||||
|
||||
customer = Customer.query.get(job.customer_id) if getattr(job, "customer_id", None) else None
|
||||
if not customer:
|
||||
return jsonify({"status": "error", "message": "Customer not found."}), 404
|
||||
|
||||
if not getattr(customer, "autotask_company_id", None):
|
||||
return jsonify({"status": "error", "message": "Customer has no Autotask company mapping."}), 400
|
||||
|
||||
if (getattr(customer, "autotask_mapping_status", None) or "").strip().lower() not in ("ok", "renamed"):
|
||||
return jsonify({"status": "error", "message": "Autotask company mapping is not valid."}), 400
|
||||
|
||||
settings = _get_or_create_settings()
|
||||
|
||||
base_url = (getattr(settings, "autotask_base_url", None) or "").strip()
|
||||
if not base_url:
|
||||
return jsonify({"status": "error", "message": "Autotask Base URL is not configured."}), 400
|
||||
|
||||
# Required ticket defaults
|
||||
if not getattr(settings, "autotask_default_queue_id", None):
|
||||
return jsonify({"status": "error", "message": "Autotask default queue is not configured."}), 400
|
||||
if not getattr(settings, "autotask_default_ticket_source_id", None):
|
||||
return jsonify({"status": "error", "message": "Autotask default ticket source is not configured."}), 400
|
||||
if not getattr(settings, "autotask_default_ticket_status", None):
|
||||
return jsonify({"status": "error", "message": "Autotask default ticket status is not configured."}), 400
|
||||
|
||||
# Determine display status (including overrides) for consistent subject/priority mapping.
|
||||
status_display = run.status or "-"
|
||||
try:
|
||||
status_display, _, _, _ov_id, _ov_reason = _apply_overrides_to_run(job, run)
|
||||
except Exception:
|
||||
status_display = run.status or "-"
|
||||
|
||||
severity = _determine_autotask_severity(status_display)
|
||||
priority_id = None
|
||||
if severity == "warning":
|
||||
priority_id = getattr(settings, "autotask_priority_warning", None)
|
||||
else:
|
||||
priority_id = getattr(settings, "autotask_priority_error", None)
|
||||
|
||||
# Load mail + objects for ticket composition.
|
||||
msg = MailMessage.query.get(run.mail_message_id) if run.mail_message_id else None
|
||||
overall_message = (getattr(msg, "overall_message", None) or "") if msg else ""
|
||||
|
||||
objects_payload: list[dict[str, str]] = []
|
||||
try:
|
||||
objs = run.objects.order_by(JobObject.object_name.asc()).all()
|
||||
except Exception:
|
||||
objs = list(run.objects or [])
|
||||
for o in objs or []:
|
||||
objects_payload.append(
|
||||
{
|
||||
"name": getattr(o, "object_name", "") or "",
|
||||
"type": getattr(o, "object_type", "") or "",
|
||||
"status": getattr(o, "status", "") or "",
|
||||
"error_message": getattr(o, "error_message", "") or "",
|
||||
}
|
||||
)
|
||||
|
||||
if (not objects_payload) and msg:
|
||||
try:
|
||||
mos = MailObject.query.filter_by(mail_message_id=msg.id).order_by(MailObject.object_name.asc()).all()
|
||||
except Exception:
|
||||
mos = []
|
||||
for mo in mos or []:
|
||||
objects_payload.append(
|
||||
{
|
||||
"name": getattr(mo, "object_name", "") or "",
|
||||
"type": getattr(mo, "object_type", "") or "",
|
||||
"status": getattr(mo, "status", "") or "",
|
||||
"error_message": getattr(mo, "error_message", "") or "",
|
||||
}
|
||||
)
|
||||
|
||||
subject = f"[Backupchecks] {customer.name} - {job.job_name or ''} - {status_display}"
|
||||
description = _compose_autotask_ticket_description(
|
||||
settings=settings,
|
||||
job=job,
|
||||
run=run,
|
||||
status_display=status_display,
|
||||
overall_message=overall_message,
|
||||
objects_payload=objects_payload,
|
||||
)
|
||||
|
||||
payload = {
|
||||
"companyID": int(customer.autotask_company_id),
|
||||
"title": subject,
|
||||
"description": description,
|
||||
"queueID": int(settings.autotask_default_queue_id),
|
||||
"source": int(settings.autotask_default_ticket_source_id),
|
||||
"status": int(settings.autotask_default_ticket_status),
|
||||
}
|
||||
if priority_id:
|
||||
payload["priority"] = int(priority_id)
|
||||
|
||||
try:
|
||||
client = _build_autotask_client_from_settings()
|
||||
except Exception as exc:
|
||||
return jsonify({"status": "error", "message": f"Autotask client initialization failed: {exc}"}), 400
|
||||
|
||||
ticket_id = None
|
||||
ticket_number = None
|
||||
|
||||
if already_exists:
|
||||
try:
|
||||
ticket_id = int(existing_ticket_id)
|
||||
except Exception:
|
||||
ticket_id = None
|
||||
ticket_number = existing_ticket_number
|
||||
else:
|
||||
try:
|
||||
created = client.create_ticket(payload)
|
||||
except Exception as exc:
|
||||
return jsonify({"status": "error", "message": f"Autotask ticket creation failed: {exc}"}), 400
|
||||
|
||||
if isinstance(created, dict):
|
||||
ticket_id = created.get("id") or created.get("itemId") or created.get("ticketId")
|
||||
try:
|
||||
# Some wrappers return {"item": {"id": ...}}
|
||||
if not ticket_id and isinstance(created.get("item"), dict):
|
||||
ticket_id = created.get("item", {}).get("id")
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
if not ticket_id:
|
||||
return jsonify({"status": "error", "message": "Autotask did not return a ticket id."}), 400
|
||||
|
||||
# Autotask typically does not return the ticket number on create.
|
||||
# Also, existing linkages may have ticket_id but no ticket_number yet.
|
||||
# Always fetch the ticket if we don't have the number so we can persist it for UI and internal linking.
|
||||
if ticket_id and not ticket_number:
|
||||
try:
|
||||
fetched = client.get_ticket(int(ticket_id))
|
||||
if isinstance(fetched, dict) and isinstance(fetched.get("item"), dict):
|
||||
fetched = fetched.get("item")
|
||||
if isinstance(fetched, dict):
|
||||
ticket_number = fetched.get("ticketNumber") or fetched.get("number") or fetched.get("ticket_number")
|
||||
except Exception:
|
||||
ticket_number = ticket_number or None
|
||||
|
||||
# Link the created Autotask ticket to all relevant open runs of the same job.
|
||||
# This matches the manual ticket workflow where one ticket remains visible across runs
|
||||
# until it is explicitly resolved.
|
||||
now = datetime.utcnow()
|
||||
|
||||
# Collect the open run IDs first (stable list), then apply updates and internal linking.
|
||||
linked_run_ids: list[int] = []
|
||||
try:
|
||||
rows = (
|
||||
JobRun.query.filter(JobRun.job_id == run.job_id)
|
||||
.filter(JobRun.reviewed_at.is_(None))
|
||||
.with_entities(JobRun.id)
|
||||
.order_by(JobRun.id.asc())
|
||||
.all()
|
||||
)
|
||||
linked_run_ids = [int(rid) for (rid,) in rows if rid is not None]
|
||||
except Exception:
|
||||
linked_run_ids = []
|
||||
|
||||
# Safety: always include the explicitly selected run.
|
||||
try:
|
||||
if run.id and int(run.id) not in linked_run_ids:
|
||||
linked_run_ids.append(int(run.id))
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# Load run objects for the IDs we determined.
|
||||
open_runs = []
|
||||
if linked_run_ids:
|
||||
open_runs = JobRun.query.filter(JobRun.id.in_(linked_run_ids)).all()
|
||||
else:
|
||||
open_runs = [run]
|
||||
try:
|
||||
if run.id:
|
||||
linked_run_ids = [int(run.id)]
|
||||
except Exception:
|
||||
linked_run_ids = []
|
||||
|
||||
for r in open_runs or []:
|
||||
# Do not overwrite an existing (different) ticket linkage.
|
||||
existing_id = getattr(r, "autotask_ticket_id", None)
|
||||
if existing_id:
|
||||
try:
|
||||
if int(existing_id) != int(ticket_id):
|
||||
continue
|
||||
except Exception:
|
||||
continue
|
||||
|
||||
try:
|
||||
r.autotask_ticket_id = int(ticket_id)
|
||||
except Exception:
|
||||
r.autotask_ticket_id = None
|
||||
|
||||
r.autotask_ticket_number = (str(ticket_number).strip() if ticket_number is not None else "") or None
|
||||
r.autotask_ticket_created_at = now
|
||||
r.autotask_ticket_created_by_user_id = current_user.id
|
||||
|
||||
# Also store an internal Ticket record and link it to all relevant active runs.
|
||||
# This keeps Tickets/Remarks, Job Details, and Run Checks indicators consistent with the existing manual workflow,
|
||||
# and remains functional even if PSA integration is disabled later.
|
||||
internal_ticket = None
|
||||
if ticket_number:
|
||||
ticket_code = (str(ticket_number) or "").strip().upper()
|
||||
internal_ticket = Ticket.query.filter_by(ticket_code=ticket_code).first()
|
||||
if not internal_ticket:
|
||||
internal_ticket = Ticket(
|
||||
ticket_code=ticket_code,
|
||||
title=subject,
|
||||
description=description,
|
||||
active_from_date=_to_amsterdam_date(run.run_at) or _to_amsterdam_date(now) or now.date(),
|
||||
start_date=now,
|
||||
resolved_at=None,
|
||||
)
|
||||
db.session.add(internal_ticket)
|
||||
db.session.flush()
|
||||
|
||||
# Ensure a job scope exists (used by popups / job details / tickets page)
|
||||
scope = None
|
||||
if job and job.id and internal_ticket and internal_ticket.id:
|
||||
scope = TicketScope.query.filter_by(ticket_id=internal_ticket.id, scope_type="job", job_id=job.id).first()
|
||||
if not scope and internal_ticket and internal_ticket.id:
|
||||
scope = TicketScope(
|
||||
ticket_id=internal_ticket.id,
|
||||
scope_type="job",
|
||||
customer_id=job.customer_id if job else None,
|
||||
backup_software=job.backup_software if job else None,
|
||||
backup_type=job.backup_type if job else None,
|
||||
job_id=job.id if job else None,
|
||||
job_name_match=job.job_name if job else None,
|
||||
job_name_match_mode="exact",
|
||||
resolved_at=None,
|
||||
)
|
||||
db.session.add(scope)
|
||||
elif scope:
|
||||
scope.resolved_at = None
|
||||
|
||||
# Link ticket to all relevant active job runs (idempotent)
|
||||
for rid in linked_run_ids or []:
|
||||
if not TicketJobRun.query.filter_by(ticket_id=internal_ticket.id, job_run_id=rid).first():
|
||||
db.session.add(TicketJobRun(ticket_id=internal_ticket.id, job_run_id=rid, link_source="autotask"))
|
||||
|
||||
try:
|
||||
for r in open_runs or []:
|
||||
db.session.add(r)
|
||||
db.session.commit()
|
||||
except Exception as exc:
|
||||
db.session.rollback()
|
||||
return jsonify({"status": "error", "message": f"Failed to store ticket reference: {exc}"}), 500
|
||||
|
||||
return jsonify(
|
||||
{
|
||||
"status": "ok",
|
||||
"ticket_id": int(run.autotask_ticket_id) if run.autotask_ticket_id else None,
|
||||
"ticket_number": run.autotask_ticket_number or "",
|
||||
"already_exists": bool(already_exists),
|
||||
"linked_run_ids": linked_run_ids or [],
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
@main_bp.post("/api/run-checks/mark-reviewed")
|
||||
@login_required
|
||||
@roles_required("admin", "operator")
|
||||
|
||||
@ -657,7 +657,6 @@ def settings():
|
||||
autotask_queues = []
|
||||
autotask_ticket_sources = []
|
||||
autotask_priorities = []
|
||||
autotask_ticket_statuses = []
|
||||
autotask_last_sync_at = getattr(settings, "autotask_reference_last_sync_at", None)
|
||||
|
||||
try:
|
||||
@ -678,12 +677,6 @@ def settings():
|
||||
except Exception:
|
||||
autotask_priorities = []
|
||||
|
||||
try:
|
||||
if getattr(settings, "autotask_cached_ticket_statuses_json", None):
|
||||
autotask_ticket_statuses = json.loads(settings.autotask_cached_ticket_statuses_json) or []
|
||||
except Exception:
|
||||
autotask_ticket_statuses = []
|
||||
|
||||
return render_template(
|
||||
"main/settings.html",
|
||||
settings=settings,
|
||||
@ -699,7 +692,6 @@ def settings():
|
||||
autotask_queues=autotask_queues,
|
||||
autotask_ticket_sources=autotask_ticket_sources,
|
||||
autotask_priorities=autotask_priorities,
|
||||
autotask_ticket_statuses=autotask_ticket_statuses,
|
||||
autotask_last_sync_at=autotask_last_sync_at,
|
||||
news_admin_items=news_admin_items,
|
||||
news_admin_stats=news_admin_stats,
|
||||
@ -1330,7 +1322,6 @@ def settings_autotask_refresh_reference_data():
|
||||
queues = client.get_queues()
|
||||
sources = client.get_ticket_sources()
|
||||
priorities = client.get_ticket_priorities()
|
||||
statuses = client.get_ticket_statuses()
|
||||
|
||||
# Store a minimal subset for dropdowns (id + name/label)
|
||||
# Note: Some "reference" values are exposed as picklists (value/label)
|
||||
@ -1363,7 +1354,6 @@ def settings_autotask_refresh_reference_data():
|
||||
|
||||
settings.autotask_cached_queues_json = json.dumps(_norm(queues))
|
||||
settings.autotask_cached_ticket_sources_json = json.dumps(_norm(sources))
|
||||
settings.autotask_cached_ticket_statuses_json = json.dumps(_norm(statuses))
|
||||
|
||||
# Priorities are returned as picklist values (value/label)
|
||||
pr_out = []
|
||||
@ -1387,13 +1377,13 @@ def settings_autotask_refresh_reference_data():
|
||||
db.session.commit()
|
||||
|
||||
flash(
|
||||
f"Autotask reference data refreshed. Queues: {len(queues)}. Ticket Sources: {len(sources)}. Ticket Statuses: {len(statuses)}. Priorities: {len(pr_out)}.",
|
||||
f"Autotask reference data refreshed. Queues: {len(queues)}. Ticket Sources: {len(sources)}. Priorities: {len(pr_out)}.",
|
||||
"success",
|
||||
)
|
||||
_log_admin_event(
|
||||
"autotask_refresh_reference_data",
|
||||
"Autotask reference data refreshed.",
|
||||
details=json.dumps({"queues": len(queues or []), "ticket_sources": len(sources or []), "ticket_statuses": len(statuses or []), "priorities": len(pr_out)}),
|
||||
details=json.dumps({"queues": len(queues or []), "ticket_sources": len(sources or []), "priorities": len(pr_out)}),
|
||||
)
|
||||
except Exception as exc:
|
||||
flash(f"Failed to refresh Autotask reference data: {exc}", "danger")
|
||||
|
||||
@ -168,7 +168,6 @@ def migrate_system_settings_autotask_integration() -> None:
|
||||
("autotask_cached_queues_json", "TEXT NULL"),
|
||||
("autotask_cached_ticket_sources_json", "TEXT NULL"),
|
||||
("autotask_cached_priorities_json", "TEXT NULL"),
|
||||
("autotask_cached_ticket_statuses_json", "TEXT NULL"),
|
||||
("autotask_reference_last_sync_at", "TIMESTAMP NULL"),
|
||||
]
|
||||
|
||||
@ -898,7 +897,6 @@ def run_migrations() -> None:
|
||||
migrate_overrides_match_columns()
|
||||
migrate_job_runs_review_tracking()
|
||||
migrate_job_runs_override_metadata()
|
||||
migrate_job_runs_autotask_ticket_fields()
|
||||
migrate_jobs_archiving()
|
||||
migrate_news_tables()
|
||||
migrate_reporting_tables()
|
||||
@ -906,67 +904,6 @@ def run_migrations() -> None:
|
||||
print("[migrations] All migrations completed.")
|
||||
|
||||
|
||||
def migrate_job_runs_autotask_ticket_fields() -> None:
|
||||
"""Add Autotask ticket linkage fields to job_runs if missing.
|
||||
|
||||
Columns:
|
||||
- job_runs.autotask_ticket_id (INTEGER NULL)
|
||||
- job_runs.autotask_ticket_number (VARCHAR(64) NULL)
|
||||
- job_runs.autotask_ticket_created_at (TIMESTAMP NULL)
|
||||
- job_runs.autotask_ticket_created_by_user_id (INTEGER NULL, FK users.id)
|
||||
"""
|
||||
|
||||
table = "job_runs"
|
||||
try:
|
||||
engine = db.get_engine()
|
||||
except Exception as exc:
|
||||
print(f"[migrations] Could not get engine for job_runs Autotask ticket migration: {exc}")
|
||||
return
|
||||
|
||||
try:
|
||||
with engine.connect() as conn:
|
||||
cols = _get_table_columns(conn, table)
|
||||
if not cols:
|
||||
return
|
||||
|
||||
if "autotask_ticket_id" not in cols:
|
||||
print("[migrations] Adding job_runs.autotask_ticket_id column...")
|
||||
conn.execute(text('ALTER TABLE "job_runs" ADD COLUMN autotask_ticket_id INTEGER'))
|
||||
|
||||
if "autotask_ticket_number" not in cols:
|
||||
print("[migrations] Adding job_runs.autotask_ticket_number column...")
|
||||
conn.execute(text('ALTER TABLE "job_runs" ADD COLUMN autotask_ticket_number VARCHAR(64)'))
|
||||
|
||||
if "autotask_ticket_created_at" not in cols:
|
||||
print("[migrations] Adding job_runs.autotask_ticket_created_at column...")
|
||||
conn.execute(text('ALTER TABLE "job_runs" ADD COLUMN autotask_ticket_created_at TIMESTAMP'))
|
||||
|
||||
if "autotask_ticket_created_by_user_id" not in cols:
|
||||
print("[migrations] Adding job_runs.autotask_ticket_created_by_user_id column...")
|
||||
conn.execute(text('ALTER TABLE "job_runs" ADD COLUMN autotask_ticket_created_by_user_id INTEGER'))
|
||||
|
||||
try:
|
||||
conn.execute(
|
||||
text(
|
||||
'ALTER TABLE "job_runs" '
|
||||
'ADD CONSTRAINT job_runs_autotask_ticket_created_by_user_id_fkey '
|
||||
'FOREIGN KEY (autotask_ticket_created_by_user_id) REFERENCES users(id) '
|
||||
'ON DELETE SET NULL'
|
||||
)
|
||||
)
|
||||
except Exception as exc:
|
||||
print(
|
||||
f"[migrations] Could not add FK job_runs.autotask_ticket_created_by_user_id -> users.id (continuing): {exc}"
|
||||
)
|
||||
|
||||
conn.execute(text('CREATE INDEX IF NOT EXISTS idx_job_runs_autotask_ticket_id ON "job_runs" (autotask_ticket_id)'))
|
||||
except Exception as exc:
|
||||
print(f"[migrations] job_runs table not found; skipping migrate_job_runs_autotask_ticket_fields: {exc}")
|
||||
return
|
||||
|
||||
print("[migrations] migrate_job_runs_autotask_ticket_fields completed.")
|
||||
|
||||
|
||||
def migrate_jobs_archiving() -> None:
|
||||
"""Add archiving columns to jobs if missing.
|
||||
|
||||
|
||||
@ -127,7 +127,6 @@ class SystemSettings(db.Model):
|
||||
autotask_cached_queues_json = db.Column(db.Text, nullable=True)
|
||||
autotask_cached_ticket_sources_json = db.Column(db.Text, nullable=True)
|
||||
autotask_cached_priorities_json = db.Column(db.Text, nullable=True)
|
||||
autotask_cached_ticket_statuses_json = db.Column(db.Text, nullable=True)
|
||||
autotask_reference_last_sync_at = db.Column(db.DateTime, nullable=True)
|
||||
created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
|
||||
updated_at = db.Column(
|
||||
@ -276,12 +275,6 @@ class JobRun(db.Model):
|
||||
reviewed_at = db.Column(db.DateTime, nullable=True)
|
||||
reviewed_by_user_id = db.Column(db.Integer, db.ForeignKey("users.id"), nullable=True)
|
||||
|
||||
# Autotask integration (Phase 4: ticket creation from Run Checks)
|
||||
autotask_ticket_id = db.Column(db.Integer, nullable=True)
|
||||
autotask_ticket_number = db.Column(db.String(64), nullable=True)
|
||||
autotask_ticket_created_at = db.Column(db.DateTime, nullable=True)
|
||||
autotask_ticket_created_by_user_id = db.Column(db.Integer, db.ForeignKey("users.id"), nullable=True)
|
||||
|
||||
|
||||
created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
|
||||
updated_at = db.Column(
|
||||
@ -295,8 +288,6 @@ class JobRun(db.Model):
|
||||
|
||||
reviewed_by = db.relationship("User", foreign_keys=[reviewed_by_user_id])
|
||||
|
||||
autotask_ticket_created_by = db.relationship("User", foreign_keys=[autotask_ticket_created_by_user_id])
|
||||
|
||||
|
||||
class JobRunReviewEvent(db.Model):
|
||||
__tablename__ = "job_run_review_events"
|
||||
|
||||
@ -217,11 +217,13 @@
|
||||
<div class="col-12 col-lg-6">
|
||||
<div class="border rounded p-2">
|
||||
<div class="d-flex align-items-center justify-content-between">
|
||||
<div class="fw-semibold">Autotask ticket</div>
|
||||
<button type="button" class="btn btn-sm btn-outline-primary" id="rcm_autotask_create">Create</button>
|
||||
<div class="fw-semibold">New ticket</div>
|
||||
<button type="button" class="btn btn-sm btn-outline-primary" id="rcm_ticket_save">Add</button>
|
||||
</div>
|
||||
<div class="mt-2 small" id="rcm_autotask_info"></div>
|
||||
<div class="mt-2 small text-muted" id="rcm_autotask_status"></div>
|
||||
<div class="mt-2">
|
||||
<input class="form-control form-control-sm" id="rcm_ticket_code" type="text" placeholder="Ticket number (e.g., T20260106.0001)" />
|
||||
</div>
|
||||
<div class="mt-2 small text-muted" id="rcm_ticket_status"></div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="col-12 col-lg-6">
|
||||
@ -297,48 +299,10 @@
|
||||
var currentRunId = null;
|
||||
var currentPayload = null;
|
||||
|
||||
// Phase 2.1: Read-only Autotask ticket polling (Run Checks page only)
|
||||
// Cache shape: { <ticketId>: {id, ticketNumber, status, statusName, title, lastActivityDate} }
|
||||
var autotaskTicketPollCache = {};
|
||||
|
||||
function pollAutotaskTicketsOnPageOpen() {
|
||||
// Only execute on Run Checks page load.
|
||||
var url = '/api/run-checks/autotask-ticket-poll';
|
||||
var qs = [];
|
||||
// include_reviewed is only meaningful for admins
|
||||
try {
|
||||
var includeReviewed = {{ 'true' if include_reviewed else 'false' }};
|
||||
if (includeReviewed) qs.push('include_reviewed=1');
|
||||
} catch (e) {}
|
||||
if (qs.length) url += '?' + qs.join('&');
|
||||
|
||||
fetch(url)
|
||||
.then(function (r) { return r.json(); })
|
||||
.then(function (j) {
|
||||
if (!j || j.status !== 'ok') return;
|
||||
autotaskTicketPollCache = {};
|
||||
var list = (j.tickets || []);
|
||||
for (var i = 0; i < list.length; i++) {
|
||||
var t = list[i] || {};
|
||||
var id = parseInt(t.id, 10);
|
||||
if (!Number.isFinite(id) || id <= 0) continue;
|
||||
autotaskTicketPollCache[id] = t;
|
||||
}
|
||||
window.__rcAutotaskTicketPollCache = autotaskTicketPollCache;
|
||||
})
|
||||
.catch(function () {
|
||||
autotaskTicketPollCache = {};
|
||||
window.__rcAutotaskTicketPollCache = autotaskTicketPollCache;
|
||||
});
|
||||
}
|
||||


var btnMarkAllReviewed = document.getElementById('rcm_mark_all_reviewed');
var btnMarkSuccessOverride = document.getElementById('rcm_mark_success_override');

pollAutotaskTicketsOnPageOpen();

// Shift-click range selection for checkbox rows
// Shift-click range selection for checkbox rows
var lastCheckedCb = null;


@ -877,99 +841,56 @@ table.addEventListener('change', function (e) {
}

function bindInlineCreateForms() {
var btnAutotask = document.getElementById('rcm_autotask_create');
var atInfo = document.getElementById('rcm_autotask_info');
var atStatus = document.getElementById('rcm_autotask_status');

var btnTicket = document.getElementById('rcm_ticket_save');
var btnRemark = document.getElementById('rcm_remark_save');
var tCode = document.getElementById('rcm_ticket_code');
var tStatus = document.getElementById('rcm_ticket_status');
var rBody = document.getElementById('rcm_remark_body');
var rStatus = document.getElementById('rcm_remark_status');

function clearStatus() {
if (atStatus) atStatus.textContent = '';
if (tStatus) tStatus.textContent = '';
if (rStatus) rStatus.textContent = '';
}

function setDisabled(disabled) {
if (btnAutotask) btnAutotask.disabled = disabled;
if (btnTicket) btnTicket.disabled = disabled;
if (btnRemark) btnRemark.disabled = disabled;
if (rBody) rBody.disabled = disabled;
if (tCode) tCode.disabled = disabled;
if (rBody) rBody.disabled = disabled;
}

window.__rcmSetCreateDisabled = setDisabled;
window.__rcmClearCreateStatus = clearStatus;

function renderAutotaskInfo(run) {
if (!atInfo) return;
var num = (run && run.autotask_ticket_number) ? String(run.autotask_ticket_number) : '';
var tid = (run && run.autotask_ticket_id) ? parseInt(run.autotask_ticket_id, 10) : null;
var polled = (tid && autotaskTicketPollCache && autotaskTicketPollCache[tid]) ? autotaskTicketPollCache[tid] : null;

var lines = [];
if (num) {
lines.push('<div><strong>Ticket:</strong> ' + escapeHtml(num) + '</div>');
} else if (tid) {
lines.push('<div><strong>Ticket:</strong> created</div>');
} else {
lines.push('<div class="text-muted">No Autotask ticket created for this run.</div>');
}

// Phase 2.1 visibility only: show last polled status if available
if (tid) {
if (polled) {
var statusName = (polled.statusName || '').toString().trim();
var statusVal = (polled.status !== undefined && polled.status !== null) ? String(polled.status) : '';
var label = statusName ? statusName : (statusVal ? ('Status ' + statusVal) : '');
if (label) {
lines.push('<div class="text-muted">PSA status (polled): ' + escapeHtml(label) + '</div>');
}
} else {
lines.push('<div class="text-muted">PSA status (polled): not available</div>');
}
}

atInfo.innerHTML = lines.join('');
}
window.__rcmRenderAutotaskInfo = renderAutotaskInfo;
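// --- Illustrative sketch (not part of this commit; assumptions noted) ---
// renderAutotaskInfo() calls an escapeHtml() helper that is defined elsewhere in
// this template and is not shown in the hunk above. A minimal DOM-based stand-in
// (named differently here to avoid implying it is the real implementation) could be:
function escapeHtmlSketch(value) {
  var div = document.createElement('div');
  div.appendChild(document.createTextNode(value == null ? '' : String(value)));
  return div.innerHTML; // '<b>x</b>' becomes '&lt;b&gt;x&lt;/b&gt;'
}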

if (btnAutotask) {
btnAutotask.addEventListener('click', function () {
if (btnTicket) {
btnTicket.addEventListener('click', function () {
if (!currentRunId) { alert('Select a run first.'); return; }
clearStatus();
if (atStatus) atStatus.textContent = 'Creating ticket...';
btnAutotask.disabled = true;
apiJson('/api/run-checks/autotask-ticket', {
var ticket_code = tCode ? (tCode.value || '').trim().toUpperCase() : '';
if (!ticket_code) {
if (tStatus) tStatus.textContent = 'Ticket number is required.';
else alert('Ticket number is required.');
return;
}
if (!/^T\d{8}\.\d{4}$/.test(ticket_code)) {
if (tStatus) tStatus.textContent = 'Invalid ticket number format. Expected TYYYYMMDD.####.';
else alert('Invalid ticket number format. Expected TYYYYMMDD.####.');
return;
}
if (tStatus) tStatus.textContent = 'Saving...';
apiJson('/api/tickets', {
method: 'POST',
body: JSON.stringify({run_id: currentRunId})
body: JSON.stringify({job_run_id: currentRunId, ticket_code: ticket_code})
})
.then(function (j) {
if (!j || j.status !== 'ok') throw new Error((j && j.message) || 'Failed.');
if (atStatus) atStatus.textContent = '';

// Refresh modal data so UI reflects stored ticket linkage.
var keepRunId = currentRunId;
if (currentJobId) {
return fetch('/api/run-checks/details?job_id=' + encodeURIComponent(currentJobId))
.then(function (r) { return r.json(); })
.then(function (payload) {
currentPayload = payload;
// Find the same run index
var idx = 0;
var runs = (payload && payload.runs) || [];
for (var i = 0; i < runs.length; i++) {
if (String(runs[i].id) === String(keepRunId)) { idx = i; break; }
}
// Re-render the currently open Run Checks modal with fresh data.
renderRun(payload, idx);
});
}
.then(function () {
if (tCode) tCode.value = '';
if (tStatus) tStatus.textContent = '';
loadAlerts(currentRunId);
})
.catch(function (e) {
if (atStatus) atStatus.textContent = e.message || 'Failed.';
if (tStatus) tStatus.textContent = e.message || 'Failed.';
else alert(e.message || 'Failed.');
})
.finally(function () {
// State will be recalculated by renderRun.
});
});
}
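// --- Illustrative sketch (not part of this commit; assumptions noted) ---
// Both handlers above go through an apiJson() helper that is defined elsewhere in
// this template. A minimal stand-in with the same call shape (URL plus fetch
// options, resolving to parsed JSON) might look like this:
function apiJsonSketch(url, opts) {
  var options = Object.assign({ headers: { 'Content-Type': 'application/json' } }, opts || {});
  return fetch(url, options).then(function (r) { return r.json(); });
}
// Ticket numbers are validated against /^T\d{8}\.\d{4}$/ before any request is
// sent: "T20260119.0042" passes, while "T2026-01-19.42" is rejected.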
@ -1035,8 +956,7 @@ table.addEventListener('change', function (e) {

currentRunId = run.id || null;
if (window.__rcmClearCreateStatus) window.__rcmClearCreateStatus();
if (window.__rcmRenderAutotaskInfo) window.__rcmRenderAutotaskInfo(run);
if (window.__rcmSetCreateDisabled) window.__rcmSetCreateDisabled(!currentRunId || !!run.autotask_ticket_id);
if (window.__rcmSetCreateDisabled) window.__rcmSetCreateDisabled(!currentRunId);
if (btnMarkSuccessOverride) {
var _rs = (run.status || '').toString().toLowerCase();
var _canOverride = !!currentRunId && !run.missed && (_rs.indexOf('override') === -1) && (_rs.indexOf('success') === -1);
@ -1224,10 +1144,9 @@ table.addEventListener('change', function (e) {
var dot = run.missed ? "dot-missed" : statusDotClass(run.status);
var dotHtml = dot ? ('<span class="status-dot ' + dot + ' me-2" aria-hidden="true"></span>') : '';
var reviewedMark = run.is_reviewed ? ' <span class="ms-2" title="Reviewed" aria-label="Reviewed">✔</span>' : '';
var ticketMark = run.autotask_ticket_id ? ' <span class="ms-2" title="Autotask ticket created" aria-label="Autotask ticket">🎫</span>' : '';

a.title = run.status || '';
a.innerHTML = dotHtml + '<span class="text-nowrap">' + escapeHtml(run.run_at || 'Run') + '</span>' + reviewedMark + ticketMark;
a.innerHTML = dotHtml + '<span class="text-nowrap">' + escapeHtml(run.run_at || 'Run') + '</span>' + reviewedMark;
a.addEventListener('click', function (ev) {
ev.preventDefault();
renderRun(data, idx);

@ -397,17 +397,6 @@
<div class="form-text">Requires refreshed reference data.</div>
</div>

<div class="col-md-6">
<label for="autotask_default_ticket_status" class="form-label">Default Ticket Status</label>
<select class="form-select" id="autotask_default_ticket_status" name="autotask_default_ticket_status">
<option value="" {% if not settings.autotask_default_ticket_status %}selected{% endif %}>Select...</option>
{% for st in autotask_ticket_statuses %}
<option value="{{ st.id }}" {% if settings.autotask_default_ticket_status == st.id %}selected{% endif %}>{{ st.name }}</option>
{% endfor %}
</select>
<div class="form-text">Required for Autotask ticket creation. Requires refreshed reference data.</div>
</div>

<div class="col-md-6">
<label for="autotask_priority_warning" class="form-label">Priority for Warning</label>
<select class="form-select" id="autotask_priority_warning" name="autotask_priority_warning">
@ -455,7 +444,6 @@
<div class="text-muted small mt-2">
Cached Queues: {{ autotask_queues|length }}<br />
Cached Ticket Sources: {{ autotask_ticket_sources|length }}<br />
Cached Ticket Statuses: {{ autotask_ticket_statuses|length }}<br />
Cached Priorities: {{ autotask_priorities|length }}
</div>
</div>
@ -468,7 +456,7 @@
<button type="submit" class="btn btn-outline-primary">Refresh reference data</button>
</form>
</div>
<div class="form-text mt-2 text-md-end">Refresh loads Queues, Ticket Sources, Ticket Statuses, and Priorities from Autotask for dropdown usage.</div>
<div class="form-text mt-2 text-md-end">Refresh loads Queues, Ticket Sources, and Priorities from Autotask for dropdown usage.</div>
</div>
</div>
</div>