Add remarks to global search results

This commit is contained in:
Ivo Oskamp 2026-02-16 16:57:51 +01:00
parent c8e7491c94
commit 652da5e117
2 changed files with 114 additions and 0 deletions

View File

@ -20,6 +20,7 @@ SEARCH_SECTION_KEYS = [
"daily_jobs", "daily_jobs",
"run_checks", "run_checks",
"tickets", "tickets",
"remarks",
"overrides", "overrides",
"reports", "reports",
] ]
@ -34,6 +35,7 @@ def _is_section_allowed(section: str) -> bool:
"daily_jobs": {"admin", "operator", "viewer"}, "daily_jobs": {"admin", "operator", "viewer"},
"run_checks": {"admin", "operator"}, "run_checks": {"admin", "operator"},
"tickets": {"admin", "operator", "viewer"}, "tickets": {"admin", "operator", "viewer"},
"remarks": {"admin", "operator", "viewer"},
"overrides": {"admin", "operator", "viewer"}, "overrides": {"admin", "operator", "viewer"},
"reports": {"admin", "operator", "viewer", "reporter"}, "reports": {"admin", "operator", "viewer", "reporter"},
} }
@ -629,6 +631,113 @@ def _build_tickets_results(patterns: list[str], page: int) -> dict:
return section return section
def _build_remarks_results(patterns: list[str], page: int) -> dict:
    """Build the "Remarks" section of the global search results.

    Matches every wildcard-expanded term in *patterns* against remark
    title/body, attached scope metadata (customer name, scope type, backup
    software/type, job-name match, linked job name) and the start/resolved
    dates, then paginates and formats the hits for the search template.

    Args:
        patterns: Wildcard-enabled search terms; a row must match all of them.
        page: 1-based page number requested for this section.

    Returns:
        A section dict (key/title/view_all_url/items plus paging fields).
        Returned empty — zero items — when the current role is not allowed
        to see the "remarks" section.
    """
    section = {
        "key": "remarks",
        "title": "Remarks",
        "view_all_url": url_for("main.tickets_page", tab="remarks"),
        "total": 0,
        "items": [],
        "current_page": 1,
        "total_pages": 1,
        "has_prev": False,
        "has_next": False,
        "prev_url": "",
        "next_url": "",
    }
    if not _is_section_allowed("remarks"):
        return section

    # Outer-join scopes (and their customer/job) so scope metadata is
    # searchable while remarks without any scope still match on title/body.
    query = (
        db.session.query(Remark)
        .select_from(Remark)
        .outerjoin(RemarkScope, RemarkScope.remark_id == Remark.id)
        .outerjoin(Customer, Customer.id == RemarkScope.customer_id)
        .outerjoin(Job, Job.id == RemarkScope.job_id)
    )
    match_expr = _contains_all_terms(
        [
            func.coalesce(Remark.title, ""),
            func.coalesce(Remark.body, ""),
            func.coalesce(Customer.name, ""),
            func.coalesce(RemarkScope.scope_type, ""),
            func.coalesce(RemarkScope.backup_software, ""),
            func.coalesce(RemarkScope.backup_type, ""),
            func.coalesce(RemarkScope.job_name_match, ""),
            func.coalesce(Job.job_name, ""),
            cast(Remark.start_date, String),
            cast(Remark.resolved_at, String),
        ],
        patterns,
    )
    if match_expr is not None:
        query = query.filter(match_expr)
    # The scope joins can produce one row per scope; collapse duplicates.
    query = query.distinct()

    total, current_page, total_pages, rows = _paginate_query(
        query,
        page,
        [Remark.start_date.desc().nullslast()],
    )
    _enrich_paging(section, total, current_page, total_pages)

    for remark in rows:
        customer_display, scope_summary = _remark_scope_summary(remark.id)
        # Prefer the title, fall back to the body; keep previews short.
        preview = (remark.title or remark.body or "").strip()
        if len(preview) > 80:
            preview = preview[:77] + "..."
        section["items"].append(
            {
                "title": preview or f"Remark #{remark.id}",
                "subtitle": f"{customer_display} | {scope_summary}",
                "meta": _format_datetime(remark.start_date),
                "link": url_for("main.remark_detail", remark_id=remark.id),
            }
        )
    return section


def _remark_scope_summary(remark_id) -> tuple[str, str]:
    """Return ``(customer_display, scope_summary)`` strings for one remark.

    ``customer_display`` is the first distinct customer name attached via
    the remark's scopes, with a ``+N`` suffix when more customers exist.
    ``scope_summary`` is "scope_type / backup_software / backup_type" taken
    from the first scope row, skipping empty parts.  Both default to "-"
    when no scope data is available or the lookup fails.

    NOTE(review): this issues one extra query per remark on the results
    page (N+1 over the paginated rows); acceptable for one search page,
    but batch with ``remark_id.in_(...)`` if page sizes grow.
    """
    customer_display = "-"
    scope_summary = "-"
    try:
        scope_rows = (
            db.session.query(
                RemarkScope.scope_type.label("scope_type"),
                RemarkScope.backup_software.label("backup_software"),
                RemarkScope.backup_type.label("backup_type"),
                Customer.name.label("customer_name"),
            )
            .select_from(RemarkScope)
            .outerjoin(Customer, Customer.id == RemarkScope.customer_id)
            .filter(RemarkScope.remark_id == remark_id)
            .all()
        )
        # Distinct customer names in first-seen order.
        customer_names = []
        for scope in scope_rows:
            name = getattr(scope, "customer_name", None)
            if name and name not in customer_names:
                customer_names.append(name)
        if customer_names:
            customer_display = customer_names[0]
            if len(customer_names) > 1:
                customer_display = f"{customer_display} +{len(customer_names) - 1}"
        if scope_rows:
            first = scope_rows[0]
            bits = [
                str(value)
                for value in (
                    getattr(first, "scope_type", None),
                    getattr(first, "backup_software", None),
                    getattr(first, "backup_type", None),
                )
                if value
            ]
            scope_summary = " / ".join(bits) if bits else "-"
    except Exception:
        # Best-effort enrichment: a broken scope lookup must never take
        # down the whole search page — fall back to placeholder dashes.
        return "-", "-"
    return customer_display, scope_summary
def _build_overrides_results(patterns: list[str], page: int) -> dict: def _build_overrides_results(patterns: list[str], page: int) -> dict:
section = { section = {
"key": "overrides", "key": "overrides",
@ -784,6 +893,7 @@ def search_page():
sections.append(_build_daily_jobs_results(patterns, requested_pages["daily_jobs"])) sections.append(_build_daily_jobs_results(patterns, requested_pages["daily_jobs"]))
sections.append(_build_run_checks_results(patterns, requested_pages["run_checks"])) sections.append(_build_run_checks_results(patterns, requested_pages["run_checks"]))
sections.append(_build_tickets_results(patterns, requested_pages["tickets"])) sections.append(_build_tickets_results(patterns, requested_pages["tickets"]))
sections.append(_build_remarks_results(patterns, requested_pages["remarks"]))
sections.append(_build_overrides_results(patterns, requested_pages["overrides"])) sections.append(_build_overrides_results(patterns, requested_pages["overrides"]))
sections.append(_build_reports_results(patterns, requested_pages["reports"])) sections.append(_build_reports_results(patterns, requested_pages["reports"]))
else: else:
@ -794,6 +904,7 @@ def search_page():
{"key": "daily_jobs", "title": "Daily Jobs", "view_all_url": url_for("main.daily_jobs"), "total": 0, "items": [], "current_page": 1, "total_pages": 1, "has_prev": False, "has_next": False, "prev_url": "", "next_url": ""}, {"key": "daily_jobs", "title": "Daily Jobs", "view_all_url": url_for("main.daily_jobs"), "total": 0, "items": [], "current_page": 1, "total_pages": 1, "has_prev": False, "has_next": False, "prev_url": "", "next_url": ""},
{"key": "run_checks", "title": "Run Checks", "view_all_url": url_for("main.run_checks_page"), "total": 0, "items": [], "current_page": 1, "total_pages": 1, "has_prev": False, "has_next": False, "prev_url": "", "next_url": ""}, {"key": "run_checks", "title": "Run Checks", "view_all_url": url_for("main.run_checks_page"), "total": 0, "items": [], "current_page": 1, "total_pages": 1, "has_prev": False, "has_next": False, "prev_url": "", "next_url": ""},
{"key": "tickets", "title": "Tickets", "view_all_url": url_for("main.tickets_page"), "total": 0, "items": [], "current_page": 1, "total_pages": 1, "has_prev": False, "has_next": False, "prev_url": "", "next_url": ""}, {"key": "tickets", "title": "Tickets", "view_all_url": url_for("main.tickets_page"), "total": 0, "items": [], "current_page": 1, "total_pages": 1, "has_prev": False, "has_next": False, "prev_url": "", "next_url": ""},
{"key": "remarks", "title": "Remarks", "view_all_url": url_for("main.tickets_page", tab="remarks"), "total": 0, "items": [], "current_page": 1, "total_pages": 1, "has_prev": False, "has_next": False, "prev_url": "", "next_url": ""},
{"key": "overrides", "title": "Existing overrides", "view_all_url": url_for("main.overrides"), "total": 0, "items": [], "current_page": 1, "total_pages": 1, "has_prev": False, "has_next": False, "prev_url": "", "next_url": ""}, {"key": "overrides", "title": "Existing overrides", "view_all_url": url_for("main.overrides"), "total": 0, "items": [], "current_page": 1, "total_pages": 1, "has_prev": False, "has_next": False, "prev_url": "", "next_url": ""},
{"key": "reports", "title": "Reports", "view_all_url": url_for("main.reports"), "total": 0, "items": [], "current_page": 1, "total_pages": 1, "has_prev": False, "has_next": False, "prev_url": "", "next_url": ""}, {"key": "reports", "title": "Reports", "view_all_url": url_for("main.reports"), "total": 0, "items": [], "current_page": 1, "total_pages": 1, "has_prev": False, "has_next": False, "prev_url": "", "next_url": ""},
] ]
@ -826,6 +937,8 @@ def search_page():
s["view_all_url"] = url_for("main.run_checks_page", q=query) s["view_all_url"] = url_for("main.run_checks_page", q=query)
elif key == "tickets": elif key == "tickets":
s["view_all_url"] = url_for("main.tickets_page", q=query) s["view_all_url"] = url_for("main.tickets_page", q=query)
elif key == "remarks":
s["view_all_url"] = url_for("main.tickets_page", tab="remarks", q=query)
elif key == "overrides": elif key == "overrides":
s["view_all_url"] = url_for("main.overrides", q=query) s["view_all_url"] = url_for("main.overrides", q=query)
elif key == "reports": elif key == "reports":

View File

@ -26,6 +26,7 @@ This file documents all changes made to this project via Claude Code.
- Changed filtering support on Inbox, Customers, Jobs, Daily Jobs, Run Checks, Tickets, Overrides, and Reports routes to accept wildcard-enabled `q` terms from search - Changed filtering support on Inbox, Customers, Jobs, Daily Jobs, Run Checks, Tickets, Overrides, and Reports routes to accept wildcard-enabled `q` terms from search
- Changed Reports frontend loading (`/api/reports`) to forward URL `q` so client-side refresh keeps the same filtered result set - Changed Reports frontend loading (`/api/reports`) to forward URL `q` so client-side refresh keeps the same filtered result set
- Changed Daily Jobs search section UI to show an explicit English note that the Daily Jobs page itself is day-scoped while search matches can reflect jobs across other days - Changed Daily Jobs search section UI to show an explicit English note that the Daily Jobs page itself is day-scoped while search matches can reflect jobs across other days
- Added a dedicated Remarks section to global search results (with paging and detail links), so remark records are searchable alongside tickets
### Fixed ### Fixed
- Fixed `/search` page crash (`TypeError: 'builtin_function_or_method' object is not iterable`) by replacing Jinja dict access from `section.items` to `section['items']` in `templates/main/search.html` - Fixed `/search` page crash (`TypeError: 'builtin_function_or_method' object is not iterable`) by replacing Jinja dict access from `section.items` to `section['items']` in `templates/main/search.html`