diff --git a/containers/backupchecks/src/backend/app/cloud_connect_importer.py b/containers/backupchecks/src/backend/app/cloud_connect_importer.py
new file mode 100644
index 0000000..2eeed0e
--- /dev/null
+++ b/containers/backupchecks/src/backend/app/cloud_connect_importer.py
@@ -0,0 +1,306 @@
+"""Veeam Cloud Connect daily report importer.
+
+Parses the HTML body of a Veeam Cloud Connect provider daily report email
+and upserts each tenant (User row) into the cloud_connect_accounts staging
+table — identical in spirit to the Cove Data Protection importer.
+
+Flow:
+ 1. When the mail-importer receives a Cloud Connect daily report it calls
+ ``upsert_cloud_connect_report(mail_message_id)``.
+ 2. Every User × section combination is upserted into cloud_connect_accounts.
+ 3. Unlinked accounts appear on the new "Cloud Connect" review page where an
+ admin can create or link a Backupchecks job (same UX as Cove Accounts).
+ 4. For linked accounts a JobRun is created/updated; the mail_message_id is
+ attached so the mail body is available in the popup.
+
+Status mapping (row background colour in the HTML report):
+ #fb9895 → Failed
+ #ffd96c → Warning
+ #ffffff → Success
+"""
+from __future__ import annotations
+
+import logging
+import re
+from datetime import datetime, timedelta
+from typing import Optional
+
+from .database import db
+from .models import CloudConnectAccount, Customer, Job, JobRun
+
+logger = logging.getLogger(__name__)
+
+
+# ---------------------------------------------------------------------------
+# HTML parsing helpers
+# ---------------------------------------------------------------------------
+
+def _strip_tags(html: str) -> str:
+ """Strip HTML tags and normalise whitespace."""
+ if not html:
+ return ""
+ text = re.sub(r" ", " ", html, flags=re.IGNORECASE)
+ text = re.sub(r"<[^>]+>", "", text)
+ text = re.sub(r"\s+", " ", text)
+ return text.strip()
+
+
+def _row_status(row_style: str) -> str:
+ """Map Veeam row background colour to a Backupchecks status string."""
+ m = re.search(r"background-color\s*:\s*([^;\"'\s]+)", row_style, re.IGNORECASE)
+ if not m:
+ return "Success"
+ colour = m.group(1).strip().lower()
+ if colour in {"#fb9895", "#ff9999", "#f4cccc", "#ffb3b3"}:
+ return "Failed"
+ if colour in {"#ffd96c", "#fff2cc", "#ffe599", "#f9cb9c"}:
+ return "Warning"
+ return "Success"
+
+
+def _parse_last_active(raw: str) -> Optional[datetime]:
+ """Convert a 'Last active' string like '14 hours ago' to a UTC datetime.
+
+ Returns None when the value is 'never' or cannot be parsed.
+ """
+ s = (raw or "").strip().lower()
+ if not s or s == "never":
+ return None
+ now = datetime.utcnow()
+ m = re.match(r"(\d+)\s+(hour|day|week|month)s?\s+ago", s)
+ if not m:
+ return None
+ n = int(m.group(1))
+ unit = m.group(2)
+ if unit == "hour":
+ return now - timedelta(hours=n)
+ if unit == "day":
+ return now - timedelta(days=n)
+ if unit == "week":
+ return now - timedelta(weeks=n)
+ if unit == "month":
+ return now - timedelta(days=n * 30)
+ return None
+
+
def _parse_report_tables(html: str) -> list[dict]:
    """Extract all tenant rows from a Cloud Connect daily report HTML body.

    Returns a list of dicts with keys:
        section, user, repo_name, repo_type, num_items,
        total_quota, used_space, free_space, last_active_raw,
        last_active_dt, status
    """
    if not html:
        return []

    # Section headers are <span> tags with font-size 18px just before each
    # table.  We walk the HTML top-to-bottom, tracking the current section.
    # NOTE(review): these four patterns were garbled in transit (the tag
    # literals were stripped); reconstructed from the surviving fragments.
    section_pattern = re.compile(
        r'<span[^>]*font-size:\s*18px[^>]*>\s*(Backup|Replication|Agent)\s*</span>',
        re.IGNORECASE | re.DOTALL,
    )
    table_pattern = re.compile(r'<table[^>]*>(.*?)</table>', re.IGNORECASE | re.DOTALL)
    row_pattern = re.compile(r'<tr([^>]*)>(.*?)</tr>', re.IGNORECASE | re.DOTALL)
    cell_pattern = re.compile(r'<t[dh][^>]*>(.*?)</t[dh]>', re.IGNORECASE | re.DOTALL)

    results: list[dict] = []

    # Positions of section headers and tables, so they can be interleaved.
    section_positions = [(m.start(), m.group(1)) for m in section_pattern.finditer(html)]
    table_positions = [(m.start(), m.group(1)) for m in table_pattern.finditer(html)]

    def _section_for_table(table_start: int) -> str:
        """Return the section name of the nearest preceding section header."""
        current = "Backup"
        for pos, name in section_positions:
            if pos < table_start:
                current = name
        return current

    for table_start, table_inner in table_positions:
        section = _section_for_table(table_start)

        rows = row_pattern.findall(table_inner)
        if not rows:
            continue

        # The header row determines the column layout.  Skip non-tenant
        # tables (e.g. the version footer) whose first header is not "User".
        header_cells = [_strip_tags(c).strip() for c in cell_pattern.findall(rows[0][1])]
        if not header_cells or header_cells[0].lower() != "user":
            continue

        for row_attr, row_inner in rows[1:]:
            cells = [_strip_tags(c).strip() for c in cell_pattern.findall(row_inner)]
            if not cells:
                continue

            user = cells[0]
            if not user or user.upper() == "TOTAL":
                continue

            def _cell(i: int) -> str:
                """Graceful cell access — rows may be shorter than the header."""
                return cells[i] if i < len(cells) else ""

            # Column indices differ between Backup and Agent tables.
            # Backup: User | #VM | Repo Name | Repo | Quota | Used | Free | Last active | Expiry
            # Agent:  User | #WS | #Server | Repo Name | Repo | Quota | Used | Free | Last active | Expiry
            is_agent = section.lower() == "agent"

            if is_agent:
                num_items = f"{_cell(1)} WS / {_cell(2)} Server"
                repo_name, repo_type = _cell(3), _cell(4)
                total_quota, used_space, free_space = _cell(5), _cell(6), _cell(7)
                last_active_raw = _cell(8)
            else:
                num_items = _cell(1)
                repo_name, repo_type = _cell(2), _cell(3)
                total_quota, used_space, free_space = _cell(4), _cell(5), _cell(6)
                last_active_raw = _cell(7)

            status = _row_status(row_attr)

            # Escalate a white (Success) row to Warning when the tenant has
            # never been active or has been idle for more than three days.
            last_active_dt = _parse_last_active(last_active_raw)
            if status == "Success":
                if last_active_raw.lower() == "never":
                    status = "Warning"
                elif last_active_dt and (datetime.utcnow() - last_active_dt) > timedelta(days=3):
                    status = "Warning"

            results.append({
                "section": section,
                "user": user,
                "repo_name": repo_name,
                "repo_type": repo_type,
                "num_items": num_items,
                "total_quota": total_quota,
                "used_space": used_space,
                "free_space": free_space,
                "last_active_raw": last_active_raw,
                "last_active_dt": last_active_dt,
                "status": status,
            })

    return results
+
+
+# ---------------------------------------------------------------------------
+# Public import entry point
+# ---------------------------------------------------------------------------
+
def upsert_cloud_connect_report(mail_message_id: int, html_body: str) -> dict:
    """Parse a Cloud Connect daily report and upsert all tenant rows.

    Called by the mail importer when it detects a Cloud Connect daily report.
    Returns a summary dict: {total, linked, unlinked, created, skipped}.

    Notes:
      * Timestamps are naive UTC (``datetime.utcnow``).
      * ``skipped`` counts both same-day re-imports (updated in place) and
        rows whose linked job no longer exists; same-day re-imports also
        count as ``linked``.
      * All changes are committed once at the end of the batch.
    """
    rows = _parse_report_tables(html_body)
    if not rows:
        return {"total": 0, "linked": 0, "unlinked": 0, "created": 0, "skipped": 0}

    now = datetime.utcnow()
    counters = {"total": len(rows), "linked": 0, "unlinked": 0, "created": 0, "skipped": 0}

    for row in rows:
        user = row["user"]
        section = row["section"]

        # Upsert the staging record — keyed on (user, section).
        acc = CloudConnectAccount.query.filter_by(user=user, section=section).first()
        if acc is None:
            acc = CloudConnectAccount(
                user=user,
                section=section,
                first_seen_at=now,
            )
            db.session.add(acc)

        # Always refresh the "latest observed" fields, even for known accounts.
        acc.repo_name = row["repo_name"]
        acc.repo_type = row["repo_type"]
        acc.num_items = row["num_items"]
        acc.total_quota = row["total_quota"]
        acc.used_space = row["used_space"]
        acc.free_space = row["free_space"]
        acc.last_active_raw = row["last_active_raw"]
        acc.last_active_dt = row["last_active_dt"]
        acc.last_status = row["status"]
        acc.last_seen_at = now
        acc.last_mail_message_id = mail_message_id

        # Flush so newly added accounts are written before the link check.
        db.session.flush()

        if not acc.job_id:
            counters["unlinked"] += 1
            continue

        # Account is linked — create a JobRun if not already present for today.
        job = Job.query.get(acc.job_id)
        if not job:
            # Dangling job_id (job deleted since linking) — count and move on.
            counters["skipped"] += 1
            continue

        # Deduplicate: one run per job per calendar day (report is daily).
        run_date = now.date().isoformat()
        external_id = f"vcc-{user}-{section}-{run_date}".lower().replace(" ", "_")

        existing = JobRun.query.filter_by(job_id=job.id, external_id=external_id).first()
        if existing:
            # Update status in case re-import happens same day with different result.
            existing.status = row["status"]
            existing.run_at = now
            db.session.add(existing)
            counters["skipped"] += 1
            counters["linked"] += 1
            continue

        error_message = _build_error_message(row)

        run = JobRun(
            job_id=job.id,
            mail_message_id=mail_message_id,
            run_at=now,
            status=row["status"],
            remark=error_message or None,
            missed=False,
            override_applied=False,
            source_type="cloud_connect",
            external_id=external_id,
        )
        db.session.add(run)
        counters["created"] += 1
        counters["linked"] += 1

    db.session.commit()
    return counters
+
+
+def _build_error_message(row: dict) -> str:
+ """Build a human-readable remark for a Cloud Connect run."""
+ parts = [
+ f"Repository: {row['repo_name']} ({row['repo_type']})",
+ f"Used: {row['used_space']} / {row['total_quota']}",
+ f"Free: {row['free_space']}",
+ f"Last active: {row['last_active_raw'] or 'unknown'}",
+ ]
+ if row["status"] == "Failed":
+ parts.append("⚠ Repository appears to be full or near full")
+ elif row["status"] == "Warning" and row["last_active_raw"].lower() in ("never", ""):
+ parts.append("⚠ Backup has never run")
+ elif row["status"] == "Warning" and "days ago" in row["last_active_raw"].lower():
+ parts.append(f"⚠ No recent activity: {row['last_active_raw']}")
+ return " | ".join(parts)
diff --git a/containers/backupchecks/src/backend/app/mail_importer.py b/containers/backupchecks/src/backend/app/mail_importer.py
index ab608fb..f8dd92a 100644
--- a/containers/backupchecks/src/backend/app/mail_importer.py
+++ b/containers/backupchecks/src/backend/app/mail_importer.py
@@ -14,6 +14,7 @@ from . import db
from .models import MailMessage, SystemSettings, Job, JobRun, MailObject
from .parsers import parse_mail_message
from .parsers.veeam import extract_vspc_active_alarms_companies
+from .cloud_connect_importer import upsert_cloud_connect_report
from .email_utils import normalize_from_address, extract_best_html_from_eml, is_effectively_blank_html
from .job_matching import find_matching_job
from .ticketing_utils import link_open_internal_tickets_to_run
@@ -272,6 +273,37 @@ def _store_messages(settings: SystemSettings, messages):
btype = (getattr(mail, "backup_type", "") or "").strip().lower()
jname = (getattr(mail, "job_name", "") or "").strip().lower()
+ # ── Veeam Cloud Connect daily report ──────────────────────
+ # One report contains all tenants. Upsert each into the
+ # cloud_connect_accounts staging table; linked accounts get
+ # a JobRun automatically — same flow as Cove Data Protection.
+ if bsw == "veeam" and btype == "cloud connect report":
+ try:
+ result = upsert_cloud_connect_report(
+ mail_message_id=mail.id,
+ html_body=(mail.html_body or ""),
+ )
+ logger.debug(
+ "Cloud Connect import: total=%s linked=%s unlinked=%s "
+ "created=%s skipped=%s",
+ result.get("total"), result.get("linked"),
+ result.get("unlinked"), result.get("created"),
+ result.get("skipped"),
+ )
+ if result.get("created", 0) > 0 or result.get("linked", 0) > 0:
+ if hasattr(mail, "approved"):
+ mail.approved = True
+ if hasattr(mail, "approved_at"):
+ mail.approved_at = datetime.utcnow()
+ if hasattr(mail, "location"):
+ mail.location = "history"
+ auto_approved += 1
+ except Exception as cc_exc:
+ logger.warning("Cloud Connect import failed: %s", cc_exc)
+ db.session.commit()
+ continue
+ # ── end Cloud Connect ──────────────────────────────────────
+
if bsw == "veeam" and btype == "service provider console" and jname == "active alarms summary":
raw = (mail.text_body or "").strip() or (mail.html_body or "")
companies = extract_vspc_active_alarms_companies(raw)
diff --git a/containers/backupchecks/src/backend/app/main/routes.py b/containers/backupchecks/src/backend/app/main/routes.py
index d6a777d..dedea7d 100644
--- a/containers/backupchecks/src/backend/app/main/routes.py
+++ b/containers/backupchecks/src/backend/app/main/routes.py
@@ -28,5 +28,6 @@ from . import routes_reporting_api # noqa: F401
from . import routes_user_settings # noqa: F401
from . import routes_search # noqa: F401
from . import routes_cove # noqa: F401
+from . import routes_cloud_connect # noqa: F401
__all__ = ["main_bp", "roles_required"]
diff --git a/containers/backupchecks/src/backend/app/main/routes_cloud_connect.py b/containers/backupchecks/src/backend/app/main/routes_cloud_connect.py
new file mode 100644
index 0000000..805c051
--- /dev/null
+++ b/containers/backupchecks/src/backend/app/main/routes_cloud_connect.py
@@ -0,0 +1,119 @@
+"""Veeam Cloud Connect accounts review routes.
+
+Mirrors the Cove Accounts flow:
+ /cloud-connect/accounts – list all accounts (unmatched first)
+  /cloud-connect/accounts/<id>/link   – link to existing job or create new job
+  /cloud-connect/accounts/<id>/unlink – remove the job link
+"""
+from .routes_shared import * # noqa: F401,F403
+from .routes_shared import _log_admin_event
+from ..models import CloudConnectAccount, Customer, Job, JobRun
+
+
@main_bp.route("/cloud-connect/accounts")
@login_required
@roles_required("admin", "operator")
def cloud_connect_accounts():
    """Render the Cloud Connect accounts review page.

    Unmatched accounts (no linked job) are listed first, then matched ones —
    same layout as the Cove Accounts page.
    """
    unmatched = (
        CloudConnectAccount.query
        .filter(CloudConnectAccount.job_id.is_(None))
        .order_by(CloudConnectAccount.user.asc(), CloudConnectAccount.section.asc())
        .all()
    )
    matched = (
        CloudConnectAccount.query
        .filter(CloudConnectAccount.job_id.isnot(None))
        .order_by(CloudConnectAccount.user.asc(), CloudConnectAccount.section.asc())
        .all()
    )

    customers = Customer.query.filter_by(active=True).order_by(Customer.name.asc()).all()
    jobs = Job.query.filter_by(archived=False).order_by(Job.job_name.asc()).all()

    # Attach derived fields for the template.  These are transient Python
    # attributes on the ORM instances — not persisted columns.
    for acc in unmatched + matched:
        acc.derived_backup_software = "Veeam"
        acc.derived_backup_type = (
            "Cloud Connect Agent" if acc.section == "Agent" else "Cloud Connect Backup"
        )
        acc.derived_job_name = acc.user

    return render_template(
        "main/cloud_connect_accounts.html",
        unmatched=unmatched,
        matched=matched,
        customers=customers,
        jobs=jobs,
    )
+
+
@main_bp.route("/cloud-connect/accounts/<int:cc_account_db_id>/link", methods=["POST"])
@login_required
@roles_required("admin", "operator")
def cloud_connect_account_link(cc_account_db_id: int):
    """Link a Cloud Connect account to a job.

    The ``action`` form field selects the mode:
      * ``create`` — create a new Job under the selected customer and link it.
      * ``link``   — link to an existing job selected by ``job_id``.
    """
    acc = CloudConnectAccount.query.get_or_404(cc_account_db_id)
    action = (request.form.get("action") or "").strip()  # "create" or "link"

    if action == "create":
        customer_id = request.form.get("customer_id", type=int)
        if not customer_id:
            flash("Please select a customer.", "danger")
            return redirect(url_for("main.cloud_connect_accounts"))

        customer = Customer.query.get_or_404(customer_id)

        job_name = (request.form.get("job_name") or acc.user).strip()
        # Bug fix: acc.derived_backup_type is a transient attribute set only
        # by the list view — it does not exist on a freshly loaded model, so
        # using it as the fallback raised AttributeError.  Derive it inline.
        default_backup_type = (
            "Cloud Connect Agent" if acc.section == "Agent" else "Cloud Connect Backup"
        )
        backup_type = (request.form.get("backup_type") or default_backup_type).strip()

        job = Job(
            customer_id=customer.id,
            backup_software="Veeam",
            backup_type=backup_type,
            job_name=job_name,
        )
        db.session.add(job)
        db.session.flush()  # assign job.id before linking

        acc.job_id = job.id
        db.session.commit()

        _log_admin_event(
            event_type="cloud_connect_account_linked",
            message=f"Cloud Connect account '{acc.user}' ({acc.section}) linked to new job '{job_name}'",
            details=f"customer={customer.name}, job_name={job_name}",
        )
        flash(f"Job '{job_name}' created and linked to '{acc.user}' ({acc.section}).", "success")

    elif action == "link":
        job_id = request.form.get("job_id", type=int)
        if not job_id:
            flash("Please select a job.", "danger")
            return redirect(url_for("main.cloud_connect_accounts"))

        job = Job.query.get_or_404(job_id)
        acc.job_id = job.id
        db.session.commit()

        _log_admin_event(
            event_type="cloud_connect_account_linked",
            message=f"Cloud Connect account '{acc.user}' ({acc.section}) linked to existing job '{job.job_name}'",
            details=f"job_id={job.id}, job_name={job.job_name}",
        )
        flash(f"Linked '{acc.user}' ({acc.section}) to job '{job.job_name}'.", "success")

    else:
        flash("Unknown action.", "danger")

    return redirect(url_for("main.cloud_connect_accounts"))
+
+
@main_bp.route("/cloud-connect/accounts/<int:cc_account_db_id>/unlink", methods=["POST"])
@login_required
@roles_required("admin", "operator")
def cloud_connect_account_unlink(cc_account_db_id: int):
    """Remove the job link from a Cloud Connect account."""
    acc = CloudConnectAccount.query.get_or_404(cc_account_db_id)
    previous_job_id = acc.job_id
    acc.job_id = None
    db.session.commit()
    # Audit the unlink for symmetry with the link action above.
    _log_admin_event(
        event_type="cloud_connect_account_unlinked",
        message=f"Cloud Connect account '{acc.user}' ({acc.section}) unlinked",
        details=f"previous_job_id={previous_job_id}",
    )
    flash(f"Unlinked '{acc.user}' ({acc.section}).", "success")
    return redirect(url_for("main.cloud_connect_accounts"))
diff --git a/containers/backupchecks/src/backend/app/migrations.py b/containers/backupchecks/src/backend/app/migrations.py
index bf939e1..83e631f 100644
--- a/containers/backupchecks/src/backend/app/migrations.py
+++ b/containers/backupchecks/src/backend/app/migrations.py
@@ -1270,6 +1270,48 @@ def migrate_entra_sso_settings() -> None:
print(f"[migrations] Failed to migrate Entra SSO columns: {exc}")
def migrate_cloud_connect_accounts_table() -> None:
    """Create the cloud_connect_accounts staging table if it does not exist.

    Idempotent: uses CREATE TABLE IF NOT EXISTS / CREATE INDEX IF NOT EXISTS,
    so it is safe to run on every startup.  The schema mirrors the
    CloudConnectAccount model in app/models.py.
    """
    try:
        engine = db.get_engine()
    except Exception as exc:
        print(f"[migrations] Could not get engine for cloud_connect_accounts migration: {exc}")
        return

    try:
        with engine.begin() as conn:
            # "user" is a reserved word in PostgreSQL and must stay quoted.
            conn.execute(text("""
                CREATE TABLE IF NOT EXISTS cloud_connect_accounts (
                    id SERIAL PRIMARY KEY,
                    "user" VARCHAR(255) NOT NULL,
                    section VARCHAR(32) NOT NULL,
                    repo_name VARCHAR(512) NULL,
                    repo_type VARCHAR(255) NULL,
                    num_items VARCHAR(64) NULL,
                    total_quota VARCHAR(32) NULL,
                    used_space VARCHAR(32) NULL,
                    free_space VARCHAR(32) NULL,
                    last_active_raw VARCHAR(64) NULL,
                    last_active_dt TIMESTAMP NULL,
                    last_status VARCHAR(32) NULL,
                    last_mail_message_id INTEGER NULL REFERENCES mail_messages(id) ON DELETE SET NULL,
                    job_id INTEGER NULL REFERENCES jobs(id) ON DELETE SET NULL,
                    first_seen_at TIMESTAMP NOT NULL DEFAULT NOW(),
                    last_seen_at TIMESTAMP NOT NULL DEFAULT NOW(),
                    CONSTRAINT uq_cloud_connect_accounts_user_section UNIQUE ("user", section)
                )
            """))
            conn.execute(text(
                'CREATE INDEX IF NOT EXISTS idx_cc_accounts_user ON cloud_connect_accounts ("user")'
            ))
            conn.execute(text(
                "CREATE INDEX IF NOT EXISTS idx_cc_accounts_job_id ON cloud_connect_accounts (job_id)"
            ))
        print("[migrations] migrate_cloud_connect_accounts_table completed.")
    except Exception as exc:
        print(f"[migrations] Failed to migrate cloud_connect_accounts table: {exc}")
+
+
def run_migrations() -> None:
print("[migrations] Starting migrations...")
migrate_add_username_to_users()
@@ -1315,6 +1357,7 @@ def run_migrations() -> None:
migrate_rename_admin_logs_to_audit_logs()
migrate_cove_integration()
migrate_cove_accounts_table()
+ migrate_cloud_connect_accounts_table()
migrate_entra_sso_settings()
print("[migrations] All migrations completed.")
diff --git a/containers/backupchecks/src/backend/app/models.py b/containers/backupchecks/src/backend/app/models.py
index 05b388d..41bd859 100644
--- a/containers/backupchecks/src/backend/app/models.py
+++ b/containers/backupchecks/src/backend/app/models.py
@@ -382,6 +382,46 @@ class CoveAccount(db.Model):
job = db.relationship("Job", backref=db.backref("cove_account", uselist=False))
class CloudConnectAccount(db.Model):
    """Staging table for Veeam Cloud Connect tenant accounts.

    Each row represents one User × section (Backup / Agent) combination
    as found in the Veeam Cloud Connect daily report email.

    Unlinked accounts (job_id IS NULL) appear on the Cloud Connect Accounts
    review page where an admin can create or link a Backupchecks job —
    identical to the Cove Accounts flow.
    """
    __tablename__ = "cloud_connect_accounts"

    id = db.Column(db.Integer, primary_key=True)

    # Identity — unique together, see __table_args__ below.
    user = db.Column(db.String(255), nullable=False)
    section = db.Column(db.String(32), nullable=False)

    # Latest values copied verbatim from the report email (free-form strings
    # such as "1.2 TB"; no unit parsing is done on these fields).
    repo_name = db.Column(db.String(512), nullable=True)
    repo_type = db.Column(db.String(255), nullable=True)
    num_items = db.Column(db.String(64), nullable=True)
    total_quota = db.Column(db.String(32), nullable=True)
    used_space = db.Column(db.String(32), nullable=True)
    free_space = db.Column(db.String(32), nullable=True)
    last_active_raw = db.Column(db.String(64), nullable=True)
    # Parsed UTC approximation of last_active_raw (None when "never"/unparsable).
    last_active_dt = db.Column(db.DateTime, nullable=True)
    last_status = db.Column(db.String(32), nullable=True)

    # Most recent report email this account appeared in, and the linked job
    # (NULL job_id means the account is still awaiting review).
    last_mail_message_id = db.Column(db.Integer, db.ForeignKey("mail_messages.id"), nullable=True)
    job_id = db.Column(db.Integer, db.ForeignKey("jobs.id"), nullable=True)

    # Naive-UTC bookkeeping timestamps.
    first_seen_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
    last_seen_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)

    job = db.relationship("Job", backref=db.backref("cloud_connect_account", uselist=False))

    __table_args__ = (
        db.UniqueConstraint("user", "section", name="uq_cloud_connect_accounts_user_section"),
    )
+
+
class JobRunReviewEvent(db.Model):
__tablename__ = "job_run_review_events"
diff --git a/containers/backupchecks/src/templates/layout/base.html b/containers/backupchecks/src/templates/layout/base.html
index 9ab2fd7..dbcf0f4 100644
--- a/containers/backupchecks/src/templates/layout/base.html
+++ b/containers/backupchecks/src/templates/layout/base.html
@@ -111,6 +111,9 @@
{% if system_settings and system_settings.cove_enabled and active_role in ('admin', 'operator') %}
{{ bc_nav_item('main.cove_accounts', 'Cove Accounts', icon_cloud()) }}
{% endif %}
+ {% if active_role in ('admin', 'operator') %}
+ {{ bc_nav_item('main.cloud_connect_accounts', 'Cloud Connect', icon_server()) }}
+ {% endif %}
Info
diff --git a/containers/backupchecks/src/templates/main/cloud_connect_accounts.html b/containers/backupchecks/src/templates/main/cloud_connect_accounts.html
new file mode 100644
index 0000000..c95ef78
--- /dev/null
+++ b/containers/backupchecks/src/templates/main/cloud_connect_accounts.html
@@ -0,0 +1,211 @@
+{% extends "layout/base.html" %}
+{% block content %}
+
+
Cloud Connect Accounts
+
+
+{# ── Unmatched accounts ─────────────────────────────────────────────────── #}
+{% if unmatched %}
+Unmatched {{ unmatched|length }}
+These accounts have no linked job yet. Create a new job or link to an existing one.
+
+
+
+
+
+ User
+ Section
+ Repository
+ Used / Quota
+ Free
+ Last active
+ Status
+ First seen
+
+
+
+
+ {% for acc in unmatched %}
+
+ {{ acc.user }}
+ {{ acc.section }}
+ {{ acc.repo_name or '—' }}{{ acc.repo_type or '' }}
+ {{ acc.used_space or '—' }} / {{ acc.total_quota or '—' }}
+ {{ acc.free_space or '—' }}
+ {{ acc.last_active_raw or '—' }}
+
+ {% if acc.last_status == 'Failed' %}
+ Failed
+ {% elif acc.last_status == 'Warning' %}
+ Warning
+ {% else %}
+ Success
+ {% endif %}
+
+ {{ acc.first_seen_at|local_datetime }}
+
+
+ Link / Create job
+
+
+
+
+ {# Link modal #}
+
+
+
+
+
+
+
+
+ Create new job
+
+
+
+
+ Link to existing job
+
+
+
+
+
+ {# Tab 1: Create new job #}
+
+
+ {# Tab 2: Link to existing job #}
+
+
+
+
+
+
+ {% endfor %}
+
+
+
+{% else %}
+
+ All accounts matched. No unmatched Cloud Connect accounts.
+
+{% endif %}
+
+{# ── Matched accounts ───────────────────────────────────────────────────── #}
+{% if matched %}
+Linked {{ matched|length }}
+
+
+
+
+ User
+ Section
+ Repository
+ Used / Quota
+ Free
+ Last active
+ Status
+ Linked job
+
+
+
+
+ {% for acc in matched %}
+
+ {{ acc.user }}
+ {{ acc.section }}
+ {{ acc.repo_name or '—' }}{{ acc.repo_type or '' }}
+ {{ acc.used_space or '—' }} / {{ acc.total_quota or '—' }}
+ {{ acc.free_space or '—' }}
+ {{ acc.last_active_raw or '—' }}
+
+ {% if acc.last_status == 'Failed' %}
+ Failed
+ {% elif acc.last_status == 'Warning' %}
+ Warning
+ {% else %}
+ Success
+ {% endif %}
+
+
+ {% if acc.job %}
+
+ {{ acc.job.customer.name ~ ' – ' if acc.job.customer else '' }}{{ acc.job.job_name }}
+
+ {% else %}—{% endif %}
+
+
+
+
+
+ {% endfor %}
+
+
+
+{% endif %}
+
+{% if not unmatched and not matched %}
+
+ No Cloud Connect accounts found yet. They appear here automatically after the first daily report email is imported.
+
+{% endif %}
+
+{% endblock %}
diff --git a/docs/changelog-claude.md b/docs/changelog-claude.md
index d4ddc67..3920e09 100644
--- a/docs/changelog-claude.md
+++ b/docs/changelog-claude.md
@@ -2,6 +2,18 @@
This file documents all changes made to this project via Claude Code.
+## [2026-03-19] (2)
+
+### Added
+- Veeam Cloud Connect importer — same inbox-style staging flow as Cove Data Protection:
+ - `app/cloud_connect_importer.py` — HTML parser for Cloud Connect daily report emails, upserts tenant rows into `cloud_connect_accounts`, creates `JobRun` records for linked accounts
+ - `app/main/routes_cloud_connect.py` — `/cloud-connect/accounts` page with link/unlink actions (create new job or link to existing)
+ - `templates/main/cloud_connect_accounts.html` — inbox-style page: unmatched accounts first, matched accounts below
+ - `CloudConnectAccount` model added to `models.py` (staging table, unique on user × section)
+ - migrate_cloud_connect_accounts_table() added to migrations.py, registered in run_migrations()
+ - `mail_importer.py` — Cloud Connect hook: detects `backup_type == "cloud connect report"`, calls `upsert_cloud_connect_report()`, auto-approves mail on success
+ - Sidebar link "Cloud Connect" added for admin/operator roles
+
## [2026-03-19]
### Changed