diff --git a/css/app.css b/css/app.css
index 096a54a..afda024 100644
--- a/css/app.css
+++ b/css/app.css
@@ -763,4 +763,91 @@ select:focus { border-color: var(--accent); }
/* Toast — stretch across bottom */
.toast { right: 16px; left: 16px; bottom: 16px; }
+
+ /* Jobs — stack sidebar above detail */
+ .jobs-layout { grid-template-columns: 1fr; }
+ .jobs-sidebar { border-right: none; border-bottom: 1px solid var(--border); }
}
+
+/* ── JOBS PAGE ───────────────────────────────────────────────────────────────── */
+
+.jobs-layout {
+ display: grid;
+ grid-template-columns: 220px 1fr;
+ height: calc(100vh - 48px);
+}
+.jobs-sidebar {
+ border-right: 1px solid var(--border);
+ overflow-y: auto;
+}
+.jobs-sidebar-title {
+ padding: 16px 16px 8px;
+ font-size: 10px;
+ font-weight: 600;
+ text-transform: uppercase;
+ letter-spacing: 0.1em;
+ color: var(--text3);
+}
+.job-item {
+ display: flex;
+ align-items: center;
+ gap: 10px;
+ padding: 12px 16px;
+ cursor: pointer;
+ border-bottom: 1px solid var(--border);
+ user-select: none;
+}
+.job-item:hover, .job-item.active { background: var(--bg2); }
+.job-item-name { font-size: 13px; color: var(--text); }
+.jobs-detail {
+ padding: 28px 32px;
+ overflow-y: auto;
+ max-width: 600px;
+}
+.jobs-detail-hd { margin-bottom: 20px; }
+.jobs-detail-title { font-size: 17px; font-weight: 600; color: var(--text); }
+.jobs-detail-desc { font-size: 12px; color: var(--text2); margin-top: 4px; line-height: 1.6; }
+.job-actions { display: flex; gap: 8px; margin: 16px 0 0; }
+.jobs-placeholder { padding: 48px 32px; color: var(--text3); font-size: 13px; }
+
+/* Shared job status dot */
+.job-dot {
+ width: 7px;
+ height: 7px;
+ border-radius: 50%;
+ flex-shrink: 0;
+ display: inline-block;
+}
+.job-dot--success { background: var(--accent); }
+.job-dot--error { background: var(--red); }
+.job-dot--running { background: var(--amber); animation: pulse 2s ease-in-out infinite; }
+.job-dot--none { background: var(--border2); }
+
+/* Run history list */
+.run-item {
+ display: grid;
+ grid-template-columns: 10px 1fr 60px 1fr;
+ gap: 0 12px;
+ padding: 7px 0;
+ border-bottom: 1px solid var(--border);
+ font-size: 12px;
+ align-items: baseline;
+}
+.run-item:last-child { border-bottom: none; }
+.run-time { color: var(--text3); }
+.run-status { color: var(--text2); }
+.run-result { color: var(--text); }
+.run-empty { color: var(--text3); font-size: 12px; padding: 8px 0; }
+
+/* Nav dot */
+.nav-job-dot {
+ display: inline-block;
+ width: 6px;
+ height: 6px;
+ border-radius: 50%;
+ margin-left: 5px;
+ vertical-align: middle;
+}
+.nav-job-dot--success { background: var(--accent); }
+.nav-job-dot--error { background: var(--red); }
+.nav-job-dot--none { display: none; }
diff --git a/index.html b/index.html
index 246f199..a0dcc88 100644
--- a/index.html
+++ b/index.html
@@ -22,6 +22,7 @@
·
+
@@ -69,7 +70,6 @@
—
vmid —
- id —
created —
@@ -98,6 +98,19 @@
+
+
+
diff --git a/js/app.js b/js/app.js
index 1380b38..e389242 100644
--- a/js/app.js
+++ b/js/app.js
@@ -11,12 +11,19 @@ function navigate(page, vmid) {
document.getElementById('page-detail').classList.add('active');
history.pushState({ page: 'instance', vmid }, '', `/instance/${vmid}`);
renderDetailPage(vmid);
+ } else if (page === 'jobs') {
+ document.getElementById('page-jobs').classList.add('active');
+ history.pushState({ page: 'jobs' }, '', '/jobs');
+ renderJobsPage();
}
}
function handleRoute() {
const m = window.location.pathname.match(/^\/instance\/(\d+)/);
- if (m) {
+ if (window.location.pathname === '/jobs') {
+ document.getElementById('page-jobs').classList.add('active');
+ renderJobsPage();
+ } else if (m) {
document.getElementById('page-detail').classList.add('active');
renderDetailPage(parseInt(m[1], 10));
} else {
@@ -30,6 +37,9 @@ window.addEventListener('popstate', e => {
if (e.state?.page === 'instance') {
document.getElementById('page-detail').classList.add('active');
renderDetailPage(e.state.vmid);
+ } else if (e.state?.page === 'jobs') {
+ document.getElementById('page-jobs').classList.add('active');
+ renderJobsPage();
} else {
document.getElementById('page-dashboard').classList.add('active');
renderDashboard();
diff --git a/js/ui.js b/js/ui.js
index 09b8807..7156fc5 100644
--- a/js/ui.js
+++ b/js/ui.js
@@ -172,7 +172,6 @@ async function renderDetailPage(vmid) {
document.getElementById('detail-vmid-crumb').textContent = vmid;
document.getElementById('detail-name').textContent = inst.name;
document.getElementById('detail-vmid-sub').textContent = inst.vmid;
- document.getElementById('detail-id-sub').textContent = inst.id;
document.getElementById('detail-created-sub').textContent = fmtDate(inst.created_at);
document.getElementById('detail-identity').innerHTML = `
@@ -180,7 +179,6 @@ async function renderDetailPage(vmid) {
state${esc(inst.state)}
stack${esc(inst.stack) || '—'}
vmid${inst.vmid}
- internal id${inst.id}
`;
document.getElementById('detail-network').innerHTML = `
@@ -384,15 +382,17 @@ async function importDB() {
document.getElementById('confirm-ok').onclick = async () => {
closeConfirm();
try {
- const { instances, history = [] } = JSON.parse(await file.text());
+ const { instances, history = [], jobs, job_runs } = JSON.parse(await file.text());
const res = await fetch('/api/import', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
- body: JSON.stringify({ instances, history }),
+ body: JSON.stringify({ instances, history, jobs, job_runs }),
});
const data = await res.json();
if (!res.ok) { showToast(data.error ?? 'Import failed', 'error'); return; }
- showToast(`Imported ${data.imported} instance${data.imported !== 1 ? 's' : ''}`, 'success');
+ const parts = [`${data.imported} instance${data.imported !== 1 ? 's' : ''}`];
+ if (data.imported_jobs != null) parts.push(`${data.imported_jobs} job${data.imported_jobs !== 1 ? 's' : ''}`);
+ showToast(`Imported ${parts.join(', ')}`, 'success');
closeSettingsModal();
renderDashboard();
} catch {
@@ -426,3 +426,131 @@ document.getElementById('tz-select').addEventListener('change', e => {
if (m) renderDetailPage(parseInt(m[1], 10));
else renderDashboard();
});
+
+// ── Jobs Page ─────────────────────────────────────────────────────────────────
+
+async function renderJobsPage() {
+ const jobs = await fetch('/api/jobs').then(r => r.json());
+ _updateJobsNavDot(jobs);
+ document.getElementById('jobs-list').innerHTML = jobs.length
+ ? jobs.map(j => `
+
+
+ ${esc(j.name)}
+
`).join('')
+ : 'No jobs
';
+ if (jobs.length) loadJobDetail(jobs[0].id);
+}
+
+async function loadJobDetail(jobId) {
+ document.querySelectorAll('.job-item').forEach(el => el.classList.remove('active'));
+ document.getElementById(`job-item-${jobId}`)?.classList.add('active');
+ const job = await fetch(`/api/jobs/${jobId}`).then(r => r.json());
+ const cfg = job.config ?? {};
+ document.getElementById('jobs-detail').innerHTML = `
+
+
${esc(job.name)}
+
${esc(job.description)}
+
+
+
+
+
+
+
+
+ ${_renderJobConfigFields(job.key, cfg)}
+
+
+
+
+ Run History
+ ${_renderRunList(job.runs)}
+ `;
+}
+
+function _renderJobConfigFields(key, cfg) {
+ if (key === 'tailscale_sync') return `
+
+
+
+
+
+
+
+
`;
+ if (key === 'patchmon_sync' || key === 'semaphore_sync') {
+ const label = key === 'semaphore_sync' ? 'API Token (Bearer)' : 'API Token (Basic)';
+ return `
+
+
+
+
+
+
+
+
`;
+ }
+ return '';
+}
+
+function _renderRunList(runs) {
+ if (!runs?.length) return 'No runs yet
';
+ return `${runs.map(r => `
+
+
+ ${fmtDateFull(r.started_at)}
+ ${esc(r.status)}
+ ${esc(r.result)}
+
`).join('')}
`;
+}
+
+async function saveJobDetail(jobId) {
+ const enabled = document.getElementById('job-enabled').checked;
+ const schedule = document.getElementById('job-schedule').value;
+ const cfg = {};
+ const tailnet = document.getElementById('job-cfg-tailnet');
+ const apiKey = document.getElementById('job-cfg-api-key');
+ const apiUrl = document.getElementById('job-cfg-api-url');
+ const apiToken = document.getElementById('job-cfg-api-token');
+ if (tailnet) cfg.tailnet = tailnet.value.trim();
+ if (apiKey) cfg.api_key = apiKey.value;
+ if (apiUrl) cfg.api_url = apiUrl.value.trim();
+ if (apiToken) cfg.api_token = apiToken.value;
+ const res = await fetch(`/api/jobs/${jobId}`, {
+ method: 'PUT',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({ enabled, schedule: parseInt(schedule, 10), config: cfg }),
+ });
+ if (res.ok) { showToast('Job saved', 'success'); loadJobDetail(jobId); }
+ else { showToast('Failed to save', 'error'); }
+}
+
+async function runJobNow(jobId) {
+ const btn = document.getElementById('job-run-btn');
+ btn.disabled = true;
+ btn.textContent = 'Running…';
+ try {
+ const res = await fetch(`/api/jobs/${jobId}/run`, { method: 'POST' });
+ const data = await res.json();
+ if (res.ok) { showToast(`Done — ${data.summary}`, 'success'); loadJobDetail(jobId); }
+ else { showToast(data.error ?? 'Run failed', 'error'); }
+ } catch { showToast('Run failed', 'error'); }
+ finally { btn.disabled = false; btn.textContent = 'Run Now'; }
+}
+
+function _updateJobsNavDot(jobs) {
+ const dot = document.getElementById('nav-jobs-dot');
+ const cls = jobs.some(j => j.last_status === 'error') ? 'error'
+ : jobs.some(j => j.last_status === 'success') ? 'success'
+ : 'none';
+  if (dot) dot.className = `nav-job-dot nav-job-dot--${cls}`;
+}
diff --git a/js/version.js b/js/version.js
index 7f8fb21..2b45f39 100644
--- a/js/version.js
+++ b/js/version.js
@@ -1 +1 @@
-const VERSION = "1.4.0";
+const VERSION = "1.5.0";
diff --git a/package.json b/package.json
index 252f229..9a08c3e 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "catalyst",
- "version": "1.4.0",
+ "version": "1.5.0",
"type": "module",
"scripts": {
"start": "node server/server.js",
diff --git a/server/db.js b/server/db.js
index 645f852..52ef555 100644
--- a/server/db.js
+++ b/server/db.js
@@ -17,7 +17,7 @@ function init(path) {
db.exec('PRAGMA foreign_keys = ON');
db.exec('PRAGMA synchronous = NORMAL');
createSchema();
- if (path !== ':memory:') seed();
+ if (path !== ':memory:') { seed(); seedJobs(); }
}
function createSchema() {
@@ -53,6 +53,31 @@ function createSchema() {
changed_at TEXT NOT NULL DEFAULT (datetime('now'))
);
CREATE INDEX IF NOT EXISTS idx_history_vmid ON instance_history(vmid);
+
+ CREATE TABLE IF NOT EXISTS config (
+ key TEXT PRIMARY KEY,
+ value TEXT NOT NULL DEFAULT ''
+ );
+
+ CREATE TABLE IF NOT EXISTS jobs (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ key TEXT NOT NULL UNIQUE,
+ name TEXT NOT NULL,
+ description TEXT NOT NULL DEFAULT '',
+ enabled INTEGER NOT NULL DEFAULT 0 CHECK(enabled IN (0,1)),
+ schedule INTEGER NOT NULL DEFAULT 15,
+ config TEXT NOT NULL DEFAULT '{}'
+ );
+
+ CREATE TABLE IF NOT EXISTS job_runs (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ job_id INTEGER NOT NULL,
+ started_at TEXT NOT NULL DEFAULT (datetime('now')),
+ ended_at TEXT,
+ status TEXT NOT NULL DEFAULT 'running' CHECK(status IN ('running','success','error')),
+ result TEXT NOT NULL DEFAULT ''
+ );
+ CREATE INDEX IF NOT EXISTS idx_job_runs_job_id ON job_runs(job_id);
`);
}
@@ -83,6 +108,29 @@ function seed() {
db.exec('COMMIT');
}
+function seedJobs() {
+ const upsert = db.prepare(`
+ INSERT OR IGNORE INTO jobs (key, name, description, enabled, schedule, config)
+ VALUES (?, ?, ?, ?, ?, ?)
+ `);
+
+ const apiKey = getConfig('tailscale_api_key');
+ const tailnet = getConfig('tailscale_tailnet');
+ const tsSchedule = parseInt(getConfig('tailscale_poll_minutes', '15'), 10) || 15;
+ const tsEnabled = getConfig('tailscale_enabled') === '1' ? 1 : 0;
+ upsert.run('tailscale_sync', 'Tailscale Sync',
+ 'Syncs Tailscale device status and IPs to instances by matching hostnames.',
+ tsEnabled, tsSchedule, JSON.stringify({ api_key: apiKey, tailnet }));
+
+ upsert.run('patchmon_sync', 'Patchmon Sync',
+ 'Syncs Patchmon host registration status to instances by matching hostnames.',
+ 0, 60, JSON.stringify({ api_url: 'http://patchmon:3000/api/v1/api/hosts', api_token: '' }));
+
+ upsert.run('semaphore_sync', 'Semaphore Sync',
+ 'Syncs Semaphore inventory membership to instances by matching hostnames.',
+ 0, 60, JSON.stringify({ api_url: 'http://semaphore:3000/api/project/1/inventory/1', api_token: '' }));
+}
+
// ── Queries ───────────────────────────────────────────────────────────────────
export function getInstances(filters = {}) {
@@ -187,6 +235,93 @@ export function getAllHistory() {
return db.prepare('SELECT * FROM instance_history ORDER BY vmid, changed_at').all();
}
+export function getAllJobs() {
+ return db.prepare('SELECT id, key, name, description, enabled, schedule, config FROM jobs ORDER BY id').all();
+}
+
+export function getAllJobRuns() {
+ return db.prepare('SELECT * FROM job_runs ORDER BY job_id, id').all();
+}
+
+export function importJobs(jobRows, jobRunRows = []) {
+ db.exec('BEGIN');
+ db.exec('DELETE FROM job_runs');
+ db.exec('DELETE FROM jobs');
+ const insertJob = db.prepare(`
+ INSERT INTO jobs (id, key, name, description, enabled, schedule, config)
+ VALUES (@id, @key, @name, @description, @enabled, @schedule, @config)
+ `);
+ for (const j of jobRows) insertJob.run(j);
+ if (jobRunRows.length) {
+ const insertRun = db.prepare(`
+ INSERT INTO job_runs (id, job_id, started_at, ended_at, status, result)
+ VALUES (@id, @job_id, @started_at, @ended_at, @status, @result)
+ `);
+ for (const r of jobRunRows) insertRun.run(r);
+ }
+ db.exec('COMMIT');
+}
+
+export function getConfig(key, defaultVal = '') {
+ const row = db.prepare('SELECT value FROM config WHERE key = ?').get(key);
+ return row ? row.value : defaultVal;
+}
+
+export function setConfig(key, value) {
+ db.prepare(
+ `INSERT INTO config (key, value) VALUES (?, ?)
+ ON CONFLICT(key) DO UPDATE SET value = excluded.value`
+ ).run(key, String(value));
+}
+
+// ── Jobs ──────────────────────────────────────────────────────────────────────
+
+const JOB_WITH_LAST_RUN = `
+ SELECT j.*,
+ r.id AS last_run_id,
+ r.started_at AS last_run_at,
+ r.status AS last_status,
+ r.result AS last_result
+ FROM jobs j
+ LEFT JOIN job_runs r
+ ON r.id = (SELECT id FROM job_runs WHERE job_id = j.id ORDER BY id DESC LIMIT 1)
+`;
+
+export function getJobs() {
+ return db.prepare(JOB_WITH_LAST_RUN + ' ORDER BY j.id').all();
+}
+
+export function getJob(id) {
+ return db.prepare(JOB_WITH_LAST_RUN + ' WHERE j.id = ?').get(id) ?? null;
+}
+
+export function createJob(data) {
+ db.prepare(`
+ INSERT INTO jobs (key, name, description, enabled, schedule, config)
+ VALUES (@key, @name, @description, @enabled, @schedule, @config)
+ `).run(data);
+}
+
+export function updateJob(id, { enabled, schedule, config }) {
+ db.prepare(`
+ UPDATE jobs SET enabled=@enabled, schedule=@schedule, config=@config WHERE id=@id
+ `).run({ id, enabled, schedule, config });
+}
+
+export function createJobRun(jobId) {
+ return Number(db.prepare('INSERT INTO job_runs (job_id) VALUES (?)').run(jobId).lastInsertRowid);
+}
+
+export function completeJobRun(runId, status, result) {
+ db.prepare(`
+ UPDATE job_runs SET ended_at=datetime('now'), status=@status, result=@result WHERE id=@id
+ `).run({ id: runId, status, result });
+}
+
+export function getJobRuns(jobId) {
+ return db.prepare('SELECT * FROM job_runs WHERE job_id = ? ORDER BY id DESC').all(jobId);
+}
+
// ── Test helpers ──────────────────────────────────────────────────────────────
export function _resetForTest() {
diff --git a/server/jobs.js b/server/jobs.js
new file mode 100644
index 0000000..cfcb20e
--- /dev/null
+++ b/server/jobs.js
@@ -0,0 +1,141 @@
+import { getJobs, getJob, getInstances, updateInstance, createJobRun, completeJobRun } from './db.js';
+
+// ── Handlers ──────────────────────────────────────────────────────────────────
+
+const TAILSCALE_API = 'https://api.tailscale.com/api/v2';
+
+async function tailscaleSyncHandler(cfg) {
+ const { api_key, tailnet } = cfg;
+ if (!api_key || !tailnet) throw new Error('Tailscale not configured — set API key and tailnet');
+
+ const res = await fetch(
+ `${TAILSCALE_API}/tailnet/${encodeURIComponent(tailnet)}/devices`,
+ { headers: { Authorization: `Bearer ${api_key}` } }
+ );
+ if (!res.ok) throw new Error(`Tailscale API ${res.status}`);
+
+ const { devices } = await res.json();
+ const tsMap = new Map(
+ devices.map(d => [d.hostname, (d.addresses ?? []).find(a => a.startsWith('100.')) ?? ''])
+ );
+
+ const instances = getInstances();
+ let updated = 0;
+ for (const inst of instances) {
+ const tsIp = tsMap.get(inst.name);
+ const matched = tsIp !== undefined;
+ const newTailscale = matched ? 1 : (inst.tailscale === 1 ? 0 : inst.tailscale);
+ const newIp = matched ? tsIp : (inst.tailscale === 1 ? '' : inst.tailscale_ip);
+ if (newTailscale !== inst.tailscale || newIp !== inst.tailscale_ip) {
+ const { id: _id, created_at: _ca, updated_at: _ua, ...instData } = inst;
+ updateInstance(inst.vmid, { ...instData, tailscale: newTailscale, tailscale_ip: newIp });
+ updated++;
+ }
+ }
+ return { summary: `${updated} updated of ${instances.length}` };
+}
+
+// ── Patchmon Sync ─────────────────────────────────────────────────────────────
+
+async function patchmonSyncHandler(cfg) {
+ const { api_url, api_token } = cfg;
+ if (!api_url || !api_token) throw new Error('Patchmon not configured — set API URL and token');
+
+ const res = await fetch(api_url, {
+ headers: { Authorization: `Basic ${api_token}` },
+ });
+ if (!res.ok) throw new Error(`Patchmon API ${res.status}`);
+
+ const data = await res.json();
+ const items = Array.isArray(data) ? data : (data.hosts ?? data.data ?? []);
+ const hostSet = new Set(
+ items.map(h => (typeof h === 'string' ? h : (h.name ?? h.hostname ?? h.host ?? '')))
+ .filter(Boolean)
+ );
+
+ const instances = getInstances();
+ let updated = 0;
+ for (const inst of instances) {
+ const newPatchmon = hostSet.has(inst.name) ? 1 : 0;
+ if (newPatchmon !== inst.patchmon) {
+ const { id: _id, created_at: _ca, updated_at: _ua, ...instData } = inst;
+ updateInstance(inst.vmid, { ...instData, patchmon: newPatchmon });
+ updated++;
+ }
+ }
+ return { summary: `${updated} updated of ${instances.length}` };
+}
+
+// ── Semaphore Sync ────────────────────────────────────────────────────────────
+
+async function semaphoreSyncHandler(cfg) {
+ const { api_url, api_token } = cfg;
+ if (!api_url || !api_token) throw new Error('Semaphore not configured — set API URL and token');
+
+ const res = await fetch(api_url, {
+ headers: { Authorization: `Bearer ${api_token}` },
+ });
+ if (!res.ok) throw new Error(`Semaphore API ${res.status}`);
+
+ const data = await res.json();
+ // Inventory is an Ansible INI string; extract bare hostnames
+ const hostSet = new Set(
+ (data.inventory ?? '').split('\n')
+ .map(l => l.trim())
+ .filter(l => l && !l.startsWith('[') && !l.startsWith('#') && !l.startsWith(';'))
+ .map(l => l.split(/[\s=]/)[0])
+ .filter(Boolean)
+ );
+
+ const instances = getInstances();
+ let updated = 0;
+ for (const inst of instances) {
+ const newSemaphore = hostSet.has(inst.name) ? 1 : 0;
+ if (newSemaphore !== inst.semaphore) {
+ const { id: _id, created_at: _ca, updated_at: _ua, ...instData } = inst;
+ updateInstance(inst.vmid, { ...instData, semaphore: newSemaphore });
+ updated++;
+ }
+ }
+ return { summary: `${updated} updated of ${instances.length}` };
+}
+
+// ── Registry ──────────────────────────────────────────────────────────────────
+
+const HANDLERS = {
+ tailscale_sync: tailscaleSyncHandler,
+ patchmon_sync: patchmonSyncHandler,
+ semaphore_sync: semaphoreSyncHandler,
+};
+
+// ── Public API ────────────────────────────────────────────────────────────────
+
+export async function runJob(jobId) {
+ const job = getJob(jobId);
+ if (!job) throw new Error('Job not found');
+ const handler = HANDLERS[job.key];
+ if (!handler) throw new Error(`No handler for '${job.key}'`);
+ const cfg = JSON.parse(job.config || '{}');
+ const runId = createJobRun(jobId);
+ try {
+ const result = await handler(cfg);
+ completeJobRun(runId, 'success', result.summary ?? '');
+ return result;
+ } catch (e) {
+ completeJobRun(runId, 'error', e.message);
+ throw e;
+ }
+}
+
+const _intervals = new Map();
+
+export function restartJobs() {
+ for (const iv of _intervals.values()) clearInterval(iv);
+ _intervals.clear();
+ for (const job of getJobs()) {
+ if (!job.enabled) continue;
+ const ms = Math.max(1, job.schedule || 15) * 60_000;
+ const id = job.id;
+    _intervals.set(id, setInterval(() => runJob(id).catch(e => console.error(`scheduled job ${id} failed:`, e.message)), ms));
+ }
+}
diff --git a/server/routes.js b/server/routes.js
index eb77abf..eed881c 100644
--- a/server/routes.js
+++ b/server/routes.js
@@ -2,7 +2,10 @@ import { Router } from 'express';
import {
getInstances, getInstance, getDistinctStacks,
createInstance, updateInstance, deleteInstance, importInstances, getInstanceHistory, getAllHistory,
+ getConfig, setConfig, getJobs, getJob, updateJob, getJobRuns,
+ getAllJobs, getAllJobRuns, importJobs,
} from './db.js';
+import { runJob, restartJobs } from './jobs.js';
export const router = Router();
@@ -12,6 +15,15 @@ const VALID_STATES = ['deployed', 'testing', 'degraded'];
const VALID_STACKS = ['production', 'development'];
const SERVICE_KEYS = ['atlas', 'argus', 'semaphore', 'patchmon', 'tailscale', 'andromeda'];
+const REDACTED = '**REDACTED**';
+
+function maskJob(job) {
+ const cfg = JSON.parse(job.config || '{}');
+ if (cfg.api_key) cfg.api_key = REDACTED;
+ if (cfg.api_token) cfg.api_token = REDACTED;
+ return { ...job, config: cfg };
+}
+
function validate(body) {
const errors = [];
if (!body.name || typeof body.name !== 'string' || !body.name.trim())
@@ -116,15 +128,17 @@ router.put('/instances/:vmid', (req, res) => {
// GET /api/export
router.get('/export', (_req, res) => {
const instances = getInstances();
- const history = getAllHistory();
+ const history = getAllHistory();
+ const jobs = getAllJobs();
+ const job_runs = getAllJobRuns();
const date = new Date().toISOString().slice(0, 10);
res.setHeader('Content-Disposition', `attachment; filename="catalyst-backup-${date}.json"`);
- res.json({ version: 2, exported_at: new Date().toISOString(), instances, history });
+ res.json({ version: 3, exported_at: new Date().toISOString(), instances, history, jobs, job_runs });
});
// POST /api/import
router.post('/import', (req, res) => {
- const { instances, history = [] } = req.body ?? {};
+ const { instances, history = [], jobs, job_runs } = req.body ?? {};
if (!Array.isArray(instances)) {
return res.status(400).json({ error: 'body must contain an instances array' });
}
@@ -136,7 +150,14 @@ router.post('/import', (req, res) => {
if (errors.length) return res.status(400).json({ errors });
try {
importInstances(instances.map(normalise), Array.isArray(history) ? history : []);
- res.json({ imported: instances.length });
+ if (Array.isArray(jobs)) {
+ importJobs(jobs, Array.isArray(job_runs) ? job_runs : []);
+ try { restartJobs(); } catch (e) { console.error('POST /api/import restartJobs', e); }
+ }
+ res.json({
+ imported: instances.length,
+ imported_jobs: Array.isArray(jobs) ? jobs.length : undefined,
+ });
} catch (e) {
console.error('POST /api/import', e);
res.status(500).json({ error: 'internal server error' });
@@ -160,3 +181,49 @@ router.delete('/instances/:vmid', (req, res) => {
handleDbError('DELETE /api/instances/:vmid', e, res);
}
});
+
+// GET /api/jobs
+router.get('/jobs', (_req, res) => {
+ res.json(getJobs().map(maskJob));
+});
+
+// GET /api/jobs/:id
+router.get('/jobs/:id', (req, res) => {
+  const id = Number(req.params.id);
+  if (!Number.isInteger(id) || id < 1) return res.status(400).json({ error: 'invalid id' });
+ const job = getJob(id);
+ if (!job) return res.status(404).json({ error: 'job not found' });
+ res.json({ ...maskJob(job), runs: getJobRuns(id) });
+});
+
+// PUT /api/jobs/:id
+router.put('/jobs/:id', (req, res) => {
+  const id = Number(req.params.id);
+  if (!Number.isInteger(id) || id < 1) return res.status(400).json({ error: 'invalid id' });
+ const job = getJob(id);
+ if (!job) return res.status(404).json({ error: 'job not found' });
+ const { enabled, schedule, config: newCfg } = req.body ?? {};
+ const existingCfg = JSON.parse(job.config || '{}');
+ const mergedCfg = { ...existingCfg, ...(newCfg ?? {}) };
+ if (newCfg?.api_key === REDACTED) mergedCfg.api_key = existingCfg.api_key;
+ if (newCfg?.api_token === REDACTED) mergedCfg.api_token = existingCfg.api_token;
+ updateJob(id, {
+ enabled: enabled != null ? (enabled ? 1 : 0) : job.enabled,
+ schedule: schedule != null ? (parseInt(schedule, 10) || 15) : job.schedule,
+ config: JSON.stringify(mergedCfg),
+ });
+ try { restartJobs(); } catch (e) { console.error('PUT /api/jobs/:id restartJobs', e); }
+ res.json(maskJob(getJob(id)));
+});
+
+// POST /api/jobs/:id/run
+router.post('/jobs/:id/run', async (req, res) => {
+  const id = Number(req.params.id);
+  if (!Number.isInteger(id) || id < 1) return res.status(400).json({ error: 'invalid id' });
+ if (!getJob(id)) return res.status(404).json({ error: 'job not found' });
+ try {
+ res.json(await runJob(id));
+ } catch (e) {
+ handleDbError('POST /api/jobs/:id/run', e, res);
+ }
+});
diff --git a/server/server.js b/server/server.js
index 5f83e90..b1a05e4 100644
--- a/server/server.js
+++ b/server/server.js
@@ -3,6 +3,7 @@ import helmet from 'helmet';
import { fileURLToPath } from 'url';
import { dirname, join } from 'path';
import { router } from './routes.js';
+import { restartJobs } from './jobs.js';
const __dirname = dirname(fileURLToPath(import.meta.url));
const PORT = process.env.PORT ?? 3000;
@@ -47,5 +48,6 @@ app.use((err, _req, res, _next) => {
// Boot — only when run directly, not when imported by tests
if (process.argv[1] === fileURLToPath(import.meta.url)) {
+ restartJobs();
app.listen(PORT, () => console.log(`catalyst on :${PORT}`));
}
diff --git a/tests/api.test.js b/tests/api.test.js
index 2e9d8f5..6b7e652 100644
--- a/tests/api.test.js
+++ b/tests/api.test.js
@@ -1,7 +1,7 @@
import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest'
import request from 'supertest'
import { app } from '../server/server.js'
-import { _resetForTest } from '../server/db.js'
+import { _resetForTest, createJob } from '../server/db.js'
import * as dbModule from '../server/db.js'
beforeEach(() => _resetForTest())
@@ -276,9 +276,9 @@ describe('GET /api/export', () => {
expect(res.body.instances).toEqual([])
})
- it('returns version 2', async () => {
+ it('returns version 3', async () => {
const res = await request(app).get('/api/export')
- expect(res.body.version).toBe(2)
+ expect(res.body.version).toBe(3)
})
it('includes a history array', async () => {
@@ -287,6 +287,21 @@ describe('GET /api/export', () => {
expect(res.body.history).toBeInstanceOf(Array)
expect(res.body.history.some(e => e.field === 'created')).toBe(true)
})
+
+ it('includes jobs and job_runs arrays', async () => {
+ createJob(testJob)
+ const res = await request(app).get('/api/export')
+ expect(res.body.jobs).toBeInstanceOf(Array)
+ expect(res.body.jobs).toHaveLength(1)
+ expect(res.body.jobs[0].key).toBe('tailscale_sync')
+ expect(res.body.job_runs).toBeInstanceOf(Array)
+ })
+
+ it('exports raw job config without masking', async () => {
+ createJob(testJob)
+ const res = await request(app).get('/api/export')
+ expect(res.body.jobs[0].config).toContain('tskey-test')
+ })
})
// ── POST /api/import ──────────────────────────────────────────────────────────
@@ -341,6 +356,28 @@ describe('POST /api/import', () => {
expect(res.status).toBe(200)
expect(res.body.imported).toBe(1)
})
+
+ it('imports jobs and job_runs and returns imported_jobs count', async () => {
+ const exp = await request(app).get('/api/export')
+ createJob(testJob)
+ const fullExport = await request(app).get('/api/export')
+ const res = await request(app).post('/api/import').send({
+ instances: fullExport.body.instances,
+ history: fullExport.body.history,
+ jobs: fullExport.body.jobs,
+ job_runs: fullExport.body.job_runs,
+ })
+ expect(res.status).toBe(200)
+ expect(res.body.imported_jobs).toBe(1)
+ expect((await request(app).get('/api/jobs')).body).toHaveLength(1)
+ })
+
+ it('leaves jobs untouched when no jobs key in payload', async () => {
+ createJob(testJob)
+ await request(app).post('/api/import')
+ .send({ instances: [{ ...base, vmid: 1, name: 'x' }] })
+ expect((await request(app).get('/api/jobs')).body).toHaveLength(1)
+ })
})
// ── Static assets & SPA routing ───────────────────────────────────────────────
@@ -453,3 +490,153 @@ describe('error handling — unexpected DB failures', () => {
)
})
})
+
+const testJob = {
+ key: 'tailscale_sync', name: 'Tailscale Sync', description: 'Test job',
+ enabled: 0, schedule: 15,
+ config: JSON.stringify({ api_key: 'tskey-test', tailnet: 'example.com' }),
+}
+
+const patchmonJob = {
+ key: 'patchmon_sync', name: 'Patchmon Sync', description: 'Test patchmon job',
+ enabled: 0, schedule: 60,
+ config: JSON.stringify({ api_url: 'http://patchmon:3000/api/v1/api/hosts', api_token: 'secret-token' }),
+}
+
+// ── GET /api/jobs ─────────────────────────────────────────────────────────────
+
+describe('GET /api/jobs', () => {
+ it('returns empty array when no jobs', async () => {
+ const res = await request(app).get('/api/jobs')
+ expect(res.status).toBe(200)
+ expect(res.body).toEqual([])
+ })
+
+ it('returns jobs with masked api key', async () => {
+ createJob(testJob)
+ const res = await request(app).get('/api/jobs')
+ expect(res.body).toHaveLength(1)
+ expect(res.body[0].config.api_key).toBe('**REDACTED**')
+ })
+
+ it('returns jobs with masked api_token', async () => {
+ createJob(patchmonJob)
+ const res = await request(app).get('/api/jobs')
+ expect(res.body[0].config.api_token).toBe('**REDACTED**')
+ })
+})
+
+// ── GET /api/jobs/:id ─────────────────────────────────────────────────────────
+
+describe('GET /api/jobs/:id', () => {
+ it('returns job with runs array', async () => {
+ createJob(testJob)
+ const id = (await request(app).get('/api/jobs')).body[0].id
+ const res = await request(app).get(`/api/jobs/${id}`)
+ expect(res.status).toBe(200)
+ expect(res.body.runs).toBeInstanceOf(Array)
+ })
+
+ it('returns 404 for unknown id', async () => {
+ expect((await request(app).get('/api/jobs/999')).status).toBe(404)
+ })
+
+ it('returns 400 for non-numeric id', async () => {
+ expect((await request(app).get('/api/jobs/abc')).status).toBe(400)
+ })
+})
+
+// ── PUT /api/jobs/:id ─────────────────────────────────────────────────────────
+
+describe('PUT /api/jobs/:id', () => {
+ it('updates enabled and schedule', async () => {
+ createJob(testJob)
+ const id = (await request(app).get('/api/jobs')).body[0].id
+ const res = await request(app).put(`/api/jobs/${id}`).send({ enabled: true, schedule: 30 })
+ expect(res.status).toBe(200)
+ expect(res.body.enabled).toBe(1)
+ expect(res.body.schedule).toBe(30)
+ })
+
+ it('does not overwrite api_key when **REDACTED** is sent', async () => {
+ createJob(testJob)
+ const id = (await request(app).get('/api/jobs')).body[0].id
+ await request(app).put(`/api/jobs/${id}`).send({ config: { api_key: '**REDACTED**' } })
+ expect(dbModule.getJob(id).config).toContain('tskey-test')
+ })
+
+ it('returns 404 for unknown id', async () => {
+ expect((await request(app).put('/api/jobs/999').send({})).status).toBe(404)
+ })
+})
+
+// ── POST /api/jobs/:id/run ────────────────────────────────────────────────────
+
+describe('POST /api/jobs/:id/run', () => {
+ afterEach(() => vi.unstubAllGlobals())
+
+ it('returns 404 for unknown id', async () => {
+ expect((await request(app).post('/api/jobs/999/run')).status).toBe(404)
+ })
+
+ it('runs job, returns summary, and logs the run', async () => {
+ createJob(testJob)
+ const id = (await request(app).get('/api/jobs')).body[0].id
+ vi.stubGlobal('fetch', vi.fn().mockResolvedValueOnce({
+ ok: true,
+ json: async () => ({ devices: [] }),
+ }))
+ const res = await request(app).post(`/api/jobs/${id}/run`)
+ expect(res.status).toBe(200)
+ expect(res.body.summary).toBeDefined()
+ const detail = await request(app).get(`/api/jobs/${id}`)
+ expect(detail.body.runs).toHaveLength(1)
+ expect(detail.body.runs[0].status).toBe('success')
+ })
+
+ it('logs error run on failure', async () => {
+ createJob(testJob)
+ const id = (await request(app).get('/api/jobs')).body[0].id
+ vi.stubGlobal('fetch', vi.fn().mockRejectedValueOnce(new Error('network error')))
+ const res = await request(app).post(`/api/jobs/${id}/run`)
+ expect(res.status).toBe(500)
+ const detail = await request(app).get(`/api/jobs/${id}`)
+ expect(detail.body.runs[0].status).toBe('error')
+ })
+
+ it('patchmon_sync: marks instances present in host list as patchmon=1', async () => {
+ createJob(patchmonJob)
+ const id = (await request(app).get('/api/jobs')).body[0].id
+ vi.stubGlobal('fetch', vi.fn().mockResolvedValueOnce({
+ ok: true,
+ json: async () => [{ name: 'plex' }, { name: 'traefik' }],
+ }))
+ const res = await request(app).post(`/api/jobs/${id}/run`)
+ expect(res.status).toBe(200)
+ expect(res.body.summary).toMatch(/updated of/)
+ })
+
+ it('patchmon_sync: returns 500 when API token is missing', async () => {
+ createJob({ ...patchmonJob, config: JSON.stringify({ api_url: 'http://patchmon:3000/api/v1/api/hosts', api_token: '' }) })
+ const id = (await request(app).get('/api/jobs')).body[0].id
+ const res = await request(app).post(`/api/jobs/${id}/run`)
+ expect(res.status).toBe(500)
+ })
+
+ it('semaphore_sync: parses ansible inventory and updates instances', async () => {
+ const semaphoreJob = {
+ key: 'semaphore_sync', name: 'Semaphore Sync', description: 'test',
+ enabled: 0, schedule: 60,
+ config: JSON.stringify({ api_url: 'http://semaphore:3000/api/project/1/inventory/1', api_token: 'bearer-token' }),
+ }
+ createJob(semaphoreJob)
+ const id = (await request(app).get('/api/jobs')).body[0].id
+ vi.stubGlobal('fetch', vi.fn().mockResolvedValueOnce({
+ ok: true,
+ json: async () => ({ inventory: '[production]\nplex\nhomeassistant\n' }),
+ }))
+ const res = await request(app).post(`/api/jobs/${id}/run`)
+ expect(res.status).toBe(200)
+ expect(res.body.summary).toMatch(/updated of/)
+ })
+})
diff --git a/tests/db.test.js b/tests/db.test.js
index fee0400..f216572 100644
--- a/tests/db.test.js
+++ b/tests/db.test.js
@@ -3,6 +3,8 @@ import {
_resetForTest,
getInstances, getInstance, getDistinctStacks,
createInstance, updateInstance, deleteInstance, importInstances, getInstanceHistory,
+ getConfig, setConfig,
+ getJobs, getJob, createJob, updateJob, createJobRun, completeJobRun, getJobRuns,
} from '../server/db.js'
beforeEach(() => _resetForTest());
@@ -269,3 +271,118 @@ describe('test environment boot isolation', () => {
expect(getInstances()).toEqual([]);
});
});
+
+// ── getConfig / setConfig ─────────────────────────────────────────────────────
+
+describe('getConfig / setConfig', () => {
+ it('returns defaultVal when key does not exist', () => {
+ expect(getConfig('missing', 'fallback')).toBe('fallback');
+ });
+
+ it('returns empty string by default', () => {
+ expect(getConfig('missing')).toBe('');
+ });
+
+ it('stores and retrieves a value', () => {
+ setConfig('tailscale_api_key', 'tskey-test');
+ expect(getConfig('tailscale_api_key')).toBe('tskey-test');
+ });
+
+ it('overwrites an existing key', () => {
+ setConfig('tailscale_enabled', '0');
+ setConfig('tailscale_enabled', '1');
+ expect(getConfig('tailscale_enabled')).toBe('1');
+ });
+
+ it('config is cleared by _resetForTest', () => {
+ setConfig('tailscale_api_key', 'tskey-test');
+ _resetForTest();
+ expect(getConfig('tailscale_api_key')).toBe('');
+ });
+});
+
+// ── jobs ──────────────────────────────────────────────────────────────────────
+
+// Minimal valid job row shared by the jobs / job_runs suites below;
+// individual tests override fields via object spread where needed.
+const baseJob = {
+  key: 'test_job', name: 'Test Job', description: 'desc',
+  enabled: 0, schedule: 15, config: '{}',
+};
+
+describe('jobs', () => {
+ it('returns empty array when no jobs', () => {
+ expect(getJobs()).toEqual([]);
+ });
+
+ it('createJob + getJobs returns the job', () => {
+ createJob(baseJob);
+ expect(getJobs()).toHaveLength(1);
+ expect(getJobs()[0].name).toBe('Test Job');
+ });
+
+ it('getJob returns null for unknown id', () => {
+ expect(getJob(999)).toBeNull();
+ });
+
+ it('updateJob changes enabled and schedule', () => {
+ createJob(baseJob);
+ const id = getJobs()[0].id;
+ updateJob(id, { enabled: 1, schedule: 30, config: '{}' });
+ expect(getJob(id).enabled).toBe(1);
+ expect(getJob(id).schedule).toBe(30);
+ });
+
+ it('getJobs includes last_status null when no runs', () => {
+ createJob(baseJob);
+ expect(getJobs()[0].last_status).toBeNull();
+ });
+
+ it('getJobs reflects last_status after a run', () => {
+ createJob(baseJob);
+ const id = getJobs()[0].id;
+ const runId = createJobRun(id);
+ completeJobRun(runId, 'success', 'ok');
+ expect(getJobs()[0].last_status).toBe('success');
+ });
+});
+
+// ── job_runs ──────────────────────────────────────────────────────────────────
+
+describe('job_runs', () => {
+ it('createJobRun returns a positive id', () => {
+ createJob(baseJob);
+ const id = getJobs()[0].id;
+ expect(createJobRun(id)).toBeGreaterThan(0);
+ });
+
+ it('new run has status running and no ended_at', () => {
+ createJob(baseJob);
+ const id = getJobs()[0].id;
+ const runId = createJobRun(id);
+ const runs = getJobRuns(id);
+ expect(runs[0].status).toBe('running');
+ expect(runs[0].ended_at).toBeNull();
+ });
+
+ it('completeJobRun sets status, result, and ended_at', () => {
+ createJob(baseJob);
+ const id = getJobs()[0].id;
+ const runId = createJobRun(id);
+ completeJobRun(runId, 'success', '2 updated of 8');
+ const run = getJobRuns(id)[0];
+ expect(run.status).toBe('success');
+ expect(run.result).toBe('2 updated of 8');
+ expect(run.ended_at).not.toBeNull();
+ });
+
+ it('getJobRuns returns newest first', () => {
+ createJob(baseJob);
+ const id = getJobs()[0].id;
+ const r1 = createJobRun(id);
+ const r2 = createJobRun(id);
+ completeJobRun(r1, 'success', 'first');
+ completeJobRun(r2, 'error', 'second');
+ const runs = getJobRuns(id);
+ expect(runs[0].id).toBe(r2);
+ expect(runs[1].id).toBe(r1);
+ });
+});