feat: include job config and run history in export/import backup
All checks were successful
CI / test (pull_request) Successful in 16s
CI / build-dev (pull_request) Has been skipped

Export bumped to version 3, now includes jobs (with raw unmasked
config) and job_runs arrays. Import restores them when present and
restarts the scheduler. Payloads without a jobs key leave jobs
untouched, keeping v1/v2 backups fully compatible.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-03-28 19:43:34 -04:00
parent a934db1a14
commit 954d85ca81
4 changed files with 85 additions and 9 deletions

View File

@@ -235,6 +235,33 @@ export function getAllHistory() {
return db.prepare('SELECT * FROM instance_history ORDER BY vmid, changed_at').all();
}
/**
 * Return every job row for export, ordered by id.
 * Selects the raw (unmasked) `config` column — callers must treat the
 * result as sensitive backup data.
 * @returns {Array<object>} job rows
 */
export function getAllJobs() {
  const stmt = db.prepare(
    'SELECT id, key, name, description, enabled, schedule, config FROM jobs ORDER BY id'
  );
  return stmt.all();
}
/**
 * Return every job-run row for export, ordered by (job_id, id).
 * @returns {Array<object>} job_runs rows
 */
export function getAllJobRuns() {
  const stmt = db.prepare('SELECT * FROM job_runs ORDER BY job_id, id');
  return stmt.all();
}
/**
 * Replace all jobs and job runs with the supplied rows, atomically.
 *
 * Fix: the original issued BEGIN but never rolled back on failure — if any
 * insert threw (constraint violation, malformed row), the connection was
 * left inside an open transaction with the jobs/job_runs tables already
 * wiped. Now the whole operation is wrapped in try/catch: on any error the
 * transaction is rolled back (restoring the pre-import state) and the
 * error is rethrown for the caller to handle.
 *
 * @param {Array<object>} jobRows - rows with @id/@key/@name/@description/@enabled/@schedule/@config
 * @param {Array<object>} [jobRunRows=[]] - rows with @id/@job_id/@started_at/@ended_at/@status/@result
 * @throws rethrows any DB error after rolling back
 */
export function importJobs(jobRows, jobRunRows = []) {
  db.exec('BEGIN');
  try {
    // Clear children first to satisfy the job_runs -> jobs foreign key.
    db.exec('DELETE FROM job_runs');
    db.exec('DELETE FROM jobs');
    const insertJob = db.prepare(`
      INSERT INTO jobs (id, key, name, description, enabled, schedule, config)
      VALUES (@id, @key, @name, @description, @enabled, @schedule, @config)
    `);
    for (const j of jobRows) insertJob.run(j);
    if (jobRunRows.length) {
      const insertRun = db.prepare(`
        INSERT INTO job_runs (id, job_id, started_at, ended_at, status, result)
        VALUES (@id, @job_id, @started_at, @ended_at, @status, @result)
      `);
      for (const r of jobRunRows) insertRun.run(r);
    }
    db.exec('COMMIT');
  } catch (err) {
    // Undo the partial import so the previous jobs/runs survive intact.
    db.exec('ROLLBACK');
    throw err;
  }
}
export function getConfig(key, defaultVal = '') {
const row = db.prepare('SELECT value FROM config WHERE key = ?').get(key);
return row ? row.value : defaultVal;

View File

@@ -3,6 +3,7 @@ import {
getInstances, getInstance, getDistinctStacks,
createInstance, updateInstance, deleteInstance, importInstances, getInstanceHistory, getAllHistory,
getConfig, setConfig, getJobs, getJob, updateJob, getJobRuns,
getAllJobs, getAllJobRuns, importJobs,
} from './db.js';
import { runJob, restartJobs } from './jobs.js';
@@ -127,15 +128,17 @@ router.put('/instances/:vmid', (req, res) => {
// GET /api/export
router.get('/export', (_req, res) => {
const instances = getInstances();
const history = getAllHistory();
const history = getAllHistory();
const jobs = getAllJobs();
const job_runs = getAllJobRuns();
const date = new Date().toISOString().slice(0, 10);
res.setHeader('Content-Disposition', `attachment; filename="catalyst-backup-${date}.json"`);
res.json({ version: 2, exported_at: new Date().toISOString(), instances, history });
res.json({ version: 3, exported_at: new Date().toISOString(), instances, history, jobs, job_runs });
});
// POST /api/import
router.post('/import', (req, res) => {
const { instances, history = [] } = req.body ?? {};
const { instances, history = [], jobs, job_runs } = req.body ?? {};
if (!Array.isArray(instances)) {
return res.status(400).json({ error: 'body must contain an instances array' });
}
@@ -147,7 +150,14 @@ router.post('/import', (req, res) => {
if (errors.length) return res.status(400).json({ errors });
try {
importInstances(instances.map(normalise), Array.isArray(history) ? history : []);
res.json({ imported: instances.length });
if (Array.isArray(jobs)) {
importJobs(jobs, Array.isArray(job_runs) ? job_runs : []);
try { restartJobs(); } catch (e) { console.error('POST /api/import restartJobs', e); }
}
res.json({
imported: instances.length,
imported_jobs: Array.isArray(jobs) ? jobs.length : undefined,
});
} catch (e) {
console.error('POST /api/import', e);
res.status(500).json({ error: 'internal server error' });