Merge pull request 'feat: include job config and run history in export/import backup' (#51) from feat/jobs-system into dev
Reviewed-on: #51
This commit was merged in pull request #51.
This commit is contained in:
8
js/ui.js
8
js/ui.js
@@ -382,15 +382,17 @@ async function importDB() {
|
||||
document.getElementById('confirm-ok').onclick = async () => {
|
||||
closeConfirm();
|
||||
try {
|
||||
const { instances, history = [] } = JSON.parse(await file.text());
|
||||
const { instances, history = [], jobs, job_runs } = JSON.parse(await file.text());
|
||||
const res = await fetch('/api/import', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ instances, history }),
|
||||
body: JSON.stringify({ instances, history, jobs, job_runs }),
|
||||
});
|
||||
const data = await res.json();
|
||||
if (!res.ok) { showToast(data.error ?? 'Import failed', 'error'); return; }
|
||||
showToast(`Imported ${data.imported} instance${data.imported !== 1 ? 's' : ''}`, 'success');
|
||||
const parts = [`${data.imported} instance${data.imported !== 1 ? 's' : ''}`];
|
||||
if (data.imported_jobs != null) parts.push(`${data.imported_jobs} job${data.imported_jobs !== 1 ? 's' : ''}`);
|
||||
showToast(`Imported ${parts.join(', ')}`, 'success');
|
||||
closeSettingsModal();
|
||||
renderDashboard();
|
||||
} catch {
|
||||
|
||||
27
server/db.js
27
server/db.js
@@ -235,6 +235,33 @@ export function getAllHistory() {
|
||||
return db.prepare('SELECT * FROM instance_history ORDER BY vmid, changed_at').all();
|
||||
}
|
||||
|
||||
/**
 * Fetch every job row for backup export, ordered by primary key.
 * Selects only the persisted job definition columns (no run data).
 * @returns {Array<object>} rows with id, key, name, description, enabled, schedule, config
 */
export function getAllJobs() {
  const selectJobs = db.prepare(
    'SELECT id, key, name, description, enabled, schedule, config FROM jobs ORDER BY id'
  );
  return selectJobs.all();
}
|
||||
|
||||
/**
 * Fetch every job-run row for backup export, grouped by job and
 * ordered chronologically within each job (by ascending run id).
 * @returns {Array<object>} all rows from job_runs
 */
export function getAllJobRuns() {
  const selectRuns = db.prepare('SELECT * FROM job_runs ORDER BY job_id, id');
  return selectRuns.all();
}
|
||||
|
||||
/**
 * Replace ALL jobs and job runs with the rows from an imported backup.
 *
 * Runs inside a single transaction: existing job_runs and jobs are
 * deleted (runs first, to satisfy the FK to jobs), then the provided
 * rows are inserted. On any failure the transaction is rolled back and
 * the error is rethrown, so the database is never left half-imported.
 *
 * Fix over the previous version: the manual BEGIN/COMMIT had no
 * ROLLBACK path — a failing INSERT (e.g. constraint violation in a
 * malformed backup) left the connection with an open transaction,
 * which would make every later write on this shared connection fail.
 *
 * @param {Array<object>} jobRows - rows with @id, @key, @name, @description, @enabled, @schedule, @config
 * @param {Array<object>} [jobRunRows=[]] - rows with @id, @job_id, @started_at, @ended_at, @status, @result
 * @throws rethrows any SQLite error after rolling back
 */
export function importJobs(jobRows, jobRunRows = []) {
  db.exec('BEGIN');
  try {
    db.exec('DELETE FROM job_runs');
    db.exec('DELETE FROM jobs');
    const insertJob = db.prepare(`
      INSERT INTO jobs (id, key, name, description, enabled, schedule, config)
      VALUES (@id, @key, @name, @description, @enabled, @schedule, @config)
    `);
    for (const j of jobRows) insertJob.run(j);
    if (jobRunRows.length) {
      const insertRun = db.prepare(`
        INSERT INTO job_runs (id, job_id, started_at, ended_at, status, result)
        VALUES (@id, @job_id, @started_at, @ended_at, @status, @result)
      `);
      for (const r of jobRunRows) insertRun.run(r);
    }
    db.exec('COMMIT');
  } catch (err) {
    // Restore the pre-import state; without this the connection is
    // stuck inside an open transaction after a failed import.
    db.exec('ROLLBACK');
    throw err;
  }
}
|
||||
|
||||
export function getConfig(key, defaultVal = '') {
|
||||
const row = db.prepare('SELECT value FROM config WHERE key = ?').get(key);
|
||||
return row ? row.value : defaultVal;
|
||||
|
||||
@@ -3,6 +3,7 @@ import {
|
||||
getInstances, getInstance, getDistinctStacks,
|
||||
createInstance, updateInstance, deleteInstance, importInstances, getInstanceHistory, getAllHistory,
|
||||
getConfig, setConfig, getJobs, getJob, updateJob, getJobRuns,
|
||||
getAllJobs, getAllJobRuns, importJobs,
|
||||
} from './db.js';
|
||||
import { runJob, restartJobs } from './jobs.js';
|
||||
|
||||
@@ -127,15 +128,17 @@ router.put('/instances/:vmid', (req, res) => {
|
||||
// GET /api/export
|
||||
router.get('/export', (_req, res) => {
|
||||
const instances = getInstances();
|
||||
const history = getAllHistory();
|
||||
const history = getAllHistory();
|
||||
const jobs = getAllJobs();
|
||||
const job_runs = getAllJobRuns();
|
||||
const date = new Date().toISOString().slice(0, 10);
|
||||
res.setHeader('Content-Disposition', `attachment; filename="catalyst-backup-${date}.json"`);
|
||||
res.json({ version: 2, exported_at: new Date().toISOString(), instances, history });
|
||||
res.json({ version: 3, exported_at: new Date().toISOString(), instances, history, jobs, job_runs });
|
||||
});
|
||||
|
||||
// POST /api/import
|
||||
router.post('/import', (req, res) => {
|
||||
const { instances, history = [] } = req.body ?? {};
|
||||
const { instances, history = [], jobs, job_runs } = req.body ?? {};
|
||||
if (!Array.isArray(instances)) {
|
||||
return res.status(400).json({ error: 'body must contain an instances array' });
|
||||
}
|
||||
@@ -147,7 +150,14 @@ router.post('/import', (req, res) => {
|
||||
if (errors.length) return res.status(400).json({ errors });
|
||||
try {
|
||||
importInstances(instances.map(normalise), Array.isArray(history) ? history : []);
|
||||
res.json({ imported: instances.length });
|
||||
if (Array.isArray(jobs)) {
|
||||
importJobs(jobs, Array.isArray(job_runs) ? job_runs : []);
|
||||
try { restartJobs(); } catch (e) { console.error('POST /api/import restartJobs', e); }
|
||||
}
|
||||
res.json({
|
||||
imported: instances.length,
|
||||
imported_jobs: Array.isArray(jobs) ? jobs.length : undefined,
|
||||
});
|
||||
} catch (e) {
|
||||
console.error('POST /api/import', e);
|
||||
res.status(500).json({ error: 'internal server error' });
|
||||
|
||||
@@ -276,9 +276,9 @@ describe('GET /api/export', () => {
|
||||
expect(res.body.instances).toEqual([])
|
||||
})
|
||||
|
||||
it('returns version 2', async () => {
|
||||
it('returns version 3', async () => {
|
||||
const res = await request(app).get('/api/export')
|
||||
expect(res.body.version).toBe(2)
|
||||
expect(res.body.version).toBe(3)
|
||||
})
|
||||
|
||||
it('includes a history array', async () => {
|
||||
@@ -287,6 +287,21 @@ describe('GET /api/export', () => {
|
||||
expect(res.body.history).toBeInstanceOf(Array)
|
||||
expect(res.body.history.some(e => e.field === 'created')).toBe(true)
|
||||
})
|
||||
|
||||
it('includes jobs and job_runs arrays', async () => {
|
||||
createJob(testJob)
|
||||
const res = await request(app).get('/api/export')
|
||||
expect(res.body.jobs).toBeInstanceOf(Array)
|
||||
expect(res.body.jobs).toHaveLength(1)
|
||||
expect(res.body.jobs[0].key).toBe('tailscale_sync')
|
||||
expect(res.body.job_runs).toBeInstanceOf(Array)
|
||||
})
|
||||
|
||||
it('exports raw job config without masking', async () => {
|
||||
createJob(testJob)
|
||||
const res = await request(app).get('/api/export')
|
||||
expect(res.body.jobs[0].config).toContain('tskey-test')
|
||||
})
|
||||
})
|
||||
|
||||
// ── POST /api/import ──────────────────────────────────────────────────────────
|
||||
@@ -341,6 +356,28 @@ describe('POST /api/import', () => {
|
||||
expect(res.status).toBe(200)
|
||||
expect(res.body.imported).toBe(1)
|
||||
})
|
||||
|
||||
it('imports jobs and job_runs and returns imported_jobs count', async () => {
|
||||
const exp = await request(app).get('/api/export')
|
||||
createJob(testJob)
|
||||
const fullExport = await request(app).get('/api/export')
|
||||
const res = await request(app).post('/api/import').send({
|
||||
instances: fullExport.body.instances,
|
||||
history: fullExport.body.history,
|
||||
jobs: fullExport.body.jobs,
|
||||
job_runs: fullExport.body.job_runs,
|
||||
})
|
||||
expect(res.status).toBe(200)
|
||||
expect(res.body.imported_jobs).toBe(1)
|
||||
expect((await request(app).get('/api/jobs')).body).toHaveLength(1)
|
||||
})
|
||||
|
||||
it('leaves jobs untouched when no jobs key in payload', async () => {
|
||||
createJob(testJob)
|
||||
await request(app).post('/api/import')
|
||||
.send({ instances: [{ ...base, vmid: 1, name: 'x' }] })
|
||||
expect((await request(app).get('/api/jobs')).body).toHaveLength(1)
|
||||
})
|
||||
})
|
||||
|
||||
// ── Static assets & SPA routing ───────────────────────────────────────────────
|
||||
|
||||
Reference in New Issue
Block a user