Compare commits
118 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 9295354e72 | |||
| 372cda6a58 | |||
| 3301e942ef | |||
| c4ebb76deb | |||
| bb765453ab | |||
| 88474d1048 | |||
| 954d85ca81 | |||
| 117dfc5f17 | |||
| c39c7a8aef | |||
| a934db1a14 | |||
| ea4c5f7c95 | |||
| 5c12acb6c7 | |||
| 0b350f3b28 | |||
| db4071a2cf | |||
| 37cd77850e | |||
| 14a4826bb6 | |||
| 550135ca37 | |||
| d7727badb1 | |||
| 537d78e71b | |||
| 47e9c4faf7 | |||
| 31a5090f4f | |||
| ecdac6fe23 | |||
| 07cef73fae | |||
| 1a84edc064 | |||
| bfb2c26821 | |||
| a985268987 | |||
| 218cdb08c5 | |||
| 2855cc7f81 | |||
| 07d2e215e4 | |||
| 8ef839d6d0 | |||
| 7af88328c8 | |||
| 096e2afb3d | |||
| e3d089a71f | |||
| 668e7c34bb | |||
| e796b4f400 | |||
| a4b5c20993 | |||
| d17f364fc5 | |||
| 5f79eec3dd | |||
| ed98bb57c0 | |||
| 120b61a423 | |||
| 074f0600af | |||
| e4f9407827 | |||
| fde5ce7dc1 | |||
| 20df10b333 | |||
| c906511bfc | |||
| 745e5920ad | |||
| 90e0a98914 | |||
| cba4b73798 | |||
| 0d567472a9 | |||
| 9f6b2ece52 | |||
| e3911157e9 | |||
| 0589288dfe | |||
| 8ead7687e5 | |||
| 0e1e9b6699 | |||
| 3c008c5bce | |||
| 1582c28b28 | |||
| bcd934f5b1 | |||
| 4c9acd20c7 | |||
| 520fb98d96 | |||
| 800184d2be | |||
| 82c314f85c | |||
| 2fba532ec7 | |||
| 9177578aaf | |||
| 94c4a0af51 | |||
| ec60d53767 | |||
| ad81d7ace7 | |||
| badd542bd7 | |||
| 7c31ee3327 | |||
| 0ecfa7dbc9 | |||
| f16fb3e088 | |||
| cb01573cdf | |||
| b48d5fb836 | |||
| 6e124576cb | |||
| 1f328e026d | |||
| 71c2c68fbc | |||
| 8bcf8229db | |||
| 6e1e9f7153 | |||
| 1fbb74d1ef | |||
| 617a5b5800 | |||
| 0985d9d481 | |||
| 2af6c56558 | |||
| af207339a4 | |||
| cd16b7ea28 | |||
| 20d8a13375 | |||
| f72aaa52f8 | |||
| dd47d5006e | |||
| 10e25e1803 | |||
| afbdefa549 | |||
| 1a62e2fdd9 | |||
| 1271c061fd | |||
| 7b2a996c21 | |||
| 3233d65db0 | |||
| f1e192c5d4 | |||
| 3037381084 | |||
| e54c1d4848 | |||
| 3ae3f98df5 | |||
| 65d6514603 | |||
| bc44bcbde9 | |||
| cae0f2222a | |||
| 28833a7ec6 | |||
| 6ba02bf17d | |||
| bfe71b2511 | |||
| 0f2a37cb39 | |||
| 73f4eabbc7 | |||
| 515ff8ddb3 | |||
| 08c12c9394 | |||
| 4ce7df4649 | |||
| 6c04a30c3a | |||
| c6cd8098fd | |||
| 15ed329743 | |||
| 1412b2e0b7 | |||
| 30b037ff9c | |||
| 7a5b5d7afc | |||
| 3383bee968 | |||
| 0c30e4bd29 | |||
| 01f83d25f6 | |||
| 79adc365d8 | |||
| 6e40413385 |
@@ -1,7 +1,9 @@
|
|||||||
{
|
{
|
||||||
"permissions": {
|
"permissions": {
|
||||||
"allow": [
|
"allow": [
|
||||||
"Bash(npm test:*)"
|
"Bash(npm test:*)",
|
||||||
|
"Bash(npm install:*)",
|
||||||
|
"Bash(find /c/Users/josh1/Documents/Code/Catalyst -type f \\\\\\(-name *.test.js -o -name *.spec.js -o -name .env* -o -name *.config.js \\\\\\))"
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,8 +1,10 @@
|
|||||||
.git
|
.git
|
||||||
.gitea
|
.gitea
|
||||||
|
.gitignore
|
||||||
Dockerfile
|
Dockerfile
|
||||||
.dockerignore
|
.dockerignore
|
||||||
docker-compose.yml
|
docker-compose.yml
|
||||||
node_modules
|
node_modules
|
||||||
tests
|
tests
|
||||||
vitest.config.js
|
vitest.config.js
|
||||||
|
data
|
||||||
|
|||||||
@@ -1,84 +0,0 @@
|
|||||||
name: Build
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches: [main]
|
|
||||||
tags:
|
|
||||||
- 'v*'
|
|
||||||
|
|
||||||
env:
|
|
||||||
IMAGE: ${{ vars.REGISTRY_HOST }}/${{ gitea.repository_owner }}/catalyst
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
test:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Checkout
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: Setup Node
|
|
||||||
uses: actions/setup-node@v4
|
|
||||||
with:
|
|
||||||
node-version: lts/*
|
|
||||||
cache: npm
|
|
||||||
|
|
||||||
- name: Install dependencies
|
|
||||||
run: npm ci
|
|
||||||
|
|
||||||
- name: Run tests
|
|
||||||
run: npm test
|
|
||||||
|
|
||||||
build:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
needs: test
|
|
||||||
if: startsWith(gitea.ref, 'refs/tags/v')
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Checkout
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: Docker metadata
|
|
||||||
id: meta
|
|
||||||
uses: docker/metadata-action@v5
|
|
||||||
with:
|
|
||||||
images: ${{ env.IMAGE }}
|
|
||||||
tags: |
|
|
||||||
type=semver,pattern={{version}}
|
|
||||||
type=semver,pattern={{major}}.{{minor}}
|
|
||||||
type=sha,prefix=,format=short
|
|
||||||
type=raw,value=latest,enable={{is_default_branch}}
|
|
||||||
|
|
||||||
- name: Log in to Gitea registry
|
|
||||||
uses: docker/login-action@v3
|
|
||||||
with:
|
|
||||||
registry: ${{ vars.REGISTRY_HOST }}
|
|
||||||
username: ${{ gitea.actor }}
|
|
||||||
password: ${{ secrets.TOKEN }}
|
|
||||||
|
|
||||||
- name: Build and push
|
|
||||||
uses: docker/build-push-action@v5
|
|
||||||
with:
|
|
||||||
context: .
|
|
||||||
push: true
|
|
||||||
tags: ${{ steps.meta.outputs.tags }}
|
|
||||||
|
|
||||||
release:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
needs: build
|
|
||||||
if: startsWith(gitea.ref, 'refs/tags/v')
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Create release
|
|
||||||
run: |
|
|
||||||
curl -sf -X POST \
|
|
||||||
-H "Authorization: token ${{ secrets.TOKEN }}" \
|
|
||||||
-H "Content-Type: application/json" \
|
|
||||||
"${{ gitea.server_url }}/api/v1/repos/${{ gitea.repository }}/releases" \
|
|
||||||
-d "{
|
|
||||||
\"tag_name\": \"${{ gitea.ref_name }}\",
|
|
||||||
\"name\": \"Catalyst ${{ gitea.ref_name }}\",
|
|
||||||
\"body\": \"### Image\n\n\`${{ env.IMAGE }}:${{ gitea.ref_name }}\`\",
|
|
||||||
\"draft\": false,
|
|
||||||
\"prerelease\": false
|
|
||||||
}"
|
|
||||||
53
.gitea/workflows/ci.yml
Normal file
53
.gitea/workflows/ci.yml
Normal file
@@ -0,0 +1,53 @@
|
|||||||
|
name: CI
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [dev, main]
|
||||||
|
pull_request:
|
||||||
|
branches: [dev, main]
|
||||||
|
|
||||||
|
env:
|
||||||
|
IMAGE: ${{ vars.REGISTRY_HOST }}/${{ gitea.repository_owner }}/catalyst
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
test:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- uses: actions/setup-node@v4
|
||||||
|
with:
|
||||||
|
node-version: 'lts/*'
|
||||||
|
|
||||||
|
- run: npm ci
|
||||||
|
|
||||||
|
- run: npm test
|
||||||
|
|
||||||
|
build-dev:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: test
|
||||||
|
if: github.event_name == 'push' && github.ref == 'refs/heads/dev'
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Log in to registry
|
||||||
|
uses: docker/login-action@v3
|
||||||
|
with:
|
||||||
|
registry: ${{ vars.REGISTRY_HOST }}
|
||||||
|
username: ${{ gitea.actor }}
|
||||||
|
password: ${{ secrets.TOKEN }}
|
||||||
|
|
||||||
|
- name: Compute short SHA
|
||||||
|
run: echo "SHORT_SHA=$(git rev-parse --short HEAD)" >> $GITEA_ENV
|
||||||
|
|
||||||
|
- name: Build and push
|
||||||
|
uses: docker/build-push-action@v5
|
||||||
|
with:
|
||||||
|
context: .
|
||||||
|
push: true
|
||||||
|
build-args: BUILD_VERSION=dev-${{ env.SHORT_SHA }}
|
||||||
|
tags: |
|
||||||
|
${{ env.IMAGE }}:dev
|
||||||
|
${{ env.IMAGE }}:dev-${{ gitea.sha }}
|
||||||
109
.gitea/workflows/release.yml
Normal file
109
.gitea/workflows/release.yml
Normal file
@@ -0,0 +1,109 @@
|
|||||||
|
name: Release
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [main]
|
||||||
|
|
||||||
|
env:
|
||||||
|
IMAGE: ${{ vars.REGISTRY_HOST }}/${{ gitea.repository_owner }}/catalyst
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
release:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- uses: actions/setup-node@v4
|
||||||
|
with:
|
||||||
|
node-version: 'lts/*'
|
||||||
|
|
||||||
|
- run: npm ci
|
||||||
|
- run: npm test
|
||||||
|
|
||||||
|
- name: Read version
|
||||||
|
run: |
|
||||||
|
VERSION=$(node -p "require('./package.json').version")
|
||||||
|
echo "VERSION=${VERSION}" >> $GITEA_ENV
|
||||||
|
|
||||||
|
- name: Assert tag does not exist
|
||||||
|
run: |
|
||||||
|
if git ls-remote --tags origin "refs/tags/v${{ env.VERSION }}" | grep -q .; then
|
||||||
|
echo "ERROR: tag v${{ env.VERSION }} already exists — bump version in package.json before merging to main."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Create and push tag
|
||||||
|
run: |
|
||||||
|
git config user.name "gitea-actions"
|
||||||
|
git config user.email "actions@gitea"
|
||||||
|
git tag "v${{ env.VERSION }}"
|
||||||
|
git push origin "v${{ env.VERSION }}"
|
||||||
|
|
||||||
|
- name: Generate release notes
|
||||||
|
run: |
|
||||||
|
LAST_TAG=$(git describe --tags --abbrev=0 HEAD^ 2>/dev/null || echo "")
|
||||||
|
if [ -n "$LAST_TAG" ]; then
|
||||||
|
git log "${LAST_TAG}..HEAD" --pretty=format:"- %s" --no-merges > /tmp/release_notes.txt
|
||||||
|
else
|
||||||
|
git log --pretty=format:"- %s" --no-merges > /tmp/release_notes.txt
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Docker metadata
|
||||||
|
id: meta
|
||||||
|
uses: docker/metadata-action@v5
|
||||||
|
with:
|
||||||
|
images: ${{ env.IMAGE }}
|
||||||
|
tags: |
|
||||||
|
type=semver,pattern={{version}},value=v${{ env.VERSION }}
|
||||||
|
type=semver,pattern={{major}}.{{minor}},value=v${{ env.VERSION }}
|
||||||
|
type=sha,prefix=,format=short
|
||||||
|
type=raw,value=latest
|
||||||
|
|
||||||
|
- name: Log in to registry
|
||||||
|
uses: docker/login-action@v3
|
||||||
|
with:
|
||||||
|
registry: ${{ vars.REGISTRY_HOST }}
|
||||||
|
username: ${{ gitea.actor }}
|
||||||
|
password: ${{ secrets.TOKEN }}
|
||||||
|
|
||||||
|
- name: Build and push
|
||||||
|
uses: docker/build-push-action@v5
|
||||||
|
with:
|
||||||
|
context: .
|
||||||
|
push: true
|
||||||
|
tags: ${{ steps.meta.outputs.tags }}
|
||||||
|
|
||||||
|
- name: Create Gitea release
|
||||||
|
run: |
|
||||||
|
cat > /tmp/make_release.py << 'PYEOF'
|
||||||
|
import json, os
|
||||||
|
v = os.environ['VERSION']
|
||||||
|
img = os.environ['IMAGE']
|
||||||
|
raw = open('/tmp/release_notes.txt').read().strip()
|
||||||
|
feats, fixes = [], []
|
||||||
|
for line in raw.splitlines():
|
||||||
|
msg = line.lstrip('- ').strip()
|
||||||
|
if msg.startswith('feat:'):
|
||||||
|
feats.append('- ' + msg[5:].strip())
|
||||||
|
elif msg.startswith('fix:'):
|
||||||
|
fixes.append('- ' + msg[4:].strip())
|
||||||
|
sections = []
|
||||||
|
if feats:
|
||||||
|
sections.append('### New Features\n\n' + '\n'.join(feats))
|
||||||
|
if fixes:
|
||||||
|
sections.append('### Bug Fixes\n\n' + '\n'.join(fixes))
|
||||||
|
notes = '\n\n'.join(sections) or '_No changes_'
|
||||||
|
body = notes + '\n\n### Image\n\n`' + img + ':' + v + '`'
|
||||||
|
payload = {'tag_name': 'v'+v, 'name': 'Catalyst v'+v, 'body': body, 'draft': False, 'prerelease': False}
|
||||||
|
open('/tmp/release_body.json', 'w').write(json.dumps(payload))
|
||||||
|
PYEOF
|
||||||
|
python3 /tmp/make_release.py
|
||||||
|
curl -sf -X POST \
|
||||||
|
-H "Authorization: token ${{ secrets.TOKEN }}" \
|
||||||
|
-H "Content-Type: application/json" \
|
||||||
|
"${{ gitea.server_url }}/api/v1/repos/${{ gitea.repository }}/releases" \
|
||||||
|
--data @/tmp/release_body.json
|
||||||
4
.gitignore
vendored
4
.gitignore
vendored
@@ -1,2 +1,4 @@
|
|||||||
node_modules/
|
node_modules/
|
||||||
js/version.js
|
data/*.db
|
||||||
|
data/*.db-shm
|
||||||
|
data/*.db-wal
|
||||||
|
|||||||
28
Dockerfile
28
Dockerfile
@@ -1,6 +1,22 @@
|
|||||||
FROM nginx:alpine
|
FROM node:lts-alpine
|
||||||
COPY nginx.conf /etc/nginx/conf.d/default.conf
|
RUN addgroup -S app && adduser -S app -G app
|
||||||
COPY . /usr/share/nginx/html
|
|
||||||
RUN awk -F'"' '/"version"/{printf "const VERSION = \"%s\";\n", $4; exit}' \
|
WORKDIR /app
|
||||||
/usr/share/nginx/html/package.json \
|
|
||||||
> /usr/share/nginx/html/js/version.js
|
COPY package*.json ./
|
||||||
|
RUN npm ci --omit=dev
|
||||||
|
|
||||||
|
COPY . .
|
||||||
|
ARG BUILD_VERSION=""
|
||||||
|
RUN if [ -n "$BUILD_VERSION" ]; then \
|
||||||
|
printf 'const VERSION = "%s";\n' "$BUILD_VERSION" > js/version.js; \
|
||||||
|
else \
|
||||||
|
awk -F'"' '/"version"/{printf "const VERSION = \"%s\";\n", $4; exit}' \
|
||||||
|
package.json > js/version.js; \
|
||||||
|
fi
|
||||||
|
|
||||||
|
RUN mkdir -p /app/data && chown -R app:app /app
|
||||||
|
USER app
|
||||||
|
|
||||||
|
EXPOSE 3000
|
||||||
|
CMD ["node", "server/server.js"]
|
||||||
|
|||||||
349
README.md
349
README.md
@@ -1,134 +1,243 @@
|
|||||||
# Catalyst
|
# Catalyst
|
||||||
|
|
||||||
A lightweight instance registry for tracking self-hosted infrastructure. No backend, no framework — just a browser, a SQLite database compiled to WebAssembly, and a static file server. :)
|
A self-hosted infrastructure registry for homelab Proxmox environments. Track virtual machines across stacks, monitor service health, and maintain a full audit log of every configuration change.
|
||||||
|
|
||||||
## Structure
|
|
||||||
|
|
||||||
```
|
|
||||||
index.html Entry point
|
|
||||||
css/app.css Styles
|
|
||||||
js/
|
|
||||||
config.js Service definitions and seed data
|
|
||||||
db.js Data layer
|
|
||||||
ui.js Rendering, modals, notifications
|
|
||||||
app.js Router
|
|
||||||
```
|
|
||||||
|
|
||||||
## Data layer
|
|
||||||
|
|
||||||
All reads and writes go through five functions in `js/db.js`. This is the boundary that would be replaced when wiring Catalyst to a real backend — nothing else in the codebase touches data directly.
|
|
||||||
|
|
||||||
### `getInstances(filters?)`
|
|
||||||
|
|
||||||
Returns an array of instances, sorted by name. All filters are optional.
|
|
||||||
|
|
||||||
```js
|
|
||||||
getInstances()
|
|
||||||
getInstances({ search: 'plex' })
|
|
||||||
getInstances({ state: 'degraded' })
|
|
||||||
getInstances({ stack: 'production' })
|
|
||||||
getInstances({ search: 'home', state: 'deployed', stack: 'production' })
|
|
||||||
```
|
|
||||||
|
|
||||||
`search` matches against `name`, `vmid`, and `stack`.
|
|
||||||
|
|
||||||
### `getInstance(vmid)`
|
|
||||||
|
|
||||||
Returns a single instance by VMID, or `null` if not found.
|
|
||||||
|
|
||||||
```js
|
|
||||||
getInstance(137) // → { id, name, vmid, state, stack, ...services, createdAt, updatedAt }
|
|
||||||
```
|
|
||||||
|
|
||||||
### `getDistinctStacks()`
|
|
||||||
|
|
||||||
Returns a sorted array of unique stack names present in the registry. Used to populate the stack filter dynamically.
|
|
||||||
|
|
||||||
```js
|
|
||||||
getDistinctStacks() // → ['development', 'production']
|
|
||||||
```
|
|
||||||
|
|
||||||
### `createInstance(data)`
|
|
||||||
|
|
||||||
Inserts a new instance. Returns `{ ok: true }` on success or `{ ok: false, error }` on failure (e.g. duplicate VMID).
|
|
||||||
|
|
||||||
```js
|
|
||||||
createInstance({
|
|
||||||
name: 'plex',
|
|
||||||
vmid: 117,
|
|
||||||
state: 'deployed', // 'deployed' | 'testing' | 'degraded'
|
|
||||||
stack: 'production',
|
|
||||||
tailscale_ip: '100.64.0.1',
|
|
||||||
atlas: 1,
|
|
||||||
argus: 1,
|
|
||||||
semaphore: 0,
|
|
||||||
patchmon: 1,
|
|
||||||
tailscale: 1,
|
|
||||||
andromeda: 0,
|
|
||||||
hardware_acceleration: 1,
|
|
||||||
})
|
|
||||||
```
|
|
||||||
|
|
||||||
### `updateInstance(id, data)`
|
|
||||||
|
|
||||||
Updates an existing instance by internal `id`. Accepts the same shape as `createInstance`. Returns `{ ok: true }` or `{ ok: false, error }`.
|
|
||||||
|
|
||||||
### `deleteInstance(id)`
|
|
||||||
|
|
||||||
Deletes an instance by internal `id`. Only instances on the `development` stack can be deleted — this is enforced in the UI before `deleteInstance` is ever called.
|
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
## Instance shape
|
## Features
|
||||||
|
|
||||||
| Field | Type | Notes |
|
- **Dashboard** — filterable, searchable instance list with state and stack badges
|
||||||
|
- **Detail pages** — per-instance view with service flags, Tailscale IP, and a full change timeline
|
||||||
|
- **Audit log** — every field change is recorded with before/after values and a timestamp
|
||||||
|
- **Full CRUD** — add, edit, and delete instances via a clean modal interface
|
||||||
|
- **Production safeguard** — only development instances can be deleted; production instances must be demoted first
|
||||||
|
- **Export / import** — JSON backup and restore via the settings modal
|
||||||
|
- **REST API** — every operation is a plain HTTP call
|
||||||
|
- **Persistent storage** — SQLite on a Docker named volume; survives restarts and upgrades
|
||||||
|
- **Zero native dependencies** — SQLite via Node's built-in `node:sqlite`; no compilation, no binaries
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Quick start
|
||||||
|
|
||||||
|
```bash
|
||||||
|
docker compose up -d
|
||||||
|
```
|
||||||
|
|
||||||
|
Open [http://localhost:3000](http://localhost:3000).
|
||||||
|
|
||||||
|
### Environment variables
|
||||||
|
|
||||||
|
| Variable | Default | Description |
|
||||||
|---|---|---|
|
|---|---|---|
|
||||||
| `id` | integer | Internal autoincrement ID |
|
| `PORT` | `3000` | HTTP port the server binds to |
|
||||||
| `vmid` | integer | Unique. Used as the public identifier and in URLs (`/instance/137`) |
|
| `DB_PATH` | `data/catalyst.db` | Path to the SQLite database file |
|
||||||
| `name` | string | Display name |
|
|
||||||
| `state` | string | `deployed`, `testing`, or `degraded` |
|
---
|
||||||
| `stack` | string | `production` or `development` |
|
|
||||||
| `tailscale_ip` | string | Optional |
|
## REST API
|
||||||
| `atlas` | 0 \| 1 | |
|
|
||||||
| `argus` | 0 \| 1 | |
|
All endpoints are under `/api`. Request and response bodies are JSON.
|
||||||
| `semaphore` | 0 \| 1 | |
|
|
||||||
| `patchmon` | 0 \| 1 | |
|
### Instances
|
||||||
| `tailscale` | 0 \| 1 | |
|
|
||||||
| `andromeda` | 0 \| 1 | |
|
#### `GET /api/instances`
|
||||||
| `hardware_acceleration` | 0 \| 1 | |
|
|
||||||
| `createdAt` | ISO string | Set on insert |
|
Returns all instances sorted by name. All query parameters are optional.
|
||||||
| `updatedAt` | ISO string | Updated on every write |
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
|---|---|---|
|
||||||
|
| `search` | string | Partial match on `name`, `vmid`, or `stack` |
|
||||||
|
| `state` | string | Exact match: `deployed`, `testing`, `degraded` |
|
||||||
|
| `stack` | string | Exact match: `production`, `development` |
|
||||||
|
|
||||||
|
```
|
||||||
|
GET /api/instances?search=plex&state=deployed
|
||||||
|
```
|
||||||
|
|
||||||
|
```json
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"vmid": 117,
|
||||||
|
"name": "plex",
|
||||||
|
"state": "deployed",
|
||||||
|
"stack": "production",
|
||||||
|
"tailscale_ip": "100.64.0.1",
|
||||||
|
"atlas": 1, "argus": 1, "semaphore": 0,
|
||||||
|
"patchmon": 1, "tailscale": 1, "andromeda": 0,
|
||||||
|
"hardware_acceleration": 1,
|
||||||
|
"created_at": "2024-01-15T10:30:00",
|
||||||
|
"updated_at": "2024-03-10T14:22:00"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
#### `GET /api/instances/stacks`
|
||||||
|
|
||||||
|
Returns a sorted array of distinct stack names present in the registry.
|
||||||
|
|
||||||
|
```
|
||||||
|
GET /api/instances/stacks
|
||||||
|
→ ["development", "production"]
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
#### `GET /api/instances/:vmid`
|
||||||
|
|
||||||
|
Returns a single instance by VMID.
|
||||||
|
|
||||||
|
| Status | Condition |
|
||||||
|
|---|---|
|
||||||
|
| `200` | Instance found |
|
||||||
|
| `400` | VMID is not a valid integer |
|
||||||
|
| `404` | No instance with that VMID |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
#### `GET /api/instances/:vmid/history`
|
||||||
|
|
||||||
|
Returns the audit log for an instance — newest events first.
|
||||||
|
|
||||||
|
| Status | Condition |
|
||||||
|
|---|---|
|
||||||
|
| `200` | History returned (may be empty array) |
|
||||||
|
| `400` | VMID is not a valid integer |
|
||||||
|
| `404` | No instance with that VMID |
|
||||||
|
|
||||||
|
```json
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"id": 3,
|
||||||
|
"vmid": 117,
|
||||||
|
"field": "state",
|
||||||
|
"old_value": "testing",
|
||||||
|
"new_value": "deployed",
|
||||||
|
"changed_at": "2024-03-10T14:22:00"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": 1,
|
||||||
|
"vmid": 117,
|
||||||
|
"field": "created",
|
||||||
|
"old_value": null,
|
||||||
|
"new_value": null,
|
||||||
|
"changed_at": "2024-01-15T10:30:00"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
#### `POST /api/instances`
|
||||||
|
|
||||||
|
Creates a new instance. Returns the created record.
|
||||||
|
|
||||||
|
| Status | Condition |
|
||||||
|
|---|---|
|
||||||
|
| `201` | Created successfully |
|
||||||
|
| `400` | Validation error — see `errors` array in response |
|
||||||
|
| `409` | VMID already exists |
|
||||||
|
|
||||||
|
**Request body:**
|
||||||
|
|
||||||
|
| Field | Type | Required | Notes |
|
||||||
|
|---|---|---|---|
|
||||||
|
| `name` | string | yes | |
|
||||||
|
| `vmid` | integer | yes | Must be > 0 and unique |
|
||||||
|
| `state` | string | yes | `deployed`, `testing`, or `degraded` |
|
||||||
|
| `stack` | string | yes | `production` or `development` |
|
||||||
|
| `tailscale_ip` | string | no | Valid IPv4 or empty string |
|
||||||
|
| `atlas` | 0\|1 | no | |
|
||||||
|
| `argus` | 0\|1 | no | |
|
||||||
|
| `semaphore` | 0\|1 | no | |
|
||||||
|
| `patchmon` | 0\|1 | no | |
|
||||||
|
| `tailscale` | 0\|1 | no | |
|
||||||
|
| `andromeda` | 0\|1 | no | |
|
||||||
|
| `hardware_acceleration` | 0\|1 | no | |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
#### `PUT /api/instances/:vmid`
|
||||||
|
|
||||||
|
Replaces all fields on an existing instance. Accepts the same body shape as `POST`. The `vmid` in the body may differ from the URL — this is how you change a VMID.
|
||||||
|
|
||||||
|
| Status | Condition |
|
||||||
|
|---|---|
|
||||||
|
| `200` | Updated successfully |
|
||||||
|
| `400` | Validation error |
|
||||||
|
| `404` | No instance with that VMID |
|
||||||
|
| `409` | New VMID conflicts with an existing instance |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
#### `DELETE /api/instances/:vmid`
|
||||||
|
|
||||||
|
Deletes an instance. Only instances on the `development` stack may be deleted.
|
||||||
|
|
||||||
|
| Status | Condition |
|
||||||
|
|---|---|
|
||||||
|
| `204` | Deleted successfully |
|
||||||
|
| `400` | VMID is not a valid integer |
|
||||||
|
| `404` | No instance with that VMID |
|
||||||
|
| `422` | Instance is on the `production` stack |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Backup
|
||||||
|
|
||||||
|
#### `GET /api/export`
|
||||||
|
|
||||||
|
Downloads a JSON backup of all instances as a file attachment.
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"version": 1,
|
||||||
|
"exported_at": "2024-03-10T14:22:00.000Z",
|
||||||
|
"instances": [ ... ]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### `POST /api/import`
|
||||||
|
|
||||||
|
Replaces all instances from a JSON backup. Validates every row before committing — if any row is invalid the entire import is rejected.
|
||||||
|
|
||||||
|
| Status | Condition |
|
||||||
|
|---|---|
|
||||||
|
| `200` | Import successful — returns `{ "imported": N }` |
|
||||||
|
| `400` | Body missing `instances` array, or validation errors |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Development
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npm install
|
||||||
|
npm test # run all tests once
|
||||||
|
npm run test:watch # watch mode
|
||||||
|
npm start # start the server on :3000
|
||||||
|
```
|
||||||
|
|
||||||
|
Tests are split across three files:
|
||||||
|
|
||||||
|
| File | What it covers |
|
||||||
|
|---|---|
|
||||||
|
| `tests/db.test.js` | SQLite data layer — CRUD, constraints, filters, history logging |
|
||||||
|
| `tests/api.test.js` | HTTP API — all endpoints, status codes, error cases |
|
||||||
|
| `tests/helpers.test.js` | UI helpers — `esc()` XSS contract, date formatting, history formatters |
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
## Versioning
|
## Versioning
|
||||||
|
|
||||||
Catalyst uses [semantic versioning](https://semver.org). The version in `package.json` is the source of truth and must match the release tag.
|
Catalyst uses [semantic versioning](https://semver.org). `package.json` is the single source of truth.
|
||||||
|
|
||||||
| Change | Bump | Example |
|
| Change | Bump |
|
||||||
|---|---|---|
|
|---|---|
|
||||||
| Bug fix | patch | `1.0.0` → `1.0.1` |
|
| Bug fix | patch |
|
||||||
| New feature, backward compatible | minor | `1.0.0` → `1.1.0` |
|
| New feature, backward compatible | minor |
|
||||||
| Breaking change | major | `1.0.0` → `2.0.0` |
|
| Breaking change | major |
|
||||||
|
|
||||||
### Cutting a release
|
Pushing a tag triggers the CI pipeline: **test → build → release**.
|
||||||
|
Docker images are tagged `:x.y.z`, `:x.y`, and `:latest`.
|
||||||
**1. Bump the version in `package.json`**
|
|
||||||
```json
|
|
||||||
"version": "1.1.0"
|
|
||||||
```
|
|
||||||
|
|
||||||
**2. Commit, tag, and push**
|
|
||||||
```bash
|
|
||||||
git add package.json
|
|
||||||
git commit -m "chore: release v1.1.0"
|
|
||||||
git tag v1.1.0
|
|
||||||
git push && git push --tags
|
|
||||||
```
|
|
||||||
|
|
||||||
Pushing the tag triggers the full pipeline: tests → build → release.
|
|
||||||
|
|
||||||
- The image is tagged `:1.1.0`, `:1.1`, and `:latest` in the Gitea registry
|
|
||||||
- A Gitea release is created at `v1.1.0` with the image reference in the release notes
|
|
||||||
|
|
||||||
Pushes to `main` without a tag still run tests and build a `:latest` image — no release is created.
|
|
||||||
|
|||||||
232
css/app.css
232
css/app.css
@@ -25,6 +25,10 @@
|
|||||||
--mono: 'JetBrains Mono', 'IBM Plex Mono', monospace;
|
--mono: 'JetBrains Mono', 'IBM Plex Mono', monospace;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
html {
|
||||||
|
zoom: 1.1;
|
||||||
|
}
|
||||||
|
|
||||||
html, body {
|
html, body {
|
||||||
height: 100%;
|
height: 100%;
|
||||||
background: var(--bg);
|
background: var(--bg);
|
||||||
@@ -70,6 +74,19 @@ nav {
|
|||||||
|
|
||||||
.nav-sep { flex: 1; }
|
.nav-sep { flex: 1; }
|
||||||
|
|
||||||
|
.nav-btn {
|
||||||
|
background: none;
|
||||||
|
border: 1px solid var(--border2);
|
||||||
|
color: var(--text2);
|
||||||
|
border-radius: 6px;
|
||||||
|
padding: 4px 8px;
|
||||||
|
font-size: 14px;
|
||||||
|
cursor: pointer;
|
||||||
|
margin-left: 10px;
|
||||||
|
line-height: 1;
|
||||||
|
}
|
||||||
|
.nav-btn:hover { border-color: var(--accent); color: var(--accent); }
|
||||||
|
|
||||||
.nav-divider { color: var(--border2); }
|
.nav-divider { color: var(--border2); }
|
||||||
|
|
||||||
.nav-status {
|
.nav-status {
|
||||||
@@ -289,6 +306,7 @@ select:focus { border-color: var(--accent); }
|
|||||||
border-radius: 3px;
|
border-radius: 3px;
|
||||||
letter-spacing: 0.08em;
|
letter-spacing: 0.08em;
|
||||||
text-transform: uppercase;
|
text-transform: uppercase;
|
||||||
|
text-align: center;
|
||||||
}
|
}
|
||||||
|
|
||||||
.badge.deployed { background: var(--accent2); color: var(--accent); }
|
.badge.deployed { background: var(--accent2); color: var(--accent); }
|
||||||
@@ -360,16 +378,25 @@ select:focus { border-color: var(--accent); }
|
|||||||
}
|
}
|
||||||
|
|
||||||
.detail-sub {
|
.detail-sub {
|
||||||
font-size: 12px;
|
font-size: 13px;
|
||||||
color: var(--text3);
|
margin-top: 8px;
|
||||||
margin-top: 6px;
|
|
||||||
display: flex;
|
display: flex;
|
||||||
gap: 16px;
|
align-items: center;
|
||||||
|
gap: 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
.detail-sub span { display: flex; gap: 4px; }
|
.detail-sub > span {
|
||||||
.detail-sub .lbl { color: var(--text3); }
|
display: flex;
|
||||||
.detail-sub .val { color: var(--text2); }
|
align-items: center;
|
||||||
|
gap: 6px;
|
||||||
|
}
|
||||||
|
.detail-sub > span + span {
|
||||||
|
margin-left: 12px;
|
||||||
|
padding-left: 12px;
|
||||||
|
border-left: 1px solid var(--border);
|
||||||
|
}
|
||||||
|
.detail-sub .lbl { color: var(--text3); font-size: 11px; }
|
||||||
|
.detail-sub .val { color: var(--text); }
|
||||||
|
|
||||||
.detail-actions { display: flex; gap: 8px; }
|
.detail-actions { display: flex; gap: 8px; }
|
||||||
|
|
||||||
@@ -614,6 +641,58 @@ select:focus { border-color: var(--accent); }
|
|||||||
|
|
||||||
.confirm-actions { display: flex; justify-content: flex-end; gap: 10px; }
|
.confirm-actions { display: flex; justify-content: flex-end; gap: 10px; }
|
||||||
|
|
||||||
|
/* ── HISTORY TIMELINE ── */
|
||||||
|
.tl-item {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: space-between;
|
||||||
|
gap: 24px;
|
||||||
|
padding: 9px 0;
|
||||||
|
border-bottom: 1px solid var(--border);
|
||||||
|
}
|
||||||
|
.tl-item:last-child { border-bottom: none; }
|
||||||
|
.tl-event { display: flex; align-items: center; gap: 7px; font-size: 13px; min-width: 0; }
|
||||||
|
.tl-label { color: var(--text2); }
|
||||||
|
.tl-sep { color: var(--text3); user-select: none; }
|
||||||
|
.tl-old { color: var(--text3); text-decoration: line-through; font-size: 12px; }
|
||||||
|
.tl-arrow { color: var(--text3); font-size: 11px; }
|
||||||
|
.tl-new { color: var(--text); font-weight: 500; }
|
||||||
|
.tl-time { color: var(--text3); font-size: 11px; white-space: nowrap; flex-shrink: 0; }
|
||||||
|
.tl-deployed { color: var(--accent); }
|
||||||
|
.tl-testing { color: var(--amber); }
|
||||||
|
.tl-degraded { color: var(--red); }
|
||||||
|
.tl-created .tl-event { color: var(--accent); font-weight: 500; }
|
||||||
|
.tl-empty { color: var(--text3); font-size: 12px; padding: 8px 0; }
|
||||||
|
|
||||||
|
/* ── SETTINGS MODAL ── */
|
||||||
|
#settings-modal .modal-body { padding-top: 0; }
|
||||||
|
.settings-section { padding: 16px 0; border-bottom: 1px solid var(--border); }
|
||||||
|
.settings-section:last-child { border-bottom: none; padding-bottom: 0; }
|
||||||
|
.settings-section-title {
|
||||||
|
font-size: 10px;
|
||||||
|
font-weight: 600;
|
||||||
|
text-transform: uppercase;
|
||||||
|
letter-spacing: 0.1em;
|
||||||
|
color: var(--text3);
|
||||||
|
margin-bottom: 8px;
|
||||||
|
}
|
||||||
|
.settings-desc { font-size: 12px; color: var(--text2); margin: 0 0 14px; line-height: 1.6; }
|
||||||
|
.settings-row { display: flex; align-items: center; gap: 12px; }
|
||||||
|
.settings-label { font-size: 13px; color: var(--text2); white-space: nowrap; min-width: 80px; }
|
||||||
|
.settings-select { flex: 1; }
|
||||||
|
.import-row { display: flex; gap: 10px; align-items: center; }
|
||||||
|
.import-file-input { flex: 1; }
|
||||||
|
|
||||||
|
.btn-secondary {
|
||||||
|
background: var(--bg3);
|
||||||
|
border-color: var(--border2);
|
||||||
|
color: var(--text);
|
||||||
|
}
|
||||||
|
.btn-secondary:hover { border-color: var(--accent); color: var(--accent); }
|
||||||
|
|
||||||
|
.btn-danger { background: var(--red2); border-color: var(--red); color: var(--text); }
|
||||||
|
.btn-danger:hover { background: var(--red); }
|
||||||
|
|
||||||
/* ── SCROLLBAR ── */
|
/* ── SCROLLBAR ── */
|
||||||
::-webkit-scrollbar { width: 6px; }
|
::-webkit-scrollbar { width: 6px; }
|
||||||
::-webkit-scrollbar-track { background: var(--bg); }
|
::-webkit-scrollbar-track { background: var(--bg); }
|
||||||
@@ -633,3 +712,142 @@ select:focus { border-color: var(--accent); }
|
|||||||
0%, 100% { opacity: 1; }
|
0%, 100% { opacity: 1; }
|
||||||
50% { opacity: 0; }
|
50% { opacity: 0; }
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/* ── MOBILE ── */
|
||||||
|
@media (max-width: 640px) {
|
||||||
|
/* Reset desktop zoom — mobile browsers handle scaling themselves */
|
||||||
|
html { zoom: 1; }
|
||||||
|
|
||||||
|
/* Nav */
|
||||||
|
nav { padding: 0 16px; }
|
||||||
|
|
||||||
|
/* Dashboard header */
|
||||||
|
.dash-header { padding: 18px 16px 14px; }
|
||||||
|
|
||||||
|
/* Stats bar */
|
||||||
|
.stat-cell { padding: 10px 16px; }
|
||||||
|
|
||||||
|
/* Toolbar — search full-width on first row, filters + button below */
|
||||||
|
.toolbar { flex-wrap: wrap; padding: 10px 16px; gap: 8px; }
|
||||||
|
.search-wrap { max-width: 100%; }
|
||||||
|
.toolbar-right { margin-left: 0; width: 100%; justify-content: flex-end; }
|
||||||
|
|
||||||
|
/* Instance grid — single column */
|
||||||
|
.instance-grid {
|
||||||
|
grid-template-columns: 1fr;
|
||||||
|
padding: 12px 16px;
|
||||||
|
gap: 8px;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Detail page */
|
||||||
|
.detail-page { padding: 16px; }
|
||||||
|
|
||||||
|
/* Detail header — stack title block above actions */
|
||||||
|
.detail-header { flex-direction: column; align-items: flex-start; gap: 14px; }
|
||||||
|
|
||||||
|
/* Detail sub — wrap items when they don't fit */
|
||||||
|
.detail-sub { flex-wrap: wrap; row-gap: 4px; }
|
||||||
|
|
||||||
|
/* Detail grid — single column */
|
||||||
|
.detail-grid { grid-template-columns: 1fr; }
|
||||||
|
|
||||||
|
/* Toggle grid — 2 columns instead of 3 */
|
||||||
|
.toggle-grid { grid-template-columns: 1fr 1fr; }
|
||||||
|
|
||||||
|
/* Confirm box — no fixed width on mobile */
|
||||||
|
.confirm-box { width: auto; max-width: calc(100vw - 32px); padding: 18px; }
|
||||||
|
|
||||||
|
/* History timeline — stack timestamp above event */
|
||||||
|
.tl-item { flex-direction: column; align-items: flex-start; gap: 3px; }
|
||||||
|
.tl-time { order: -1; }
|
||||||
|
|
||||||
|
/* Toast — stretch across bottom */
|
||||||
|
.toast { right: 16px; left: 16px; bottom: 16px; }
|
||||||
|
|
||||||
|
/* Jobs — stack sidebar above detail */
|
||||||
|
.jobs-layout { grid-template-columns: 1fr; }
|
||||||
|
.jobs-sidebar { border-right: none; border-bottom: 1px solid var(--border); }
|
||||||
|
}
|
||||||
|
|
||||||
|
/* ── JOBS PAGE ───────────────────────────────────────────────────────────────── */
|
||||||
|
|
||||||
|
.jobs-layout {
|
||||||
|
display: grid;
|
||||||
|
grid-template-columns: 220px 1fr;
|
||||||
|
height: calc(100vh - 48px);
|
||||||
|
}
|
||||||
|
.jobs-sidebar {
|
||||||
|
border-right: 1px solid var(--border);
|
||||||
|
overflow-y: auto;
|
||||||
|
}
|
||||||
|
.jobs-sidebar-title {
|
||||||
|
padding: 16px 16px 8px;
|
||||||
|
font-size: 10px;
|
||||||
|
font-weight: 600;
|
||||||
|
text-transform: uppercase;
|
||||||
|
letter-spacing: 0.1em;
|
||||||
|
color: var(--text3);
|
||||||
|
}
|
||||||
|
.job-item {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: 10px;
|
||||||
|
padding: 12px 16px;
|
||||||
|
cursor: pointer;
|
||||||
|
border-bottom: 1px solid var(--border);
|
||||||
|
user-select: none;
|
||||||
|
}
|
||||||
|
.job-item:hover, .job-item.active { background: var(--bg2); }
|
||||||
|
.job-item-name { font-size: 13px; color: var(--text); }
|
||||||
|
.jobs-detail {
|
||||||
|
padding: 28px 32px;
|
||||||
|
overflow-y: auto;
|
||||||
|
max-width: 600px;
|
||||||
|
}
|
||||||
|
.jobs-detail-hd { margin-bottom: 20px; }
|
||||||
|
.jobs-detail-title { font-size: 17px; font-weight: 600; color: var(--text); }
|
||||||
|
.jobs-detail-desc { font-size: 12px; color: var(--text2); margin-top: 4px; line-height: 1.6; }
|
||||||
|
.job-actions { display: flex; gap: 8px; margin: 16px 0 0; }
|
||||||
|
.jobs-placeholder { padding: 48px 32px; color: var(--text3); font-size: 13px; }
|
||||||
|
|
||||||
|
/* Shared job status dot */
|
||||||
|
.job-dot {
|
||||||
|
width: 7px;
|
||||||
|
height: 7px;
|
||||||
|
border-radius: 50%;
|
||||||
|
flex-shrink: 0;
|
||||||
|
display: inline-block;
|
||||||
|
}
|
||||||
|
.job-dot--success { background: var(--accent); }
|
||||||
|
.job-dot--error { background: var(--red); }
|
||||||
|
.job-dot--running { background: var(--amber); animation: pulse 2s ease-in-out infinite; }
|
||||||
|
.job-dot--none { background: var(--border2); }
|
||||||
|
|
||||||
|
/* Run history list */
|
||||||
|
.run-item {
|
||||||
|
display: grid;
|
||||||
|
grid-template-columns: 10px 1fr 60px 1fr;
|
||||||
|
gap: 0 12px;
|
||||||
|
padding: 7px 0;
|
||||||
|
border-bottom: 1px solid var(--border);
|
||||||
|
font-size: 12px;
|
||||||
|
align-items: baseline;
|
||||||
|
}
|
||||||
|
.run-item:last-child { border-bottom: none; }
|
||||||
|
.run-time { color: var(--text3); }
|
||||||
|
.run-status { color: var(--text2); }
|
||||||
|
.run-result { color: var(--text); }
|
||||||
|
.run-empty { color: var(--text3); font-size: 12px; padding: 8px 0; }
|
||||||
|
|
||||||
|
/* Nav dot */
|
||||||
|
.nav-job-dot {
|
||||||
|
display: inline-block;
|
||||||
|
width: 6px;
|
||||||
|
height: 6px;
|
||||||
|
border-radius: 50%;
|
||||||
|
margin-left: 5px;
|
||||||
|
vertical-align: middle;
|
||||||
|
}
|
||||||
|
.nav-job-dot--success { background: var(--accent); }
|
||||||
|
.nav-job-dot--error { background: var(--red); }
|
||||||
|
.nav-job-dot--none { display: none; }
|
||||||
|
|||||||
0
data/.gitkeep
Normal file
0
data/.gitkeep
Normal file
@@ -3,4 +3,11 @@ services:
|
|||||||
image: ${REGISTRY:-gitea.thewrightserver.net/josh}/catalyst:${TAG:-latest}
|
image: ${REGISTRY:-gitea.thewrightserver.net/josh}/catalyst:${TAG:-latest}
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
ports:
|
ports:
|
||||||
- "${PORT:-3000}:80"
|
- "${PORT:-3000}:3000"
|
||||||
|
volumes:
|
||||||
|
- catalyst-data:/app/data
|
||||||
|
environment:
|
||||||
|
- NODE_ENV=production
|
||||||
|
|
||||||
|
volumes:
|
||||||
|
catalyst-data:
|
||||||
|
|||||||
52
index.html
52
index.html
@@ -3,6 +3,7 @@
|
|||||||
<head>
|
<head>
|
||||||
<meta charset="UTF-8">
|
<meta charset="UTF-8">
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||||
|
<base href="/">
|
||||||
<title>Catalyst</title>
|
<title>Catalyst</title>
|
||||||
<link rel="preconnect" href="https://fonts.googleapis.com">
|
<link rel="preconnect" href="https://fonts.googleapis.com">
|
||||||
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
|
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
|
||||||
@@ -21,6 +22,8 @@
|
|||||||
<span class="nav-divider">·</span>
|
<span class="nav-divider">·</span>
|
||||||
<span id="nav-version"></span>
|
<span id="nav-version"></span>
|
||||||
</div>
|
</div>
|
||||||
|
<button class="nav-btn" onclick="navigate('jobs')">Jobs <span id="nav-jobs-dot" class="nav-job-dot nav-job-dot--none"></span></button>
|
||||||
|
<button class="nav-btn" onclick="openSettingsModal()" title="Settings">⚙</button>
|
||||||
</nav>
|
</nav>
|
||||||
|
|
||||||
<main>
|
<main>
|
||||||
@@ -67,7 +70,6 @@
|
|||||||
<div class="detail-name" id="detail-name">—</div>
|
<div class="detail-name" id="detail-name">—</div>
|
||||||
<div class="detail-sub">
|
<div class="detail-sub">
|
||||||
<span><span class="lbl">vmid</span> <span class="val" id="detail-vmid-sub">—</span></span>
|
<span><span class="lbl">vmid</span> <span class="val" id="detail-vmid-sub">—</span></span>
|
||||||
<span><span class="lbl">id</span> <span class="val" id="detail-id-sub">—</span></span>
|
|
||||||
<span><span class="lbl">created</span> <span class="val" id="detail-created-sub">—</span></span>
|
<span><span class="lbl">created</span> <span class="val" id="detail-created-sub">—</span></span>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
@@ -90,12 +92,25 @@
|
|||||||
<div class="services-grid" id="detail-services"></div>
|
<div class="services-grid" id="detail-services"></div>
|
||||||
</div>
|
</div>
|
||||||
<div class="detail-section full">
|
<div class="detail-section full">
|
||||||
<div class="section-title">timestamps</div>
|
<div class="section-title">history</div>
|
||||||
<div id="detail-timestamps"></div>
|
<div id="detail-timestamps"></div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
<!-- JOBS PAGE -->
|
||||||
|
<div class="page" id="page-jobs">
|
||||||
|
<div class="jobs-layout">
|
||||||
|
<div class="jobs-sidebar">
|
||||||
|
<div class="jobs-sidebar-title">Jobs</div>
|
||||||
|
<div id="jobs-list"></div>
|
||||||
|
</div>
|
||||||
|
<div class="jobs-detail" id="jobs-detail">
|
||||||
|
<div class="jobs-placeholder">Select a job</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
</main>
|
</main>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
@@ -170,13 +185,44 @@
|
|||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
<!-- SETTINGS MODAL -->
|
||||||
|
<div id="settings-modal" class="modal-overlay">
|
||||||
|
<div class="modal">
|
||||||
|
<div class="modal-header">
|
||||||
|
<span class="modal-title">Settings</span>
|
||||||
|
<button class="modal-close" onclick="closeSettingsModal()">✕</button>
|
||||||
|
</div>
|
||||||
|
<div class="modal-body">
|
||||||
|
<div class="settings-section">
|
||||||
|
<div class="settings-section-title">Display</div>
|
||||||
|
<div class="settings-row">
|
||||||
|
<label class="settings-label" for="tz-select">Timezone</label>
|
||||||
|
<select id="tz-select" class="form-input settings-select"></select>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="settings-section">
|
||||||
|
<div class="settings-section-title">Export</div>
|
||||||
|
<p class="settings-desc">Download all instance data as a JSON backup file.</p>
|
||||||
|
<button class="btn btn-secondary" onclick="exportDB()">Export Database</button>
|
||||||
|
</div>
|
||||||
|
<div class="settings-section">
|
||||||
|
<div class="settings-section-title">Import</div>
|
||||||
|
<p class="settings-desc">Restore from a backup file. This replaces all current instances.</p>
|
||||||
|
<div class="import-row">
|
||||||
|
<input type="file" id="import-file" accept=".json" class="form-input import-file-input">
|
||||||
|
<button class="btn btn-danger" onclick="importDB()">Import</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
<!-- TOAST -->
|
<!-- TOAST -->
|
||||||
<div class="toast" id="toast">
|
<div class="toast" id="toast">
|
||||||
<div class="toast-dot"></div>
|
<div class="toast-dot"></div>
|
||||||
<span id="toast-msg"></span>
|
<span id="toast-msg"></span>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/sql.js/1.10.2/sql-wasm.js"></script>
|
|
||||||
<script src="js/version.js" onerror="window.VERSION=null"></script>
|
<script src="js/version.js" onerror="window.VERSION=null"></script>
|
||||||
<script src="js/config.js"></script>
|
<script src="js/config.js"></script>
|
||||||
<script src="js/db.js"></script>
|
<script src="js/db.js"></script>
|
||||||
|
|||||||
23
js/app.js
23
js/app.js
@@ -11,16 +11,24 @@ function navigate(page, vmid) {
|
|||||||
document.getElementById('page-detail').classList.add('active');
|
document.getElementById('page-detail').classList.add('active');
|
||||||
history.pushState({ page: 'instance', vmid }, '', `/instance/${vmid}`);
|
history.pushState({ page: 'instance', vmid }, '', `/instance/${vmid}`);
|
||||||
renderDetailPage(vmid);
|
renderDetailPage(vmid);
|
||||||
|
} else if (page === 'jobs') {
|
||||||
|
document.getElementById('page-jobs').classList.add('active');
|
||||||
|
history.pushState({ page: 'jobs' }, '', '/jobs');
|
||||||
|
renderJobsPage();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
function handleRoute() {
|
function handleRoute() {
|
||||||
const m = window.location.pathname.match(/^\/instance\/(\d+)/);
|
const m = window.location.pathname.match(/^\/instance\/(\d+)/);
|
||||||
if (m) {
|
if (window.location.pathname === '/jobs') {
|
||||||
|
document.getElementById('page-jobs').classList.add('active');
|
||||||
|
renderJobsPage();
|
||||||
|
} else if (m) {
|
||||||
document.getElementById('page-detail').classList.add('active');
|
document.getElementById('page-detail').classList.add('active');
|
||||||
renderDetailPage(parseInt(m[1], 10));
|
renderDetailPage(parseInt(m[1], 10));
|
||||||
} else {
|
} else {
|
||||||
document.getElementById('page-dashboard').classList.add('active');
|
document.getElementById('page-dashboard').classList.add('active');
|
||||||
|
renderDashboard();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -29,6 +37,9 @@ window.addEventListener('popstate', e => {
|
|||||||
if (e.state?.page === 'instance') {
|
if (e.state?.page === 'instance') {
|
||||||
document.getElementById('page-detail').classList.add('active');
|
document.getElementById('page-detail').classList.add('active');
|
||||||
renderDetailPage(e.state.vmid);
|
renderDetailPage(e.state.vmid);
|
||||||
|
} else if (e.state?.page === 'jobs') {
|
||||||
|
document.getElementById('page-jobs').classList.add('active');
|
||||||
|
renderJobsPage();
|
||||||
} else {
|
} else {
|
||||||
document.getElementById('page-dashboard').classList.add('active');
|
document.getElementById('page-dashboard').classList.add('active');
|
||||||
renderDashboard();
|
renderDashboard();
|
||||||
@@ -37,9 +48,9 @@ window.addEventListener('popstate', e => {
|
|||||||
|
|
||||||
// ── Bootstrap ─────────────────────────────────────────────────────────────────
|
// ── Bootstrap ─────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
if (VERSION) document.getElementById('nav-version').textContent = `v${VERSION}`;
|
if (VERSION) {
|
||||||
|
const label = /^\d/.test(VERSION) ? `v${VERSION}` : VERSION;
|
||||||
|
document.getElementById('nav-version').textContent = label;
|
||||||
|
}
|
||||||
|
|
||||||
initDB().then(() => {
|
handleRoute();
|
||||||
renderDashboard();
|
|
||||||
handleRoute();
|
|
||||||
});
|
|
||||||
|
|||||||
19
js/config.js
19
js/config.js
@@ -1,21 +1,6 @@
|
|||||||
// Services shown as dots on instance cards (all tracked services)
|
// Services shown as dots on instance cards
|
||||||
const CARD_SERVICES = ['atlas', 'argus', 'semaphore', 'patchmon', 'tailscale', 'andromeda'];
|
const CARD_SERVICES = ['atlas', 'argus', 'semaphore', 'patchmon', 'tailscale', 'andromeda'];
|
||||||
|
|
||||||
// Services shown in the detail page service grid
|
// Services shown in the detail page service grid
|
||||||
// (tailscale is shown separately under "network" alongside its IP)
|
// (tailscale lives in the network section alongside its IP)
|
||||||
const DETAIL_SERVICES = ['atlas', 'argus', 'semaphore', 'patchmon', 'andromeda'];
|
const DETAIL_SERVICES = ['atlas', 'argus', 'semaphore', 'patchmon', 'andromeda'];
|
||||||
|
|
||||||
const SQL_JS_CDN = 'https://cdnjs.cloudflare.com/ajax/libs/sql.js/1.10.2/';
|
|
||||||
|
|
||||||
const STORAGE_KEY = 'catalyst_db';
|
|
||||||
|
|
||||||
const SEED = [
|
|
||||||
{ name: 'plex', state: 'deployed', stack: 'production', vmid: 117, atlas: true, argus: true, semaphore: false, patchmon: true, tailscale: true, andromeda: false, tailscale_ip: '100.64.0.1', hardware_acceleration: true },
|
|
||||||
{ name: 'foldergram', state: 'testing', stack: 'development', vmid: 137, atlas: false, argus: false, semaphore: false, patchmon: false, tailscale: false, andromeda: false, tailscale_ip: '', hardware_acceleration: false },
|
|
||||||
{ name: 'homeassistant', state: 'deployed', stack: 'production', vmid: 102, atlas: true, argus: true, semaphore: true, patchmon: true, tailscale: true, andromeda: false, tailscale_ip: '100.64.0.5', hardware_acceleration: false },
|
|
||||||
{ name: 'gitea', state: 'deployed', stack: 'production', vmid: 110, atlas: true, argus: false, semaphore: true, patchmon: true, tailscale: true, andromeda: false, tailscale_ip: '100.64.0.8', hardware_acceleration: false },
|
|
||||||
{ name: 'postgres-primary', state: 'degraded', stack: 'production', vmid: 201, atlas: true, argus: true, semaphore: false, patchmon: true, tailscale: false, andromeda: true, tailscale_ip: '', hardware_acceleration: false },
|
|
||||||
{ name: 'nextcloud', state: 'testing', stack: 'development', vmid: 144, atlas: false, argus: false, semaphore: false, patchmon: false, tailscale: true, andromeda: false, tailscale_ip: '100.64.0.12', hardware_acceleration: false },
|
|
||||||
{ name: 'traefik', state: 'deployed', stack: 'production', vmid: 100, atlas: true, argus: true, semaphore: false, patchmon: true, tailscale: true, andromeda: false, tailscale_ip: '100.64.0.2', hardware_acceleration: false },
|
|
||||||
{ name: 'monitoring-stack', state: 'testing', stack: 'development', vmid: 155, atlas: false, argus: false, semaphore: true, patchmon: false, tailscale: false, andromeda: false, tailscale_ip: '', hardware_acceleration: false },
|
|
||||||
];
|
|
||||||
|
|||||||
183
js/db.js
183
js/db.js
@@ -1,159 +1,62 @@
|
|||||||
let db = null;
|
// API client — replaces the sql.js database layer.
|
||||||
|
// Swap these fetch() calls for any other transport when needed.
|
||||||
|
|
||||||
// ── Persistence ──────────────────────────────────────────────────────────────
|
const BASE = '/api';
|
||||||
|
|
||||||
function saveToStorage() {
|
async function api(path, options = {}) {
|
||||||
try {
|
const res = await fetch(BASE + path, options);
|
||||||
const data = db.export(); // Uint8Array
|
if (res.status === 204) return null;
|
||||||
let binary = '';
|
return res.json().then(data => ({ ok: res.ok, status: res.status, data }));
|
||||||
const chunk = 8192;
|
|
||||||
for (let i = 0; i < data.length; i += chunk) {
|
|
||||||
binary += String.fromCharCode(...data.subarray(i, i + chunk));
|
|
||||||
}
|
|
||||||
localStorage.setItem(STORAGE_KEY, btoa(binary));
|
|
||||||
} catch (e) {
|
|
||||||
console.warn('catalyst: failed to persist database', e);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
function loadFromStorage() {
|
// ── Queries ───────────────────────────────────────────────────────────────────
|
||||||
try {
|
|
||||||
const stored = localStorage.getItem(STORAGE_KEY);
|
async function getInstances(filters = {}) {
|
||||||
if (!stored) return null;
|
const params = new URLSearchParams(
|
||||||
const binary = atob(stored);
|
Object.entries(filters).filter(([, v]) => v)
|
||||||
const buf = new Uint8Array(binary.length);
|
);
|
||||||
for (let i = 0; i < binary.length; i++) buf[i] = binary.charCodeAt(i);
|
const res = await fetch(`${BASE}/instances?${params}`);
|
||||||
return buf;
|
return res.json();
|
||||||
} catch (e) {
|
|
||||||
console.warn('catalyst: failed to load database from storage', e);
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// ── Init ─────────────────────────────────────────────────────────────────────
|
async function getInstance(vmid) {
|
||||||
|
const res = await fetch(`${BASE}/instances/${vmid}`);
|
||||||
async function initDB() {
|
if (res.status === 404) return null;
|
||||||
const SQL = await initSqlJs({ locateFile: f => SQL_JS_CDN + f });
|
return res.json();
|
||||||
|
|
||||||
const saved = loadFromStorage();
|
|
||||||
if (saved) {
|
|
||||||
db = new SQL.Database(saved);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
db = new SQL.Database();
|
|
||||||
db.run(`
|
|
||||||
CREATE TABLE instances (
|
|
||||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
||||||
name TEXT NOT NULL,
|
|
||||||
state TEXT DEFAULT 'deployed',
|
|
||||||
stack TEXT DEFAULT '',
|
|
||||||
vmid INTEGER UNIQUE NOT NULL,
|
|
||||||
atlas INTEGER DEFAULT 0,
|
|
||||||
argus INTEGER DEFAULT 0,
|
|
||||||
semaphore INTEGER DEFAULT 0,
|
|
||||||
patchmon INTEGER DEFAULT 0,
|
|
||||||
tailscale INTEGER DEFAULT 0,
|
|
||||||
andromeda INTEGER DEFAULT 0,
|
|
||||||
tailscale_ip TEXT DEFAULT '',
|
|
||||||
hardware_acceleration INTEGER DEFAULT 0,
|
|
||||||
createdAt TEXT DEFAULT (datetime('now')),
|
|
||||||
updatedAt TEXT DEFAULT (datetime('now'))
|
|
||||||
)
|
|
||||||
`);
|
|
||||||
|
|
||||||
const stmt = db.prepare(`
|
|
||||||
INSERT INTO instances
|
|
||||||
(name, state, stack, vmid, atlas, argus, semaphore, patchmon, tailscale, andromeda, tailscale_ip, hardware_acceleration)
|
|
||||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
|
||||||
`);
|
|
||||||
|
|
||||||
SEED.forEach(s => stmt.run([
|
|
||||||
s.name, s.state, s.stack, s.vmid,
|
|
||||||
+s.atlas, +s.argus, +s.semaphore, +s.patchmon,
|
|
||||||
+s.tailscale, +s.andromeda, s.tailscale_ip, +s.hardware_acceleration,
|
|
||||||
]));
|
|
||||||
|
|
||||||
stmt.free();
|
|
||||||
saveToStorage();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// ── Queries ──────────────────────────────────────────────────────────────────
|
async function getDistinctStacks() {
|
||||||
|
const res = await fetch(`${BASE}/instances/stacks`);
|
||||||
function getInstances(filters = {}) {
|
return res.json();
|
||||||
let sql = 'SELECT * FROM instances WHERE 1=1';
|
|
||||||
const params = [];
|
|
||||||
|
|
||||||
if (filters.search) {
|
|
||||||
sql += ' AND (name LIKE ? OR CAST(vmid AS TEXT) LIKE ? OR stack LIKE ?)';
|
|
||||||
const s = `%${filters.search}%`;
|
|
||||||
params.push(s, s, s);
|
|
||||||
}
|
|
||||||
if (filters.state) { sql += ' AND state = ?'; params.push(filters.state); }
|
|
||||||
if (filters.stack) { sql += ' AND stack = ?'; params.push(filters.stack); }
|
|
||||||
|
|
||||||
sql += ' ORDER BY name ASC';
|
|
||||||
|
|
||||||
const res = db.exec(sql, params);
|
|
||||||
if (!res.length) return [];
|
|
||||||
const cols = res[0].columns;
|
|
||||||
return res[0].values.map(row => Object.fromEntries(cols.map((c, i) => [c, row[i]])));
|
|
||||||
}
|
|
||||||
|
|
||||||
function getInstance(vmid) {
|
|
||||||
const res = db.exec('SELECT * FROM instances WHERE vmid = ?', [vmid]);
|
|
||||||
if (!res.length) return null;
|
|
||||||
const cols = res[0].columns;
|
|
||||||
return Object.fromEntries(cols.map((c, i) => [c, res[0].values[0][i]]));
|
|
||||||
}
|
|
||||||
|
|
||||||
function getDistinctStacks() {
|
|
||||||
const res = db.exec(`SELECT DISTINCT stack FROM instances WHERE stack != '' ORDER BY stack`);
|
|
||||||
if (!res.length) return [];
|
|
||||||
return res[0].values.map(row => row[0]);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// ── Mutations ─────────────────────────────────────────────────────────────────
|
// ── Mutations ─────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
function createInstance(data) {
|
async function createInstance(data) {
|
||||||
try {
|
const { ok, data: body } = await api('/instances', {
|
||||||
db.run(
|
method: 'POST',
|
||||||
`INSERT INTO instances
|
headers: { 'Content-Type': 'application/json' },
|
||||||
(name, state, stack, vmid, atlas, argus, semaphore, patchmon, tailscale, andromeda, tailscale_ip, hardware_acceleration)
|
body: JSON.stringify(data),
|
||||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
|
});
|
||||||
[data.name, data.state, data.stack, data.vmid,
|
if (!ok) return { ok: false, error: body.error ?? body.errors?.[0] ?? 'error creating instance' };
|
||||||
data.atlas, data.argus, data.semaphore, data.patchmon,
|
|
||||||
data.tailscale, data.andromeda, data.tailscale_ip, data.hardware_acceleration]
|
|
||||||
);
|
|
||||||
saveToStorage();
|
|
||||||
return { ok: true };
|
return { ok: true };
|
||||||
} catch (e) {
|
|
||||||
return { ok: false, error: e.message };
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
function updateInstance(id, data) {
|
async function updateInstance(vmid, data) {
|
||||||
try {
|
const { ok, data: body } = await api(`/instances/${vmid}`, {
|
||||||
db.run(
|
method: 'PUT',
|
||||||
`UPDATE instances SET
|
headers: { 'Content-Type': 'application/json' },
|
||||||
name=?, state=?, stack=?, vmid=?,
|
body: JSON.stringify(data),
|
||||||
atlas=?, argus=?, semaphore=?, patchmon=?,
|
});
|
||||||
tailscale=?, andromeda=?, tailscale_ip=?, hardware_acceleration=?,
|
if (!ok) return { ok: false, error: body.error ?? body.errors?.[0] ?? 'error updating instance' };
|
||||||
updatedAt=datetime('now')
|
|
||||||
WHERE id=?`,
|
|
||||||
[data.name, data.state, data.stack, data.vmid,
|
|
||||||
data.atlas, data.argus, data.semaphore, data.patchmon,
|
|
||||||
data.tailscale, data.andromeda, data.tailscale_ip, data.hardware_acceleration,
|
|
||||||
id]
|
|
||||||
);
|
|
||||||
saveToStorage();
|
|
||||||
return { ok: true };
|
return { ok: true };
|
||||||
} catch (e) {
|
|
||||||
return { ok: false, error: e.message };
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
function deleteInstance(id) {
|
async function deleteInstance(vmid) {
|
||||||
db.run('DELETE FROM instances WHERE id = ?', [id]);
|
await api(`/instances/${vmid}`, { method: 'DELETE' });
|
||||||
saveToStorage();
|
}
|
||||||
|
|
||||||
|
async function getInstanceHistory(vmid) {
|
||||||
|
const res = await fetch(`${BASE}/instances/${vmid}/history`);
|
||||||
|
return res.json();
|
||||||
}
|
}
|
||||||
|
|||||||
366
js/ui.js
366
js/ui.js
@@ -1,8 +1,36 @@
|
|||||||
// Module-level UI state
|
// Module-level UI state
|
||||||
let editingId = null;
|
let editingVmid = null;
|
||||||
let currentVmid = null;
|
let currentVmid = null;
|
||||||
let toastTimer = null;
|
let toastTimer = null;
|
||||||
|
|
||||||
|
// ── Timezone ──────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
const TIMEZONES = [
|
||||||
|
{ label: 'UTC', tz: 'UTC' },
|
||||||
|
{ label: 'Hawaii (HST)', tz: 'Pacific/Honolulu' },
|
||||||
|
{ label: 'Alaska (AKT)', tz: 'America/Anchorage' },
|
||||||
|
{ label: 'Pacific (PT)', tz: 'America/Los_Angeles' },
|
||||||
|
{ label: 'Mountain (MT)', tz: 'America/Denver' },
|
||||||
|
{ label: 'Central (CT)', tz: 'America/Chicago' },
|
||||||
|
{ label: 'Eastern (ET)', tz: 'America/New_York' },
|
||||||
|
{ label: 'Atlantic (AT)', tz: 'America/Halifax' },
|
||||||
|
{ label: 'London (GMT/BST)', tz: 'Europe/London' },
|
||||||
|
{ label: 'Paris / Berlin (CET)', tz: 'Europe/Paris' },
|
||||||
|
{ label: 'Helsinki (EET)', tz: 'Europe/Helsinki' },
|
||||||
|
{ label: 'Istanbul (TRT)', tz: 'Europe/Istanbul' },
|
||||||
|
{ label: 'Dubai (GST)', tz: 'Asia/Dubai' },
|
||||||
|
{ label: 'India (IST)', tz: 'Asia/Kolkata' },
|
||||||
|
{ label: 'Singapore (SGT)', tz: 'Asia/Singapore' },
|
||||||
|
{ label: 'China (CST)', tz: 'Asia/Shanghai' },
|
||||||
|
{ label: 'Japan / Korea (JST/KST)', tz: 'Asia/Tokyo' },
|
||||||
|
{ label: 'Sydney (AEST)', tz: 'Australia/Sydney' },
|
||||||
|
{ label: 'Auckland (NZST)', tz: 'Pacific/Auckland' },
|
||||||
|
];
|
||||||
|
|
||||||
|
function getTimezone() {
|
||||||
|
return localStorage.getItem('catalyst_tz') || 'UTC';
|
||||||
|
}
|
||||||
|
|
||||||
// ── Helpers ───────────────────────────────────────────────────────────────────
|
// ── Helpers ───────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
function esc(str) {
|
function esc(str) {
|
||||||
@@ -11,24 +39,32 @@ function esc(str) {
|
|||||||
return d.innerHTML;
|
return d.innerHTML;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// SQLite datetime('now') → 'YYYY-MM-DD HH:MM:SS' (UTC, no timezone marker).
|
||||||
|
// Appending 'Z' tells JS to parse it as UTC rather than local time.
|
||||||
|
function parseUtc(d) {
|
||||||
|
if (typeof d !== 'string') return new Date(d);
|
||||||
|
const hasZone = d.endsWith('Z') || /[+-]\d{2}:\d{2}$/.test(d);
|
||||||
|
return new Date(hasZone ? d : d.replace(' ', 'T') + 'Z');
|
||||||
|
}
|
||||||
|
|
||||||
function fmtDate(d) {
|
function fmtDate(d) {
|
||||||
if (!d) return '—';
|
if (!d) return '—';
|
||||||
try {
|
try {
|
||||||
return new Date(d).toLocaleDateString('en-US', { year: 'numeric', month: 'short', day: 'numeric' });
|
return parseUtc(d).toLocaleDateString('en-US', { year: 'numeric', month: 'short', day: 'numeric', timeZone: getTimezone() });
|
||||||
} catch (e) { return d; }
|
} catch (e) { return d; }
|
||||||
}
|
}
|
||||||
|
|
||||||
function fmtDateFull(d) {
|
function fmtDateFull(d) {
|
||||||
if (!d) return '—';
|
if (!d) return '—';
|
||||||
try {
|
try {
|
||||||
return new Date(d).toLocaleString('en-US', { year: 'numeric', month: 'short', day: 'numeric', hour: '2-digit', minute: '2-digit' });
|
return parseUtc(d).toLocaleString('en-US', { year: 'numeric', month: 'short', day: 'numeric', hour: '2-digit', minute: '2-digit', timeZone: getTimezone(), timeZoneName: 'short' });
|
||||||
} catch (e) { return d; }
|
} catch (e) { return d; }
|
||||||
}
|
}
|
||||||
|
|
||||||
// ── Dashboard ─────────────────────────────────────────────────────────────────
|
// ── Dashboard ─────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
function renderDashboard() {
|
async function renderDashboard() {
|
||||||
const all = getInstances();
|
const all = await getInstances();
|
||||||
document.getElementById('nav-count').textContent = `${all.length} instance${all.length !== 1 ? 's' : ''}`;
|
document.getElementById('nav-count').textContent = `${all.length} instance${all.length !== 1 ? 's' : ''}`;
|
||||||
|
|
||||||
const states = {};
|
const states = {};
|
||||||
@@ -39,18 +75,18 @@ function renderDashboard() {
|
|||||||
<div class="stat-cell"><div class="stat-label">deployed</div><div class="stat-value">${states['deployed'] || 0}</div></div>
|
<div class="stat-cell"><div class="stat-label">deployed</div><div class="stat-value">${states['deployed'] || 0}</div></div>
|
||||||
<div class="stat-cell"><div class="stat-label">testing</div><div class="stat-value amber">${states['testing'] || 0}</div></div>
|
<div class="stat-cell"><div class="stat-label">testing</div><div class="stat-value amber">${states['testing'] || 0}</div></div>
|
||||||
<div class="stat-cell"><div class="stat-label">degraded</div><div class="stat-value red">${states['degraded'] || 0}</div></div>
|
<div class="stat-cell"><div class="stat-label">degraded</div><div class="stat-value red">${states['degraded'] || 0}</div></div>
|
||||||
<div class="stat-cell"><div class="stat-label">stacks</div><div class="stat-value">${getDistinctStacks().length}</div></div>
|
|
||||||
`;
|
`;
|
||||||
|
|
||||||
populateStackFilter();
|
await populateStackFilter();
|
||||||
filterInstances();
|
await filterInstances();
|
||||||
}
|
}
|
||||||
|
|
||||||
function populateStackFilter() {
|
async function populateStackFilter() {
|
||||||
const select = document.getElementById('filter-stack');
|
const select = document.getElementById('filter-stack');
|
||||||
const current = select.value;
|
const current = select.value;
|
||||||
select.innerHTML = '<option value="">all stacks</option>';
|
select.innerHTML = '<option value="">all stacks</option>';
|
||||||
getDistinctStacks().forEach(s => {
|
const stacks = await getDistinctStacks();
|
||||||
|
stacks.forEach(s => {
|
||||||
const opt = document.createElement('option');
|
const opt = document.createElement('option');
|
||||||
opt.value = s;
|
opt.value = s;
|
||||||
opt.textContent = s;
|
opt.textContent = s;
|
||||||
@@ -59,11 +95,11 @@ function populateStackFilter() {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
function filterInstances() {
|
async function filterInstances() {
|
||||||
const search = document.getElementById('search-input').value;
|
const search = document.getElementById('search-input').value;
|
||||||
const state = document.getElementById('filter-state').value;
|
const state = document.getElementById('filter-state').value;
|
||||||
const stack = document.getElementById('filter-stack').value;
|
const stack = document.getElementById('filter-stack').value;
|
||||||
const instances = getInstances({ search, state, stack });
|
const instances = await getInstances({ search, state, stack });
|
||||||
const grid = document.getElementById('instance-grid');
|
const grid = document.getElementById('instance-grid');
|
||||||
|
|
||||||
if (!instances.length) {
|
if (!instances.length) {
|
||||||
@@ -76,7 +112,6 @@ function filterInstances() {
|
|||||||
`<div class="svc-dot ${inst[s] ? 'on' : ''}" title="${s}"></div>`
|
`<div class="svc-dot ${inst[s] ? 'on' : ''}" title="${s}"></div>`
|
||||||
).join('');
|
).join('');
|
||||||
const activeCount = CARD_SERVICES.filter(s => inst[s]).length;
|
const activeCount = CARD_SERVICES.filter(s => inst[s]).length;
|
||||||
|
|
||||||
return `
|
return `
|
||||||
<div class="instance-card state-${esc(inst.state)}" onclick="navigate('instance', ${inst.vmid})">
|
<div class="instance-card state-${esc(inst.state)}" onclick="navigate('instance', ${inst.vmid})">
|
||||||
<div class="card-top">
|
<div class="card-top">
|
||||||
@@ -100,23 +135,50 @@ function filterInstances() {
|
|||||||
|
|
||||||
// ── Detail Page ───────────────────────────────────────────────────────────────
|
// ── Detail Page ───────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
function renderDetailPage(vmid) {
|
const BOOL_FIELDS = ['atlas','argus','semaphore','patchmon','tailscale','andromeda','hardware_acceleration'];
|
||||||
const inst = getInstance(vmid);
|
|
||||||
|
const FIELD_LABELS = {
|
||||||
|
name: 'name',
|
||||||
|
state: 'state',
|
||||||
|
stack: 'stack',
|
||||||
|
vmid: 'vmid',
|
||||||
|
tailscale_ip: 'tailscale ip',
|
||||||
|
atlas: 'atlas',
|
||||||
|
argus: 'argus',
|
||||||
|
semaphore: 'semaphore',
|
||||||
|
patchmon: 'patchmon',
|
||||||
|
tailscale: 'tailscale',
|
||||||
|
andromeda: 'andromeda',
|
||||||
|
hardware_acceleration: 'hw acceleration',
|
||||||
|
};
|
||||||
|
|
||||||
|
function stateClass(field, val) {
|
||||||
|
if (field !== 'state') return '';
|
||||||
|
return { deployed: 'tl-deployed', testing: 'tl-testing', degraded: 'tl-degraded' }[val] ?? '';
|
||||||
|
}
|
||||||
|
|
||||||
|
function fmtHistVal(field, val) {
|
||||||
|
if (val == null || val === '') return '—';
|
||||||
|
if (BOOL_FIELDS.includes(field)) return val === '1' ? 'on' : 'off';
|
||||||
|
return esc(val);
|
||||||
|
}
|
||||||
|
|
||||||
|
async function renderDetailPage(vmid) {
|
||||||
|
const [inst, history, all] = await Promise.all([getInstance(vmid), getInstanceHistory(vmid), getInstances()]);
|
||||||
if (!inst) { navigate('dashboard'); return; }
|
if (!inst) { navigate('dashboard'); return; }
|
||||||
currentVmid = vmid;
|
currentVmid = vmid;
|
||||||
|
document.getElementById('nav-count').textContent = `${all.length} instance${all.length !== 1 ? 's' : ''}`;
|
||||||
|
|
||||||
document.getElementById('detail-vmid-crumb').textContent = vmid;
|
document.getElementById('detail-vmid-crumb').textContent = vmid;
|
||||||
document.getElementById('detail-name').textContent = inst.name;
|
document.getElementById('detail-name').textContent = inst.name;
|
||||||
document.getElementById('detail-vmid-sub').textContent = inst.vmid;
|
document.getElementById('detail-vmid-sub').textContent = inst.vmid;
|
||||||
document.getElementById('detail-id-sub').textContent = inst.id;
|
document.getElementById('detail-created-sub').textContent = fmtDate(inst.created_at);
|
||||||
document.getElementById('detail-created-sub').textContent = fmtDate(inst.createdAt);
|
|
||||||
|
|
||||||
document.getElementById('detail-identity').innerHTML = `
|
document.getElementById('detail-identity').innerHTML = `
|
||||||
<div class="kv-row"><span class="kv-key">name</span><span class="kv-val highlight">${esc(inst.name)}</span></div>
|
<div class="kv-row"><span class="kv-key">name</span><span class="kv-val highlight">${esc(inst.name)}</span></div>
|
||||||
<div class="kv-row"><span class="kv-key">state</span><span class="kv-val"><span class="badge ${esc(inst.state)}">${esc(inst.state)}</span></span></div>
|
<div class="kv-row"><span class="kv-key">state</span><span class="kv-val"><span class="badge ${esc(inst.state)}">${esc(inst.state)}</span></span></div>
|
||||||
<div class="kv-row"><span class="kv-key">stack</span><span class="kv-val highlight">${esc(inst.stack) || '—'}</span></div>
|
<div class="kv-row"><span class="kv-key">stack</span><span class="kv-val"><span class="badge ${esc(inst.stack)}">${esc(inst.stack) || '—'}</span></span></div>
|
||||||
<div class="kv-row"><span class="kv-key">vmid</span><span class="kv-val highlight">${inst.vmid}</span></div>
|
<div class="kv-row"><span class="kv-key">vmid</span><span class="kv-val highlight">${inst.vmid}</span></div>
|
||||||
<div class="kv-row"><span class="kv-key">internal id</span><span class="kv-val">${inst.id}</span></div>
|
|
||||||
`;
|
`;
|
||||||
|
|
||||||
document.getElementById('detail-network').innerHTML = `
|
document.getElementById('detail-network').innerHTML = `
|
||||||
@@ -134,10 +196,30 @@ function renderDetailPage(vmid) {
|
|||||||
</div>
|
</div>
|
||||||
`).join('');
|
`).join('');
|
||||||
|
|
||||||
document.getElementById('detail-timestamps').innerHTML = `
|
document.getElementById('detail-timestamps').innerHTML = history.length
|
||||||
<div class="kv-row"><span class="kv-key">created</span><span class="kv-val">${fmtDateFull(inst.createdAt)}</span></div>
|
? history.map(e => {
|
||||||
<div class="kv-row"><span class="kv-key">updated</span><span class="kv-val">${fmtDateFull(inst.updatedAt)}</span></div>
|
if (e.field === 'created') return `
|
||||||
`;
|
<div class="tl-item tl-created">
|
||||||
|
<span class="tl-event">instance created</span>
|
||||||
|
<span class="tl-time">${fmtDateFull(e.changed_at)}</span>
|
||||||
|
</div>`;
|
||||||
|
const label = FIELD_LABELS[e.field] ?? esc(e.field);
|
||||||
|
const newCls = (e.field === 'state' || e.field === 'stack')
|
||||||
|
? `badge ${esc(e.new_value)}`
|
||||||
|
: `tl-new ${stateClass(e.field, e.new_value)}`;
|
||||||
|
return `
|
||||||
|
<div class="tl-item">
|
||||||
|
<div class="tl-event">
|
||||||
|
<span class="tl-label">${label}</span>
|
||||||
|
<span class="tl-sep">·</span>
|
||||||
|
<span class="tl-old">${fmtHistVal(e.field, e.old_value)}</span>
|
||||||
|
<span class="tl-arrow">→</span>
|
||||||
|
<span class="${newCls}">${fmtHistVal(e.field, e.new_value)}</span>
|
||||||
|
</div>
|
||||||
|
<span class="tl-time">${fmtDateFull(e.changed_at)}</span>
|
||||||
|
</div>`;
|
||||||
|
}).join('')
|
||||||
|
: '<div class="tl-empty">no history yet</div>';
|
||||||
|
|
||||||
document.getElementById('detail-edit-btn').onclick = () => openEditModal(inst.vmid);
|
document.getElementById('detail-edit-btn').onclick = () => openEditModal(inst.vmid);
|
||||||
document.getElementById('detail-delete-btn').onclick = () => confirmDeleteDialog(inst);
|
document.getElementById('detail-delete-btn').onclick = () => confirmDeleteDialog(inst);
|
||||||
@@ -146,16 +228,16 @@ function renderDetailPage(vmid) {
|
|||||||
// ── Modal ─────────────────────────────────────────────────────────────────────
|
// ── Modal ─────────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
function openNewModal() {
|
function openNewModal() {
|
||||||
editingId = null;
|
editingVmid = null;
|
||||||
document.getElementById('modal-title').textContent = 'new instance';
|
document.getElementById('modal-title').textContent = 'new instance';
|
||||||
clearForm();
|
clearForm();
|
||||||
document.getElementById('instance-modal').classList.add('open');
|
document.getElementById('instance-modal').classList.add('open');
|
||||||
}
|
}
|
||||||
|
|
||||||
function openEditModal(vmid) {
|
async function openEditModal(vmid) {
|
||||||
const inst = getInstance(vmid);
|
const inst = await getInstance(vmid);
|
||||||
if (!inst) return;
|
if (!inst) return;
|
||||||
editingId = inst.id;
|
editingVmid = inst.vmid;
|
||||||
document.getElementById('modal-title').textContent = `edit / ${inst.name}`;
|
document.getElementById('modal-title').textContent = `edit / ${inst.name}`;
|
||||||
document.getElementById('f-name').value = inst.name;
|
document.getElementById('f-name').value = inst.name;
|
||||||
document.getElementById('f-vmid').value = inst.vmid;
|
document.getElementById('f-vmid').value = inst.vmid;
|
||||||
@@ -186,19 +268,18 @@ function clearForm() {
|
|||||||
.forEach(id => { document.getElementById(id).checked = false; });
|
.forEach(id => { document.getElementById(id).checked = false; });
|
||||||
}
|
}
|
||||||
|
|
||||||
function saveInstance() {
|
async function saveInstance() {
|
||||||
const name = document.getElementById('f-name').value.trim();
|
const name = document.getElementById('f-name').value.trim();
|
||||||
const vmid = parseInt(document.getElementById('f-vmid').value, 10);
|
const vmid = parseInt(document.getElementById('f-vmid').value, 10);
|
||||||
const state = document.getElementById('f-state').value;
|
const state = document.getElementById('f-state').value;
|
||||||
const stack = document.getElementById('f-stack').value;
|
const stack = document.getElementById('f-stack').value;
|
||||||
const tip = document.getElementById('f-tailscale-ip').value.trim();
|
|
||||||
|
|
||||||
if (!name) { showToast('name is required', 'error'); return; }
|
if (!name) { showToast('name is required', 'error'); return; }
|
||||||
if (!vmid || vmid < 1) { showToast('a valid vmid is required', 'error'); return; }
|
if (!vmid || vmid < 1) { showToast('a valid vmid is required', 'error'); return; }
|
||||||
|
|
||||||
const data = {
|
const data = {
|
||||||
name, state, stack, vmid,
|
name, state, stack, vmid,
|
||||||
tailscale_ip: tip,
|
tailscale_ip: document.getElementById('f-tailscale-ip').value.trim(),
|
||||||
atlas: +document.getElementById('f-atlas').checked,
|
atlas: +document.getElementById('f-atlas').checked,
|
||||||
argus: +document.getElementById('f-argus').checked,
|
argus: +document.getElementById('f-argus').checked,
|
||||||
semaphore: +document.getElementById('f-semaphore').checked,
|
semaphore: +document.getElementById('f-semaphore').checked,
|
||||||
@@ -208,20 +289,19 @@ function saveInstance() {
|
|||||||
hardware_acceleration: +document.getElementById('f-hardware-accel').checked,
|
hardware_acceleration: +document.getElementById('f-hardware-accel').checked,
|
||||||
};
|
};
|
||||||
|
|
||||||
const result = editingId ? updateInstance(editingId, data) : createInstance(data);
|
const result = editingVmid
|
||||||
|
? await updateInstance(editingVmid, data)
|
||||||
|
: await createInstance(data);
|
||||||
|
|
||||||
if (!result.ok) {
|
if (!result.ok) { showToast(result.error, 'error'); return; }
|
||||||
showToast(result.error.includes('UNIQUE') ? 'vmid already exists' : 'error saving instance', 'error');
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
showToast(`${name} ${editingId ? 'updated' : 'created'}`, 'success');
|
showToast(`${name} ${editingVmid ? 'updated' : 'created'}`, 'success');
|
||||||
closeModal();
|
closeModal();
|
||||||
|
|
||||||
if (currentVmid && document.getElementById('page-detail').classList.contains('active')) {
|
if (currentVmid && document.getElementById('page-detail').classList.contains('active')) {
|
||||||
renderDetailPage(vmid);
|
await renderDetailPage(vmid);
|
||||||
} else {
|
} else {
|
||||||
renderDashboard();
|
await renderDashboard();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -232,11 +312,10 @@ function confirmDeleteDialog(inst) {
|
|||||||
showToast(`demote ${inst.name} to development before deleting`, 'error');
|
showToast(`demote ${inst.name} to development before deleting`, 'error');
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
document.getElementById('confirm-title').textContent = `delete ${inst.name}?`;
|
document.getElementById('confirm-title').textContent = `delete ${inst.name}?`;
|
||||||
document.getElementById('confirm-msg').textContent =
|
document.getElementById('confirm-msg').textContent =
|
||||||
`This will permanently remove instance "${inst.name}" (vmid: ${inst.vmid}) from Catalyst. This action cannot be undone.`;
|
`This will permanently remove instance "${inst.name}" (vmid: ${inst.vmid}) from Catalyst. This action cannot be undone.`;
|
||||||
document.getElementById('confirm-ok').onclick = () => doDelete(inst.id, inst.name);
|
document.getElementById('confirm-ok').onclick = () => doDelete(inst.vmid, inst.name);
|
||||||
document.getElementById('confirm-overlay').classList.add('open');
|
document.getElementById('confirm-overlay').classList.add('open');
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -244,9 +323,9 @@ function closeConfirm() {
|
|||||||
document.getElementById('confirm-overlay').classList.remove('open');
|
document.getElementById('confirm-overlay').classList.remove('open');
|
||||||
}
|
}
|
||||||
|
|
||||||
function doDelete(id, name) {
|
async function doDelete(vmid, name) {
|
||||||
deleteInstance(id);
|
|
||||||
closeConfirm();
|
closeConfirm();
|
||||||
|
await deleteInstance(vmid);
|
||||||
showToast(`${name} deleted`, 'success');
|
showToast(`${name} deleted`, 'success');
|
||||||
navigate('dashboard');
|
navigate('dashboard');
|
||||||
}
|
}
|
||||||
@@ -261,18 +340,217 @@ function showToast(msg, type = 'success') {
|
|||||||
toastTimer = setTimeout(() => t.classList.remove('show'), 3000);
|
toastTimer = setTimeout(() => t.classList.remove('show'), 3000);
|
||||||
}
|
}
|
||||||
|
|
||||||
// ── Global keyboard handler ───────────────────────────────────────────────────
|
// ── Settings Modal ────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
function openSettingsModal() {
|
||||||
|
const sel = document.getElementById('tz-select');
|
||||||
|
if (!sel.options.length) {
|
||||||
|
for (const { label, tz } of TIMEZONES) {
|
||||||
|
const opt = document.createElement('option');
|
||||||
|
opt.value = tz;
|
||||||
|
opt.textContent = label;
|
||||||
|
sel.appendChild(opt);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
sel.value = getTimezone();
|
||||||
|
document.getElementById('settings-modal').classList.add('open');
|
||||||
|
}
|
||||||
|
|
||||||
|
function closeSettingsModal() {
|
||||||
|
document.getElementById('settings-modal').classList.remove('open');
|
||||||
|
document.getElementById('import-file').value = '';
|
||||||
|
}
|
||||||
|
|
||||||
|
async function exportDB() {
|
||||||
|
const res = await fetch('/api/export');
|
||||||
|
const blob = await res.blob();
|
||||||
|
const url = URL.createObjectURL(blob);
|
||||||
|
const a = document.createElement('a');
|
||||||
|
a.href = url;
|
||||||
|
a.download = `catalyst-backup-${new Date().toISOString().slice(0, 10)}.json`;
|
||||||
|
a.click();
|
||||||
|
URL.revokeObjectURL(url);
|
||||||
|
}
|
||||||
|
|
||||||
|
async function importDB() {
|
||||||
|
const file = document.getElementById('import-file').files[0];
|
||||||
|
if (!file) { showToast('Select a backup file first', 'error'); return; }
|
||||||
|
document.getElementById('confirm-title').textContent = 'Replace all instances?';
|
||||||
|
document.getElementById('confirm-msg').textContent =
|
||||||
|
`This will delete all current instances and replace them with the contents of "${file.name}". This cannot be undone.`;
|
||||||
|
document.getElementById('confirm-overlay').classList.add('open');
|
||||||
|
document.getElementById('confirm-ok').onclick = async () => {
|
||||||
|
closeConfirm();
|
||||||
|
try {
|
||||||
|
const { instances, history = [], jobs, job_runs } = JSON.parse(await file.text());
|
||||||
|
const res = await fetch('/api/import', {
|
||||||
|
method: 'POST',
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
body: JSON.stringify({ instances, history, jobs, job_runs }),
|
||||||
|
});
|
||||||
|
const data = await res.json();
|
||||||
|
if (!res.ok) { showToast(data.error ?? 'Import failed', 'error'); return; }
|
||||||
|
const parts = [`${data.imported} instance${data.imported !== 1 ? 's' : ''}`];
|
||||||
|
if (data.imported_jobs != null) parts.push(`${data.imported_jobs} job${data.imported_jobs !== 1 ? 's' : ''}`);
|
||||||
|
showToast(`Imported ${parts.join(', ')}`, 'success');
|
||||||
|
closeSettingsModal();
|
||||||
|
renderDashboard();
|
||||||
|
} catch {
|
||||||
|
showToast('Invalid backup file', 'error');
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// ── Keyboard / backdrop ───────────────────────────────────────────────────────
|
||||||
|
|
||||||
document.addEventListener('keydown', e => {
|
document.addEventListener('keydown', e => {
|
||||||
if (e.key !== 'Escape') return;
|
if (e.key !== 'Escape') return;
|
||||||
if (document.getElementById('instance-modal').classList.contains('open')) { closeModal(); return; }
|
if (document.getElementById('instance-modal').classList.contains('open')) { closeModal(); return; }
|
||||||
if (document.getElementById('confirm-overlay').classList.contains('open')) { closeConfirm(); return; }
|
if (document.getElementById('confirm-overlay').classList.contains('open')) { closeConfirm(); return; }
|
||||||
|
if (document.getElementById('settings-modal').classList.contains('open')) { closeSettingsModal(); return; }
|
||||||
});
|
});
|
||||||
|
|
||||||
// Close modals on backdrop click
|
|
||||||
document.getElementById('instance-modal').addEventListener('click', e => {
|
document.getElementById('instance-modal').addEventListener('click', e => {
|
||||||
if (e.target === document.getElementById('instance-modal')) closeModal();
|
if (e.target === document.getElementById('instance-modal')) closeModal();
|
||||||
});
|
});
|
||||||
document.getElementById('confirm-overlay').addEventListener('click', e => {
|
document.getElementById('confirm-overlay').addEventListener('click', e => {
|
||||||
if (e.target === document.getElementById('confirm-overlay')) closeConfirm();
|
if (e.target === document.getElementById('confirm-overlay')) closeConfirm();
|
||||||
});
|
});
|
||||||
|
document.getElementById('settings-modal').addEventListener('click', e => {
|
||||||
|
if (e.target === document.getElementById('settings-modal')) closeSettingsModal();
|
||||||
|
});
|
||||||
|
|
||||||
|
document.getElementById('tz-select').addEventListener('change', e => {
|
||||||
|
localStorage.setItem('catalyst_tz', e.target.value);
|
||||||
|
const m = window.location.pathname.match(/^\/instance\/(\d+)/);
|
||||||
|
if (m) renderDetailPage(parseInt(m[1], 10));
|
||||||
|
else renderDashboard();
|
||||||
|
});
|
||||||
|
|
||||||
|
// ── Jobs Page ─────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
async function renderJobsPage() {
|
||||||
|
const jobs = await fetch('/api/jobs').then(r => r.json());
|
||||||
|
_updateJobsNavDot(jobs);
|
||||||
|
document.getElementById('jobs-list').innerHTML = jobs.length
|
||||||
|
? jobs.map(j => `
|
||||||
|
<div class="job-item" id="job-item-${j.id}" onclick="loadJobDetail(${j.id})">
|
||||||
|
<span class="job-dot job-dot--${j.last_status ?? 'none'}"></span>
|
||||||
|
<span class="job-item-name">${esc(j.name)}</span>
|
||||||
|
</div>`).join('')
|
||||||
|
: '<div class="jobs-placeholder">No jobs</div>';
|
||||||
|
if (jobs.length) loadJobDetail(jobs[0].id);
|
||||||
|
}
|
||||||
|
|
||||||
|
async function loadJobDetail(jobId) {
|
||||||
|
document.querySelectorAll('.job-item').forEach(el => el.classList.remove('active'));
|
||||||
|
document.getElementById(`job-item-${jobId}`)?.classList.add('active');
|
||||||
|
const job = await fetch(`/api/jobs/${jobId}`).then(r => r.json());
|
||||||
|
const cfg = job.config ?? {};
|
||||||
|
document.getElementById('jobs-detail').innerHTML = `
|
||||||
|
<div class="jobs-detail-hd">
|
||||||
|
<div class="jobs-detail-title">${esc(job.name)}</div>
|
||||||
|
<div class="jobs-detail-desc">${esc(job.description)}</div>
|
||||||
|
</div>
|
||||||
|
<div class="form-group">
|
||||||
|
<label class="form-label" style="display:flex;align-items:center;gap:8px;cursor:pointer">
|
||||||
|
<input type="checkbox" id="job-enabled" ${job.enabled ? 'checked' : ''}
|
||||||
|
style="accent-color:var(--accent);width:13px;height:13px">
|
||||||
|
Enable scheduled runs
|
||||||
|
</label>
|
||||||
|
</div>
|
||||||
|
<div class="form-group">
|
||||||
|
<label class="form-label" for="job-schedule">Poll interval (minutes)</label>
|
||||||
|
<input class="form-input" id="job-schedule" type="number" min="1" value="${job.schedule}" style="max-width:100px">
|
||||||
|
</div>
|
||||||
|
${_renderJobConfigFields(job.key, cfg)}
|
||||||
|
<div class="job-actions">
|
||||||
|
<button class="btn btn-secondary" onclick="saveJobDetail(${job.id})">Save</button>
|
||||||
|
<button class="btn btn-secondary" id="job-run-btn" onclick="runJobNow(${job.id})">Run Now</button>
|
||||||
|
</div>
|
||||||
|
<div class="detail-section-title" style="margin:28px 0 10px">Run History</div>
|
||||||
|
${_renderRunList(job.runs)}
|
||||||
|
`;
|
||||||
|
}
|
||||||
|
|
||||||
|
function _renderJobConfigFields(key, cfg) {
|
||||||
|
if (key === 'tailscale_sync') return `
|
||||||
|
<div class="form-group">
|
||||||
|
<label class="form-label" for="job-cfg-tailnet">Tailnet</label>
|
||||||
|
<input class="form-input" id="job-cfg-tailnet" type="text"
|
||||||
|
placeholder="e.g. Tt3Btpm6D921CNTRL" value="${esc(cfg.tailnet ?? '')}">
|
||||||
|
</div>
|
||||||
|
<div class="form-group">
|
||||||
|
<label class="form-label" for="job-cfg-api-key">API Key</label>
|
||||||
|
<input class="form-input" id="job-cfg-api-key" type="password"
|
||||||
|
placeholder="tskey-api-…" value="${esc(cfg.api_key ?? '')}">
|
||||||
|
</div>`;
|
||||||
|
if (key === 'patchmon_sync' || key === 'semaphore_sync') {
|
||||||
|
const label = key === 'semaphore_sync' ? 'API Token (Bearer)' : 'API Token (Basic)';
|
||||||
|
return `
|
||||||
|
<div class="form-group">
|
||||||
|
<label class="form-label" for="job-cfg-api-url">API URL</label>
|
||||||
|
<input class="form-input" id="job-cfg-api-url" type="text"
|
||||||
|
value="${esc(cfg.api_url ?? '')}">
|
||||||
|
</div>
|
||||||
|
<div class="form-group">
|
||||||
|
<label class="form-label" for="job-cfg-api-token">${label}</label>
|
||||||
|
<input class="form-input" id="job-cfg-api-token" type="password"
|
||||||
|
value="${esc(cfg.api_token ?? '')}">
|
||||||
|
</div>`;
|
||||||
|
}
|
||||||
|
return '';
|
||||||
|
}
|
||||||
|
|
||||||
|
function _renderRunList(runs) {
|
||||||
|
if (!runs?.length) return '<div class="run-empty">No runs yet</div>';
|
||||||
|
return `<div class="run-list">${runs.map(r => `
|
||||||
|
<div class="run-item">
|
||||||
|
<span class="job-dot job-dot--${r.status}"></span>
|
||||||
|
<span class="run-time">${fmtDateFull(r.started_at)}</span>
|
||||||
|
<span class="run-status">${esc(r.status)}</span>
|
||||||
|
<span class="run-result">${esc(r.result)}</span>
|
||||||
|
</div>`).join('')}</div>`;
|
||||||
|
}
|
||||||
|
|
||||||
|
async function saveJobDetail(jobId) {
|
||||||
|
const enabled = document.getElementById('job-enabled').checked;
|
||||||
|
const schedule = document.getElementById('job-schedule').value;
|
||||||
|
const cfg = {};
|
||||||
|
const tailnet = document.getElementById('job-cfg-tailnet');
|
||||||
|
const apiKey = document.getElementById('job-cfg-api-key');
|
||||||
|
const apiUrl = document.getElementById('job-cfg-api-url');
|
||||||
|
const apiToken = document.getElementById('job-cfg-api-token');
|
||||||
|
if (tailnet) cfg.tailnet = tailnet.value.trim();
|
||||||
|
if (apiKey) cfg.api_key = apiKey.value;
|
||||||
|
if (apiUrl) cfg.api_url = apiUrl.value.trim();
|
||||||
|
if (apiToken) cfg.api_token = apiToken.value;
|
||||||
|
const res = await fetch(`/api/jobs/${jobId}`, {
|
||||||
|
method: 'PUT',
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
body: JSON.stringify({ enabled, schedule: parseInt(schedule, 10), config: cfg }),
|
||||||
|
});
|
||||||
|
if (res.ok) { showToast('Job saved', 'success'); loadJobDetail(jobId); }
|
||||||
|
else { showToast('Failed to save', 'error'); }
|
||||||
|
}
|
||||||
|
|
||||||
|
async function runJobNow(jobId) {
|
||||||
|
const btn = document.getElementById('job-run-btn');
|
||||||
|
btn.disabled = true;
|
||||||
|
btn.textContent = 'Running…';
|
||||||
|
try {
|
||||||
|
const res = await fetch(`/api/jobs/${jobId}/run`, { method: 'POST' });
|
||||||
|
const data = await res.json();
|
||||||
|
if (res.ok) { showToast(`Done — ${data.summary}`, 'success'); loadJobDetail(jobId); }
|
||||||
|
else { showToast(data.error ?? 'Run failed', 'error'); }
|
||||||
|
} catch { showToast('Run failed', 'error'); }
|
||||||
|
finally { btn.disabled = false; btn.textContent = 'Run Now'; }
|
||||||
|
}
|
||||||
|
|
||||||
|
function _updateJobsNavDot(jobs) {
|
||||||
|
const dot = document.getElementById('nav-jobs-dot');
|
||||||
|
const cls = jobs.some(j => j.last_status === 'error') ? 'error'
|
||||||
|
: jobs.some(j => j.last_status === 'success') ? 'success'
|
||||||
|
: 'none';
|
||||||
|
dot.className = `nav-job-dot nav-job-dot--${cls}`;
|
||||||
|
}
|
||||||
|
|||||||
1
js/version.js
Normal file
1
js/version.js
Normal file
@@ -0,0 +1 @@
|
|||||||
|
const VERSION = "1.5.0";
|
||||||
19
nginx.conf
19
nginx.conf
@@ -1,19 +0,0 @@
|
|||||||
server {
|
|
||||||
listen 80;
|
|
||||||
root /usr/share/nginx/html;
|
|
||||||
index index.html;
|
|
||||||
|
|
||||||
# SPA fallback for client-side routing
|
|
||||||
location / {
|
|
||||||
try_files $uri $uri/ /index.html;
|
|
||||||
}
|
|
||||||
|
|
||||||
location ~* \.(css|js)$ {
|
|
||||||
expires 1y;
|
|
||||||
add_header Cache-Control "public, immutable";
|
|
||||||
}
|
|
||||||
|
|
||||||
location = /index.html {
|
|
||||||
add_header Cache-Control "no-store";
|
|
||||||
}
|
|
||||||
}
|
|
||||||
1482
package-lock.json
generated
1482
package-lock.json
generated
File diff suppressed because it is too large
Load Diff
13
package.json
13
package.json
@@ -1,15 +1,20 @@
|
|||||||
{
|
{
|
||||||
"name": "catalyst",
|
"name": "catalyst",
|
||||||
"version": "1.0.3",
|
"version": "1.5.0",
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"scripts": {
|
"scripts": {
|
||||||
|
"start": "node server/server.js",
|
||||||
"test": "vitest run",
|
"test": "vitest run",
|
||||||
"test:watch": "vitest",
|
"test:watch": "vitest",
|
||||||
"version:write": "node -e \"const {version}=JSON.parse(require('fs').readFileSync('package.json','utf8'));require('fs').writeFileSync('js/version.js','const VERSION = \\\"'+version+'\\\";\\n');\""
|
"version:write": "node -e \"const {version}=JSON.parse(require('fs').readFileSync('package.json','utf8'));require('fs').writeFileSync('js/version.js','const VERSION = \\\"'+version+'\\\";\\n');\""
|
||||||
},
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"express": "^4.18.0",
|
||||||
|
"helmet": "^8.1.0"
|
||||||
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"vitest": "^2.0.0",
|
"jsdom": "^25.0.0",
|
||||||
"sql.js": "^1.10.2",
|
"supertest": "^7.0.0",
|
||||||
"jsdom": "^25.0.0"
|
"vitest": "^3.2.4"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
347
server/db.js
Normal file
347
server/db.js
Normal file
@@ -0,0 +1,347 @@
|
|||||||
|
import { DatabaseSync } from 'node:sqlite';
|
||||||
|
import { mkdirSync } from 'fs';
|
||||||
|
import { dirname, join } from 'path';
|
||||||
|
import { fileURLToPath } from 'url';
|
||||||
|
|
||||||
|
const __dirname = dirname(fileURLToPath(import.meta.url));
|
||||||
|
const DEFAULT_PATH = join(__dirname, '../data/catalyst.db');
|
||||||
|
|
||||||
|
let db;
|
||||||
|
|
||||||
|
function init(path) {
|
||||||
|
if (path !== ':memory:') {
|
||||||
|
mkdirSync(dirname(path), { recursive: true });
|
||||||
|
}
|
||||||
|
db = new DatabaseSync(path);
|
||||||
|
db.exec('PRAGMA journal_mode = WAL');
|
||||||
|
db.exec('PRAGMA foreign_keys = ON');
|
||||||
|
db.exec('PRAGMA synchronous = NORMAL');
|
||||||
|
createSchema();
|
||||||
|
if (path !== ':memory:') { seed(); seedJobs(); }
|
||||||
|
}
|
||||||
|
|
||||||
|
function createSchema() {
|
||||||
|
db.exec(`
|
||||||
|
CREATE TABLE IF NOT EXISTS instances (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
name TEXT NOT NULL CHECK(length(name) BETWEEN 1 AND 100),
|
||||||
|
state TEXT NOT NULL DEFAULT 'deployed'
|
||||||
|
CHECK(state IN ('deployed','testing','degraded')),
|
||||||
|
stack TEXT NOT NULL DEFAULT 'development'
|
||||||
|
CHECK(stack IN ('production','development')),
|
||||||
|
vmid INTEGER NOT NULL UNIQUE CHECK(vmid > 0),
|
||||||
|
atlas INTEGER NOT NULL DEFAULT 0 CHECK(atlas IN (0,1)),
|
||||||
|
argus INTEGER NOT NULL DEFAULT 0 CHECK(argus IN (0,1)),
|
||||||
|
semaphore INTEGER NOT NULL DEFAULT 0 CHECK(semaphore IN (0,1)),
|
||||||
|
patchmon INTEGER NOT NULL DEFAULT 0 CHECK(patchmon IN (0,1)),
|
||||||
|
tailscale INTEGER NOT NULL DEFAULT 0 CHECK(tailscale IN (0,1)),
|
||||||
|
andromeda INTEGER NOT NULL DEFAULT 0 CHECK(andromeda IN (0,1)),
|
||||||
|
tailscale_ip TEXT NOT NULL DEFAULT '',
|
||||||
|
hardware_acceleration INTEGER NOT NULL DEFAULT 0 CHECK(hardware_acceleration IN (0,1)),
|
||||||
|
created_at TEXT NOT NULL DEFAULT (datetime('now')),
|
||||||
|
updated_at TEXT NOT NULL DEFAULT (datetime('now'))
|
||||||
|
);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_instances_state ON instances(state);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_instances_stack ON instances(stack);
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS instance_history (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
vmid INTEGER NOT NULL,
|
||||||
|
field TEXT NOT NULL,
|
||||||
|
old_value TEXT,
|
||||||
|
new_value TEXT,
|
||||||
|
changed_at TEXT NOT NULL DEFAULT (datetime('now'))
|
||||||
|
);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_history_vmid ON instance_history(vmid);
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS config (
|
||||||
|
key TEXT PRIMARY KEY,
|
||||||
|
value TEXT NOT NULL DEFAULT ''
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS jobs (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
key TEXT NOT NULL UNIQUE,
|
||||||
|
name TEXT NOT NULL,
|
||||||
|
description TEXT NOT NULL DEFAULT '',
|
||||||
|
enabled INTEGER NOT NULL DEFAULT 0 CHECK(enabled IN (0,1)),
|
||||||
|
schedule INTEGER NOT NULL DEFAULT 15,
|
||||||
|
config TEXT NOT NULL DEFAULT '{}'
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS job_runs (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
job_id INTEGER NOT NULL,
|
||||||
|
started_at TEXT NOT NULL DEFAULT (datetime('now')),
|
||||||
|
ended_at TEXT,
|
||||||
|
status TEXT NOT NULL DEFAULT 'running' CHECK(status IN ('running','success','error')),
|
||||||
|
result TEXT NOT NULL DEFAULT ''
|
||||||
|
);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_job_runs_job_id ON job_runs(job_id);
|
||||||
|
`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const SEED = [
|
||||||
|
{ name: 'plex', state: 'deployed', stack: 'production', vmid: 117, atlas: 1, argus: 1, semaphore: 0, patchmon: 1, tailscale: 1, andromeda: 0, tailscale_ip: '100.64.0.1', hardware_acceleration: 1 },
|
||||||
|
{ name: 'foldergram', state: 'testing', stack: 'development', vmid: 137, atlas: 0, argus: 0, semaphore: 0, patchmon: 0, tailscale: 0, andromeda: 0, tailscale_ip: '', hardware_acceleration: 0 },
|
||||||
|
{ name: 'homeassistant', state: 'deployed', stack: 'production', vmid: 102, atlas: 1, argus: 1, semaphore: 1, patchmon: 1, tailscale: 1, andromeda: 0, tailscale_ip: '100.64.0.5', hardware_acceleration: 0 },
|
||||||
|
{ name: 'gitea', state: 'deployed', stack: 'production', vmid: 110, atlas: 1, argus: 0, semaphore: 1, patchmon: 1, tailscale: 1, andromeda: 0, tailscale_ip: '100.64.0.8', hardware_acceleration: 0 },
|
||||||
|
{ name: 'postgres-primary', state: 'degraded', stack: 'production', vmid: 201, atlas: 1, argus: 1, semaphore: 0, patchmon: 1, tailscale: 0, andromeda: 1, tailscale_ip: '', hardware_acceleration: 0 },
|
||||||
|
{ name: 'nextcloud', state: 'testing', stack: 'development', vmid: 144, atlas: 0, argus: 0, semaphore: 0, patchmon: 0, tailscale: 1, andromeda: 0, tailscale_ip: '100.64.0.12', hardware_acceleration: 0 },
|
||||||
|
{ name: 'traefik', state: 'deployed', stack: 'production', vmid: 100, atlas: 1, argus: 1, semaphore: 0, patchmon: 1, tailscale: 1, andromeda: 0, tailscale_ip: '100.64.0.2', hardware_acceleration: 0 },
|
||||||
|
{ name: 'monitoring-stack', state: 'testing', stack: 'development', vmid: 155, atlas: 0, argus: 0, semaphore: 1, patchmon: 0, tailscale: 0, andromeda: 0, tailscale_ip: '', hardware_acceleration: 0 },
|
||||||
|
];
|
||||||
|
|
||||||
|
// Insert the SEED rows on first boot. No-op when the instances table already
// has data, so user edits are never clobbered.
//
// Fix: the original issued BEGIN/COMMIT with no ROLLBACK path — if any
// insert threw, the transaction was left open on the shared connection and
// every later statement failed with "cannot start a transaction within a
// transaction". Errors now roll back and rethrow.
function seed() {
  const count = db.prepare('SELECT COUNT(*) as n FROM instances').get().n;
  if (count > 0) return;
  const insert = db.prepare(`
    INSERT INTO instances
      (name, state, stack, vmid, atlas, argus, semaphore, patchmon,
       tailscale, andromeda, tailscale_ip, hardware_acceleration)
    VALUES
      (@name, @state, @stack, @vmid, @atlas, @argus, @semaphore, @patchmon,
       @tailscale, @andromeda, @tailscale_ip, @hardware_acceleration)
  `);
  db.exec('BEGIN');
  try {
    for (const s of SEED) insert.run(s);
    db.exec('COMMIT');
  } catch (e) {
    db.exec('ROLLBACK'); // leave the connection usable for later statements
    throw e;
  }
}
|
||||||
|
|
||||||
|
// Ensure the built-in job rows exist. INSERT OR IGNORE leaves existing rows
// (and any user edits to them) untouched — presumably keyed on a unique
// jobs.key column; verify against the schema in init().
// The Tailscale job is pre-wired from legacy config-table entries; the
// Patchmon/Semaphore jobs start disabled with placeholder endpoints.
function seedJobs() {
  const upsert = db.prepare(`
    INSERT OR IGNORE INTO jobs (key, name, description, enabled, schedule, config)
    VALUES (?, ?, ?, ?, ?, ?)
  `);

  // Carry over any Tailscale settings previously stored in the config table.
  const apiKey = getConfig('tailscale_api_key');
  const tailnet = getConfig('tailscale_tailnet');
  const tsSchedule = parseInt(getConfig('tailscale_poll_minutes', '15'), 10) || 15;
  const tsEnabled = getConfig('tailscale_enabled') === '1' ? 1 : 0;
  upsert.run('tailscale_sync', 'Tailscale Sync',
    'Syncs Tailscale device status and IPs to instances by matching hostnames.',
    tsEnabled, tsSchedule, JSON.stringify({ api_key: apiKey, tailnet }));

  upsert.run('patchmon_sync', 'Patchmon Sync',
    'Syncs Patchmon host registration status to instances by matching hostnames.',
    0, 60, JSON.stringify({ api_url: 'http://patchmon:3000/api/v1/api/hosts', api_token: '' }));

  upsert.run('semaphore_sync', 'Semaphore Sync',
    'Syncs Semaphore inventory membership to instances by matching hostnames.',
    0, 60, JSON.stringify({ api_url: 'http://semaphore:3000/api/project/1/inventory/1', api_token: '' }));
}
|
||||||
|
|
||||||
|
// ── Queries ───────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
// List instances, optionally narrowed by a case-insensitive LIKE search
// (name / vmid / stack) and exact state/stack filters. Always sorted by name.
export function getInstances(filters = {}) {
  const { search, state, stack } = filters;
  const clauses = ['SELECT * FROM instances WHERE 1=1'];
  const bind = {};
  if (search) {
    clauses.push('AND (name LIKE @search OR CAST(vmid AS TEXT) LIKE @search OR stack LIKE @search)');
    bind.search = `%${search}%`;
  }
  if (state) {
    clauses.push('AND state = @state');
    bind.state = state;
  }
  if (stack) {
    clauses.push('AND stack = @stack');
    bind.stack = stack;
  }
  clauses.push('ORDER BY name ASC');
  return db.prepare(clauses.join(' ')).all(bind);
}
|
||||||
|
|
||||||
|
// Fetch one instance by vmid; null when no such row exists.
export function getInstance(vmid) {
  const row = db.prepare('SELECT * FROM instances WHERE vmid = ?').get(vmid);
  return row === undefined ? null : row;
}
|
||||||
|
|
||||||
|
// Distinct non-empty stack names, sorted, as a flat string array.
export function getDistinctStacks() {
  const rows = db
    .prepare(`SELECT DISTINCT stack FROM instances WHERE stack != '' ORDER BY stack`)
    .all();
  return rows.map((row) => row.stack);
}
|
||||||
|
|
||||||
|
// ── Mutations ─────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
// Columns diffed by updateInstance() when writing instance_history rows.
// DB bookkeeping columns (id, created_at, updated_at) are intentionally absent.
const HISTORY_FIELDS = [
  'name', 'state', 'stack', 'vmid', 'tailscale_ip',
  'atlas', 'argus', 'semaphore', 'patchmon', 'tailscale', 'andromeda',
  'hardware_acceleration',
];
|
||||||
|
|
||||||
|
// Insert a new instance row, then record a synthetic 'created' event in the
// history table so the audit trail starts at creation.
export function createInstance(data) {
  const insertRow = db.prepare(`
    INSERT INTO instances
      (name, state, stack, vmid, atlas, argus, semaphore, patchmon,
       tailscale, andromeda, tailscale_ip, hardware_acceleration)
    VALUES
      (@name, @state, @stack, @vmid, @atlas, @argus, @semaphore, @patchmon,
       @tailscale, @andromeda, @tailscale_ip, @hardware_acceleration)
  `);
  insertRow.run(data);

  const insertEvent = db.prepare(
    `INSERT INTO instance_history (vmid, field, old_value, new_value) VALUES (?, 'created', NULL, NULL)`
  );
  insertEvent.run(data.vmid);
}
|
||||||
|
|
||||||
|
// Update the instance currently addressed by `vmid`. `data.vmid` may differ
// (a vmid rename): the UPDATE binds it as @newVmid while @vmid locates the
// row. After the write, one instance_history row is inserted per changed
// field, keyed by the NEW vmid so history follows the renamed instance.
export function updateInstance(vmid, data) {
  const old = getInstance(vmid); // snapshot BEFORE the write, for diffing
  db.prepare(`
    UPDATE instances SET
      name=@name, state=@state, stack=@stack, vmid=@newVmid,
      atlas=@atlas, argus=@argus, semaphore=@semaphore, patchmon=@patchmon,
      tailscale=@tailscale, andromeda=@andromeda, tailscale_ip=@tailscale_ip,
      hardware_acceleration=@hardware_acceleration, updated_at=datetime('now')
    WHERE vmid=@vmid
  `).run({ ...data, newVmid: data.vmid, vmid });
  const newVmid = data.vmid;
  const insertEvt = db.prepare(
    `INSERT INTO instance_history (vmid, field, old_value, new_value) VALUES (?, ?, ?, ?)`
  );
  for (const field of HISTORY_FIELDS) {
    // String-normalise both sides so 1 vs '1' (and null vs '') don't log noise.
    const oldVal = String(old[field] ?? '');
    const newVal = String(field === 'vmid' ? newVmid : (data[field] ?? ''));
    if (oldVal !== newVal) insertEvt.run(newVmid, field, oldVal, newVal);
  }
}
|
||||||
|
|
||||||
|
// Remove an instance and its audit trail; history first, then the row itself.
export function deleteInstance(vmid) {
  const statements = [
    'DELETE FROM instance_history WHERE vmid = ?',
    'DELETE FROM instances WHERE vmid = ?',
  ];
  for (const sql of statements) {
    db.prepare(sql).run(vmid);
  }
}
|
||||||
|
|
||||||
|
// Replace-all import: wipe instances and their history, then insert the
// supplied rows (and optional history rows) atomically.
//
// Fix: the DELETEs and inserts ran in a BEGIN/COMMIT with no ROLLBACK path —
// a failing insert left the tables already wiped AND the transaction open on
// the shared connection. Any failure now rolls the whole import back, so a
// half-applied restore cannot destroy existing data.
export function importInstances(rows, historyRows = []) {
  db.exec('BEGIN');
  try {
    db.exec('DELETE FROM instance_history');
    db.exec('DELETE FROM instances');
    const insert = db.prepare(`
      INSERT INTO instances
        (name, state, stack, vmid, atlas, argus, semaphore, patchmon,
         tailscale, andromeda, tailscale_ip, hardware_acceleration)
      VALUES
        (@name, @state, @stack, @vmid, @atlas, @argus, @semaphore, @patchmon,
         @tailscale, @andromeda, @tailscale_ip, @hardware_acceleration)
    `);
    for (const row of rows) insert.run(row);
    if (historyRows.length) {
      const insertHist = db.prepare(
        `INSERT INTO instance_history (vmid, field, old_value, new_value, changed_at) VALUES (?, ?, ?, ?, ?)`
      );
      // changed_at is preserved from the backup rather than regenerated.
      for (const h of historyRows) insertHist.run(h.vmid, h.field, h.old_value ?? null, h.new_value ?? null, h.changed_at);
    }
    db.exec('COMMIT');
  } catch (e) {
    db.exec('ROLLBACK');
    throw e;
  }
}
|
||||||
|
|
||||||
|
// All history rows for one instance, newest first.
export function getInstanceHistory(vmid) {
  const stmt = db.prepare(
    'SELECT * FROM instance_history WHERE vmid = ? ORDER BY changed_at DESC'
  );
  return stmt.all(vmid);
}
|
||||||
|
|
||||||
|
// Every history row, grouped by instance then chronological — used by export.
export function getAllHistory() {
  const stmt = db.prepare('SELECT * FROM instance_history ORDER BY vmid, changed_at');
  return stmt.all();
}
|
||||||
|
|
||||||
|
// Raw job rows (explicit column list, no last-run join) — used by export.
export function getAllJobs() {
  const sql = 'SELECT id, key, name, description, enabled, schedule, config FROM jobs ORDER BY id';
  return db.prepare(sql).all();
}
|
||||||
|
|
||||||
|
// Every job run, grouped by job then in insertion order — used by export.
export function getAllJobRuns() {
  const stmt = db.prepare('SELECT * FROM job_runs ORDER BY job_id, id');
  return stmt.all();
}
|
||||||
|
|
||||||
|
// Replace-all import of jobs and (optionally) their runs, preserving the
// original ids so job_runs.job_id references stay valid.
//
// Fix: like importInstances, this deleted everything and inserted inside a
// BEGIN/COMMIT with no ROLLBACK — a failing insert wiped the job tables and
// left the transaction open. Failures now roll back and rethrow.
export function importJobs(jobRows, jobRunRows = []) {
  db.exec('BEGIN');
  try {
    db.exec('DELETE FROM job_runs');
    db.exec('DELETE FROM jobs');
    const insertJob = db.prepare(`
      INSERT INTO jobs (id, key, name, description, enabled, schedule, config)
      VALUES (@id, @key, @name, @description, @enabled, @schedule, @config)
    `);
    for (const j of jobRows) insertJob.run(j);
    if (jobRunRows.length) {
      const insertRun = db.prepare(`
        INSERT INTO job_runs (id, job_id, started_at, ended_at, status, result)
        VALUES (@id, @job_id, @started_at, @ended_at, @status, @result)
      `);
      for (const r of jobRunRows) insertRun.run(r);
    }
    db.exec('COMMIT');
  } catch (e) {
    db.exec('ROLLBACK');
    throw e;
  }
}
|
||||||
|
|
||||||
|
// Read one key from the config table; `defaultVal` when the key is absent.
export function getConfig(key, defaultVal = '') {
  const hit = db.prepare('SELECT value FROM config WHERE key = ?').get(key);
  if (!hit) return defaultVal;
  return hit.value;
}
|
||||||
|
|
||||||
|
// Upsert a config key; values are stored as strings.
export function setConfig(key, value) {
  const upsert = db.prepare(
    `INSERT INTO config (key, value) VALUES (?, ?)
     ON CONFLICT(key) DO UPDATE SET value = excluded.value`
  );
  upsert.run(key, String(value));
}
|
||||||
|
|
||||||
|
// ── Jobs ──────────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
const JOB_WITH_LAST_RUN = `
|
||||||
|
SELECT j.*,
|
||||||
|
r.id AS last_run_id,
|
||||||
|
r.started_at AS last_run_at,
|
||||||
|
r.status AS last_status,
|
||||||
|
r.result AS last_result
|
||||||
|
FROM jobs j
|
||||||
|
LEFT JOIN job_runs r
|
||||||
|
ON r.id = (SELECT id FROM job_runs WHERE job_id = j.id ORDER BY id DESC LIMIT 1)
|
||||||
|
`;
|
||||||
|
|
||||||
|
// All jobs with last-run info, ordered by id.
export function getJobs() {
  const sql = `${JOB_WITH_LAST_RUN} ORDER BY j.id`;
  return db.prepare(sql).all();
}
|
||||||
|
|
||||||
|
// One job with last-run info; null when the id is unknown.
export function getJob(id) {
  const row = db.prepare(`${JOB_WITH_LAST_RUN} WHERE j.id = ?`).get(id);
  return row === undefined ? null : row;
}
|
||||||
|
|
||||||
|
// Insert a job definition; `config` is expected as a JSON string.
export function createJob(data) {
  const insert = db.prepare(`
    INSERT INTO jobs (key, name, description, enabled, schedule, config)
    VALUES (@key, @name, @description, @enabled, @schedule, @config)
  `);
  insert.run(data);
}
|
||||||
|
|
||||||
|
// Update a job's mutable fields (enabled flag, schedule minutes, JSON config).
export function updateJob(id, { enabled, schedule, config }) {
  const stmt = db.prepare(`
    UPDATE jobs SET enabled=@enabled, schedule=@schedule, config=@config WHERE id=@id
  `);
  stmt.run({ enabled, schedule, config, id });
}
|
||||||
|
|
||||||
|
// Open a run row for a job (DB defaults fill started_at/status) and return
// its id as a plain Number.
export function createJobRun(jobId) {
  const info = db.prepare('INSERT INTO job_runs (job_id) VALUES (?)').run(jobId);
  return Number(info.lastInsertRowid);
}
|
||||||
|
|
||||||
|
// Close a run: stamp ended_at and record the final status and result text.
export function completeJobRun(runId, status, result) {
  const stmt = db.prepare(`
    UPDATE job_runs SET ended_at=datetime('now'), status=@status, result=@result WHERE id=@id
  `);
  stmt.run({ status, result, id: runId });
}
|
||||||
|
|
||||||
|
// Runs for one job, newest first.
export function getJobRuns(jobId) {
  const stmt = db.prepare('SELECT * FROM job_runs WHERE job_id = ? ORDER BY id DESC');
  return stmt.all(jobId);
}
|
||||||
|
|
||||||
|
// ── Test helpers ──────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
// Test-only: close the current connection (if any) and re-init against an
// in-memory database so each test worker starts from a clean schema.
export function _resetForTest() {
  if (db) db.close();
  init(':memory:');
}
|
||||||
|
|
||||||
|
// ── Boot ──────────────────────────────────────────────────────────────────────
// Skipped in test environment — parallel Vitest workers would race to open
// the same file, causing "database is locked". _resetForTest() in beforeEach
// handles initialisation for every test worker using :memory: instead.

if (process.env.NODE_ENV !== 'test') {
  const DB_PATH = process.env.DB_PATH ?? DEFAULT_PATH;
  try {
    init(DB_PATH);
  } catch (e) {
    // A server without its database is useless — fail loudly at startup
    // rather than limping along and 500-ing every request.
    console.error('[catalyst] fatal: could not open database at', DB_PATH);
    console.error('[catalyst] ensure the data directory exists and is writable by the server process.');
    console.error(e);
    process.exit(1);
  }
}
|
||||||
141
server/jobs.js
Normal file
141
server/jobs.js
Normal file
@@ -0,0 +1,141 @@
|
|||||||
|
import { getJobs, getJob, getInstances, updateInstance, createJobRun, completeJobRun } from './db.js';
|
||||||
|
|
||||||
|
// ── Handlers ──────────────────────────────────────────────────────────────────

// Base URL for the Tailscale REST API.
const TAILSCALE_API = 'https://api.tailscale.com/api/v2';
||||||
|
// Pull the device list for the configured tailnet and mirror it onto
// instances: an instance whose name matches a device hostname gets
// tailscale=1 and its 100.x address; a previously-matched instance that
// disappeared is reset. Instances never marked (tailscale !== 1) keep their
// stored flag/IP. Returns a summary string for the job-run log.
async function tailscaleSyncHandler(cfg) {
  const { api_key, tailnet } = cfg;
  if (!api_key || !tailnet) throw new Error('Tailscale not configured — set API key and tailnet');

  const res = await fetch(
    `${TAILSCALE_API}/tailnet/${encodeURIComponent(tailnet)}/devices`,
    { headers: { Authorization: `Bearer ${api_key}` } }
  );
  if (!res.ok) throw new Error(`Tailscale API ${res.status}`);

  const { devices } = await res.json();
  // hostname -> first CGNAT-range (100.x) address, '' when the device has none.
  const tsMap = new Map(
    devices.map(d => [d.hostname, (d.addresses ?? []).find(a => a.startsWith('100.')) ?? ''])
  );

  const instances = getInstances();
  let updated = 0;
  for (const inst of instances) {
    const tsIp = tsMap.get(inst.name);
    const matched = tsIp !== undefined;
    // Only flip 1 -> 0 / clear the IP for instances previously marked by us;
    // anything else is left as stored.
    const newTailscale = matched ? 1 : (inst.tailscale === 1 ? 0 : inst.tailscale);
    const newIp = matched ? tsIp : (inst.tailscale === 1 ? '' : inst.tailscale_ip);
    if (newTailscale !== inst.tailscale || newIp !== inst.tailscale_ip) {
      // Strip DB bookkeeping columns before round-tripping through updateInstance.
      const { id: _id, created_at: _ca, updated_at: _ua, ...instData } = inst;
      updateInstance(inst.vmid, { ...instData, tailscale: newTailscale, tailscale_ip: newIp });
      updated++;
    }
  }
  return { summary: `${updated} updated of ${instances.length}` };
}
|
||||||
|
|
||||||
|
// ── Patchmon Sync ─────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
// Set patchmon=1 on instances whose name appears in the Patchmon host list,
// 0 otherwise. Unlike the Tailscale job, this always overwrites the flag.
async function patchmonSyncHandler(cfg) {
  const { api_url, api_token } = cfg;
  if (!api_url || !api_token) throw new Error('Patchmon not configured — set API URL and token');

  const res = await fetch(api_url, {
    headers: { Authorization: `Basic ${api_token}` },
  });
  if (!res.ok) throw new Error(`Patchmon API ${res.status}`);

  const data = await res.json();
  // Tolerate several response shapes: a bare array, {hosts: []}, or {data: []}.
  const items = Array.isArray(data) ? data : (data.hosts ?? data.data ?? []);
  // Hosts may be strings or objects keyed name/hostname/host; blanks dropped.
  const hostSet = new Set(
    items.map(h => (typeof h === 'string' ? h : (h.name ?? h.hostname ?? h.host ?? '')))
      .filter(Boolean)
  );

  const instances = getInstances();
  let updated = 0;
  for (const inst of instances) {
    const newPatchmon = hostSet.has(inst.name) ? 1 : 0;
    if (newPatchmon !== inst.patchmon) {
      // Strip DB bookkeeping columns before round-tripping through updateInstance.
      const { id: _id, created_at: _ca, updated_at: _ua, ...instData } = inst;
      updateInstance(inst.vmid, { ...instData, patchmon: newPatchmon });
      updated++;
    }
  }
  return { summary: `${updated} updated of ${instances.length}` };
}
|
||||||
|
|
||||||
|
// ── Semaphore Sync ────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
// Set semaphore=1 on instances whose name appears as a host in the Semaphore
// inventory, 0 otherwise (always overwrites, like the Patchmon job).
async function semaphoreSyncHandler(cfg) {
  const { api_url, api_token } = cfg;
  if (!api_url || !api_token) throw new Error('Semaphore not configured — set API URL and token');

  const res = await fetch(api_url, {
    headers: { Authorization: `Bearer ${api_token}` },
  });
  if (!res.ok) throw new Error(`Semaphore API ${res.status}`);

  const data = await res.json();
  // Inventory is an Ansible INI string; extract bare hostnames:
  // drop [section] headers and comment lines, then take the first token
  // before whitespace or '=' (hostnames may carry key=value variables).
  const hostSet = new Set(
    (data.inventory ?? '').split('\n')
      .map(l => l.trim())
      .filter(l => l && !l.startsWith('[') && !l.startsWith('#') && !l.startsWith(';'))
      .map(l => l.split(/[\s=]/)[0])
      .filter(Boolean)
  );

  const instances = getInstances();
  let updated = 0;
  for (const inst of instances) {
    const newSemaphore = hostSet.has(inst.name) ? 1 : 0;
    if (newSemaphore !== inst.semaphore) {
      // Strip DB bookkeeping columns before round-tripping through updateInstance.
      const { id: _id, created_at: _ca, updated_at: _ua, ...instData } = inst;
      updateInstance(inst.vmid, { ...instData, semaphore: newSemaphore });
      updated++;
    }
  }
  return { summary: `${updated} updated of ${instances.length}` };
}
|
||||||
|
|
||||||
|
// ── Registry ──────────────────────────────────────────────────────────────────

// jobs.key -> async handler(config). Keys must match the rows created in
// seedJobs(); a job whose key has no entry here fails fast in runJob().
const HANDLERS = {
  tailscale_sync: tailscaleSyncHandler,
  patchmon_sync: patchmonSyncHandler,
  semaphore_sync: semaphoreSyncHandler,
};
|
||||||
|
|
||||||
|
// ── Public API ────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
// Execute one job by id: look up its handler, record a run row, and close
// the run as 'success' (with the handler's summary) or 'error' (with the
// exception message, which is rethrown to the caller).
export async function runJob(jobId) {
  const job = getJob(jobId);
  if (!job) throw new Error('Job not found');

  const handler = HANDLERS[job.key];
  if (!handler) throw new Error(`No handler for '${job.key}'`);

  const cfg = JSON.parse(job.config || '{}');
  const runId = createJobRun(jobId);
  try {
    const outcome = await handler(cfg);
    completeJobRun(runId, 'success', outcome.summary ?? '');
    return outcome;
  } catch (err) {
    completeJobRun(runId, 'error', err.message);
    throw err;
  }
}
|
||||||
|
|
||||||
|
// job id -> active setInterval handle; rebuilt wholesale by restartJobs().
const _intervals = new Map();
|
||||||
|
|
||||||
|
// (Re)build the scheduler: cancel every existing timer, then start one
// interval per enabled job. Schedule is in minutes, clamped to >= 1; run
// errors are swallowed here because runJob already records them in job_runs.
export function restartJobs() {
  _intervals.forEach((timer) => clearInterval(timer));
  _intervals.clear();

  for (const job of getJobs()) {
    if (!job.enabled) continue;
    const minutes = Math.max(1, job.schedule || 15);
    const jobId = job.id;
    const timer = setInterval(() => {
      runJob(jobId).catch(() => {}); // outcome already persisted by runJob
    }, minutes * 60_000);
    _intervals.set(jobId, timer);
  }
}
|
||||||
229
server/routes.js
Normal file
229
server/routes.js
Normal file
@@ -0,0 +1,229 @@
|
|||||||
|
import { Router } from 'express';
|
||||||
|
import {
|
||||||
|
getInstances, getInstance, getDistinctStacks,
|
||||||
|
createInstance, updateInstance, deleteInstance, importInstances, getInstanceHistory, getAllHistory,
|
||||||
|
getConfig, setConfig, getJobs, getJob, updateJob, getJobRuns,
|
||||||
|
getAllJobs, getAllJobRuns, importJobs,
|
||||||
|
} from './db.js';
|
||||||
|
import { runJob, restartJobs } from './jobs.js';
|
||||||
|
|
||||||
|
export const router = Router();
|
||||||
|
|
||||||
|
// ── Validation ────────────────────────────────────────────────────────────────

// Allowed enum values for instance rows — presumably mirror CHECK constraints
// in db.js; verify against the schema there.
const VALID_STATES = ['deployed', 'testing', 'degraded'];
const VALID_STACKS = ['production', 'development'];
// Boolean service flags normalised to 0/1 integer columns by normalise().
const SERVICE_KEYS = ['atlas', 'argus', 'semaphore', 'patchmon', 'tailscale', 'andromeda'];

// Placeholder returned in place of secrets; PUT /jobs/:id treats an incoming
// value equal to this as "keep the stored secret".
const REDACTED = '**REDACTED**';
|
||||||
|
|
||||||
|
// Return a copy of a job row with its parsed config attached and any secret
// fields replaced by the REDACTED placeholder. The input row is not mutated.
function maskJob(job) {
  const cfg = JSON.parse(job.config || '{}');
  for (const secret of ['api_key', 'api_token']) {
    if (cfg[secret]) cfg[secret] = REDACTED;
  }
  return { ...job, config: cfg };
}
|
||||||
|
|
||||||
|
// Helper: true when `ip` is a dotted-quad IPv4 address with every octet in
// the range 0–255.
function isValidIPv4(ip) {
  const octets = ip.split('.');
  return octets.length === 4 &&
    octets.every((o) => /^\d{1,3}$/.test(o) && Number(o) <= 255);
}

// Validate an instance payload; returns an array of human-readable error
// strings (empty when valid).
//
// Fix: the old IPv4 check /^(\d{1,3}\.){3}\d{1,3}$/ accepted out-of-range
// octets such as '999.999.999.999'; octets are now range-checked.
function validate(body) {
  const errors = [];
  if (!body.name || typeof body.name !== 'string' || !body.name.trim())
    errors.push('name is required');
  if (!Number.isInteger(body.vmid) || body.vmid < 1)
    errors.push('vmid must be a positive integer');
  if (!VALID_STATES.includes(body.state))
    errors.push(`state must be one of: ${VALID_STATES.join(', ')}`);
  if (!VALID_STACKS.includes(body.stack))
    errors.push(`stack must be one of: ${VALID_STACKS.join(', ')}`);
  const ip = (body.tailscale_ip ?? '').trim();
  if (ip && !isValidIPv4(ip))
    errors.push('tailscale_ip must be a valid IPv4 address or empty');
  return errors;
}
|
||||||
|
|
||||||
|
// Translate a database error into an HTTP response: UNIQUE violations become
// 409 (duplicate vmid), CHECK violations 400; anything else is logged with
// its context and answered as a generic 500.
function handleDbError(context, e, res) {
  const msg = e.message;
  if (msg.includes('UNIQUE')) {
    return res.status(409).json({ error: 'vmid already exists' });
  }
  if (msg.includes('CHECK')) {
    return res.status(400).json({ error: 'invalid field value' });
  }
  console.error(context, e);
  res.status(500).json({ error: 'internal server error' });
}
|
||||||
|
|
||||||
|
// Shape a raw request body into a DB-ready row: trim the string fields and
// coerce every boolean-ish flag (services + hardware_acceleration) to 0/1.
function normalise(body) {
  const row = {
    name: (body.name ?? '').trim(),
    state: body.state,
    stack: body.stack,
    vmid: body.vmid,
    tailscale_ip: (body.tailscale_ip ?? '').trim(),
    hardware_acceleration: body.hardware_acceleration ? 1 : 0,
  };
  SERVICE_KEYS.forEach((svc) => {
    row[svc] = body[svc] ? 1 : 0;
  });
  return row;
}
|
||||||
|
|
||||||
|
// ── Routes ────────────────────────────────────────────────────────────────────

// GET /api/instances/stacks — must be declared before /:vmid
router.get('/instances/stacks', (_req, res) => {
  res.json(getDistinctStacks());
});

// GET /api/instances — optional ?search=&state=&stack= filters (see getInstances)
router.get('/instances', (req, res) => {
  const { search, state, stack } = req.query;
  res.json(getInstances({ search, state, stack }));
});

// GET /api/instances/:vmid/history — change log for one instance, newest first
router.get('/instances/:vmid/history', (req, res) => {
  const vmid = parseInt(req.params.vmid, 10);
  if (!vmid) return res.status(400).json({ error: 'invalid vmid' }); // NaN and 0 both rejected
  if (!getInstance(vmid)) return res.status(404).json({ error: 'instance not found' });
  res.json(getInstanceHistory(vmid));
});

// GET /api/instances/:vmid — single instance lookup
router.get('/instances/:vmid', (req, res) => {
  const vmid = parseInt(req.params.vmid, 10);
  if (!vmid) return res.status(400).json({ error: 'invalid vmid' });

  const instance = getInstance(vmid);
  if (!instance) return res.status(404).json({ error: 'instance not found' });

  res.json(instance);
});
|
||||||
|
|
||||||
|
// POST /api/instances — validate, insert, return the created row (201)
router.post('/instances', (req, res) => {
  const errors = validate(req.body);
  if (errors.length) return res.status(400).json({ errors });

  try {
    const data = normalise(req.body);
    createInstance(data);
    const created = getInstance(data.vmid);
    res.status(201).json(created);
  } catch (e) {
    handleDbError('POST /api/instances', e, res);
  }
});

// PUT /api/instances/:vmid — full replace; body may carry a new vmid (rename)
router.put('/instances/:vmid', (req, res) => {
  const vmid = parseInt(req.params.vmid, 10);
  if (!vmid) return res.status(400).json({ error: 'invalid vmid' });
  if (!getInstance(vmid)) return res.status(404).json({ error: 'instance not found' });

  const errors = validate(req.body);
  if (errors.length) return res.status(400).json({ errors });

  try {
    const data = normalise(req.body);
    updateInstance(vmid, data);
    // Re-read under data.vmid: the update may have renamed the instance.
    res.json(getInstance(data.vmid));
  } catch (e) {
    handleDbError('PUT /api/instances/:vmid', e, res);
  }
});
|
||||||
|
|
||||||
|
// GET /api/export — full backup (instances + history + jobs + runs) as a
// JSON file download
router.get('/export', (_req, res) => {
  const instances = getInstances();
  const history = getAllHistory();
  const jobs = getAllJobs();
  const job_runs = getAllJobRuns();
  const date = new Date().toISOString().slice(0, 10);
  res.setHeader('Content-Disposition', `attachment; filename="catalyst-backup-${date}.json"`);
  res.json({ version: 3, exported_at: new Date().toISOString(), instances, history, jobs, job_runs });
});

// POST /api/import — replace-all restore. Instances are validated up front
// (all-or-nothing); jobs/job_runs are optional so older backups still import.
router.post('/import', (req, res) => {
  const { instances, history = [], jobs, job_runs } = req.body ?? {};
  if (!Array.isArray(instances)) {
    return res.status(400).json({ error: 'body must contain an instances array' });
  }
  // Validate every row before touching the database.
  const errors = [];
  for (const [i, row] of instances.entries()) {
    const errs = validate(normalise(row));
    if (errs.length) errors.push({ index: i, errors: errs });
  }
  if (errors.length) return res.status(400).json({ errors });
  try {
    importInstances(instances.map(normalise), Array.isArray(history) ? history : []);
    if (Array.isArray(jobs)) {
      importJobs(jobs, Array.isArray(job_runs) ? job_runs : []);
      // Reschedule timers for the imported jobs; a scheduler failure should
      // not fail the (already committed) import.
      try { restartJobs(); } catch (e) { console.error('POST /api/import restartJobs', e); }
    }
    res.json({
      imported: instances.length,
      imported_jobs: Array.isArray(jobs) ? jobs.length : undefined,
    });
  } catch (e) {
    console.error('POST /api/import', e);
    res.status(500).json({ error: 'internal server error' });
  }
});
|
||||||
|
|
||||||
|
// DELETE /api/instances/:vmid — guarded: only development-stack instances
// may be deleted (production is protected from accidental removal)
router.delete('/instances/:vmid', (req, res) => {
  const vmid = parseInt(req.params.vmid, 10);
  if (!vmid) return res.status(400).json({ error: 'invalid vmid' });

  const instance = getInstance(vmid);
  if (!instance) return res.status(404).json({ error: 'instance not found' });
  if (instance.stack !== 'development')
    return res.status(422).json({ error: 'only development instances can be deleted' });

  try {
    deleteInstance(vmid);
    res.status(204).end();
  } catch (e) {
    handleDbError('DELETE /api/instances/:vmid', e, res);
  }
});
|
||||||
|
|
||||||
|
// GET /api/jobs — all jobs with last-run info; secrets masked
router.get('/jobs', (_req, res) => {
  res.json(getJobs().map(maskJob));
});

// GET /api/jobs/:id — one job plus its full run history
router.get('/jobs/:id', (req, res) => {
  const id = parseInt(req.params.id, 10);
  if (!id) return res.status(400).json({ error: 'invalid id' });
  const job = getJob(id);
  if (!job) return res.status(404).json({ error: 'job not found' });
  res.json({ ...maskJob(job), runs: getJobRuns(id) });
});

// PUT /api/jobs/:id — partial update of enabled/schedule/config. Incoming
// config is merged over the stored one; a field equal to the REDACTED
// placeholder means "keep the stored secret", so clients can echo masked
// values back unchanged.
router.put('/jobs/:id', (req, res) => {
  const id = parseInt(req.params.id, 10);
  if (!id) return res.status(400).json({ error: 'invalid id' });
  const job = getJob(id);
  if (!job) return res.status(404).json({ error: 'job not found' });
  const { enabled, schedule, config: newCfg } = req.body ?? {};
  const existingCfg = JSON.parse(job.config || '{}');
  const mergedCfg = { ...existingCfg, ...(newCfg ?? {}) };
  if (newCfg?.api_key === REDACTED) mergedCfg.api_key = existingCfg.api_key;
  if (newCfg?.api_token === REDACTED) mergedCfg.api_token = existingCfg.api_token;
  updateJob(id, {
    enabled: enabled != null ? (enabled ? 1 : 0) : job.enabled,
    schedule: schedule != null ? (parseInt(schedule, 10) || 15) : job.schedule,
    config: JSON.stringify(mergedCfg),
  });
  // Apply the new schedule immediately; scheduler failure shouldn't fail the save.
  try { restartJobs(); } catch (e) { console.error('PUT /api/jobs/:id restartJobs', e); }
  res.json(maskJob(getJob(id)));
});

// POST /api/jobs/:id/run — execute the job now and return its result
router.post('/jobs/:id/run', async (req, res) => {
  const id = parseInt(req.params.id, 10);
  if (!id) return res.status(400).json({ error: 'invalid id' });
  if (!getJob(id)) return res.status(404).json({ error: 'job not found' });
  try {
    res.json(await runJob(id));
  } catch (e) {
    handleDbError('POST /api/jobs/:id/run', e, res);
  }
});
|
||||||
53
server/server.js
Normal file
53
server/server.js
Normal file
@@ -0,0 +1,53 @@
|
|||||||
|
import express from 'express';
|
||||||
|
import helmet from 'helmet';
|
||||||
|
import { fileURLToPath } from 'url';
|
||||||
|
import { dirname, join } from 'path';
|
||||||
|
import { router } from './routes.js';
|
||||||
|
import { restartJobs } from './jobs.js';
|
||||||
|
|
||||||
|
const __dirname = dirname(fileURLToPath(import.meta.url));
const PORT = process.env.PORT ?? 3000;

// Exported without listening so tests can drive the app via supertest.
export const app = express();

app.use(helmet({
  contentSecurityPolicy: {
    useDefaults: false, // explicit — upgrade-insecure-requests breaks HTTP deployments
    directives: {
      'default-src': ["'self'"],
      'base-uri': ["'self'"],
      'font-src': ["'self'", 'https://fonts.gstatic.com'],
      'form-action': ["'self'"],
      'frame-ancestors': ["'self'"],
      'img-src': ["'self'", 'data:'],
      'object-src': ["'none'"],
      'script-src': ["'self'"],
      'script-src-attr': ["'unsafe-inline'"], // allow onclick handlers
      'style-src': ["'self'", 'https://fonts.googleapis.com'],
    },
  },
}));
app.use(express.json());

// API
app.use('/api', router);

// Static files — served from the repository root, one level above server/
app.use(express.static(join(__dirname, '..')));

// SPA fallback — all non-API, non-asset routes serve index.html
app.get('*', (req, res) => {
  res.sendFile(join(__dirname, '../index.html'));
});

// Error handler — four-argument signature is what Express keys on
app.use((err, _req, res, _next) => {
  console.error(err);
  res.status(500).json({ error: 'internal server error' });
});

// Boot — only when run directly, not when imported by tests
if (process.argv[1] === fileURLToPath(import.meta.url)) {
  restartJobs();
  app.listen(PORT, () => console.log(`catalyst on :${PORT}`));
}
|
||||||
642
tests/api.test.js
Normal file
642
tests/api.test.js
Normal file
@@ -0,0 +1,642 @@
|
|||||||
|
import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest'
|
||||||
|
import request from 'supertest'
|
||||||
|
import { app } from '../server/server.js'
|
||||||
|
import { _resetForTest, createJob } from '../server/db.js'
|
||||||
|
import * as dbModule from '../server/db.js'
|
||||||
|
|
||||||
|
beforeEach(() => _resetForTest())
|
||||||
|
|
||||||
|
// Minimal valid instance payload; individual tests spread-override fields
// (vmid/name must be unique per inserted row).
const base = {
  name: 'traefik',
  vmid: 100,
  state: 'deployed',
  stack: 'production',
  atlas: 0, argus: 0, semaphore: 0, patchmon: 0, tailscale: 0, andromeda: 0,
  tailscale_ip: '',
  hardware_acceleration: 0,
}
|
||||||
|
|
||||||
|
// ── GET /api/instances ────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
describe('GET /api/instances', () => {
|
||||||
|
it('returns empty array when no instances exist', async () => {
|
||||||
|
const res = await request(app).get('/api/instances')
|
||||||
|
expect(res.status).toBe(200)
|
||||||
|
expect(res.body).toEqual([])
|
||||||
|
})
|
||||||
|
|
||||||
|
it('returns all instances sorted by name', async () => {
|
||||||
|
await request(app).post('/api/instances').send({ ...base, vmid: 1, name: 'zebra' })
|
||||||
|
await request(app).post('/api/instances').send({ ...base, vmid: 2, name: 'alpha' })
|
||||||
|
const res = await request(app).get('/api/instances')
|
||||||
|
expect(res.status).toBe(200)
|
||||||
|
expect(res.body).toHaveLength(2)
|
||||||
|
expect(res.body[0].name).toBe('alpha')
|
||||||
|
expect(res.body[1].name).toBe('zebra')
|
||||||
|
})
|
||||||
|
|
||||||
|
it('filters by state', async () => {
|
||||||
|
await request(app).post('/api/instances').send({ ...base, vmid: 1, name: 'a', state: 'deployed' })
|
||||||
|
await request(app).post('/api/instances').send({ ...base, vmid: 2, name: 'b', state: 'degraded' })
|
||||||
|
const res = await request(app).get('/api/instances?state=deployed')
|
||||||
|
expect(res.body).toHaveLength(1)
|
||||||
|
expect(res.body[0].name).toBe('a')
|
||||||
|
})
|
||||||
|
|
||||||
|
it('filters by stack', async () => {
|
||||||
|
await request(app).post('/api/instances').send({ ...base, vmid: 1, name: 'a', stack: 'production' })
|
||||||
|
await request(app).post('/api/instances').send({ ...base, vmid: 2, name: 'b', stack: 'development', state: 'testing' })
|
||||||
|
const res = await request(app).get('/api/instances?stack=development')
|
||||||
|
expect(res.body).toHaveLength(1)
|
||||||
|
expect(res.body[0].name).toBe('b')
|
||||||
|
})
|
||||||
|
|
||||||
|
it('searches by name substring', async () => {
|
||||||
|
await request(app).post('/api/instances').send({ ...base, vmid: 1, name: 'plex' })
|
||||||
|
await request(app).post('/api/instances').send({ ...base, vmid: 2, name: 'gitea' })
|
||||||
|
const res = await request(app).get('/api/instances?search=ple')
|
||||||
|
expect(res.body).toHaveLength(1)
|
||||||
|
expect(res.body[0].name).toBe('plex')
|
||||||
|
})
|
||||||
|
|
||||||
|
it('searches by vmid', async () => {
|
||||||
|
await request(app).post('/api/instances').send({ ...base, vmid: 137, name: 'a' })
|
||||||
|
await request(app).post('/api/instances').send({ ...base, vmid: 200, name: 'b' })
|
||||||
|
const res = await request(app).get('/api/instances?search=137')
|
||||||
|
expect(res.body).toHaveLength(1)
|
||||||
|
expect(res.body[0].vmid).toBe(137)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('combines search and state filters', async () => {
|
||||||
|
await request(app).post('/api/instances').send({ ...base, vmid: 1, name: 'plex', state: 'deployed' })
|
||||||
|
await request(app).post('/api/instances').send({ ...base, vmid: 2, name: 'plex2', state: 'degraded' })
|
||||||
|
const res = await request(app).get('/api/instances?search=plex&state=deployed')
|
||||||
|
expect(res.body).toHaveLength(1)
|
||||||
|
expect(res.body[0].name).toBe('plex')
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
// ── GET /api/instances/stacks ─────────────────────────────────────────────────
|
||||||
|
|
||||||
|
describe('GET /api/instances/stacks', () => {
|
||||||
|
it('returns empty array when no instances exist', async () => {
|
||||||
|
const res = await request(app).get('/api/instances/stacks')
|
||||||
|
expect(res.status).toBe(200)
|
||||||
|
expect(res.body).toEqual([])
|
||||||
|
})
|
||||||
|
|
||||||
|
it('returns unique stacks sorted alphabetically', async () => {
|
||||||
|
await request(app).post('/api/instances').send({ ...base, vmid: 1, name: 'a', stack: 'production' })
|
||||||
|
await request(app).post('/api/instances').send({ ...base, vmid: 2, name: 'b', stack: 'development', state: 'testing' })
|
||||||
|
await request(app).post('/api/instances').send({ ...base, vmid: 3, name: 'c', stack: 'production' })
|
||||||
|
const res = await request(app).get('/api/instances/stacks')
|
||||||
|
expect(res.body).toEqual(['development', 'production'])
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
// ── GET /api/instances/:vmid ──────────────────────────────────────────────────
|
||||||
|
|
||||||
|
describe('GET /api/instances/:vmid', () => {
|
||||||
|
it('returns the instance for a known vmid', async () => {
|
||||||
|
await request(app).post('/api/instances').send({ ...base, vmid: 117, name: 'plex' })
|
||||||
|
const res = await request(app).get('/api/instances/117')
|
||||||
|
expect(res.status).toBe(200)
|
||||||
|
expect(res.body.name).toBe('plex')
|
||||||
|
expect(res.body.vmid).toBe(117)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('returns 404 for unknown vmid', async () => {
|
||||||
|
const res = await request(app).get('/api/instances/999')
|
||||||
|
expect(res.status).toBe(404)
|
||||||
|
expect(res.body.error).toBeDefined()
|
||||||
|
})
|
||||||
|
|
||||||
|
it('returns 400 for non-numeric vmid', async () => {
|
||||||
|
const res = await request(app).get('/api/instances/abc')
|
||||||
|
expect(res.status).toBe(400)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
// ── POST /api/instances ───────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
describe('POST /api/instances', () => {
|
||||||
|
it('creates an instance and returns 201 with the created record', async () => {
|
||||||
|
const res = await request(app).post('/api/instances').send(base)
|
||||||
|
expect(res.status).toBe(201)
|
||||||
|
expect(res.body.name).toBe('traefik')
|
||||||
|
expect(res.body.vmid).toBe(100)
|
||||||
|
expect(res.body.created_at).not.toBeNull()
|
||||||
|
expect(res.body.updated_at).not.toBeNull()
|
||||||
|
})
|
||||||
|
|
||||||
|
it('stores service flags correctly', async () => {
|
||||||
|
const res = await request(app).post('/api/instances').send({ ...base, atlas: 1, tailscale: 1, hardware_acceleration: 1 })
|
||||||
|
expect(res.body.atlas).toBe(1)
|
||||||
|
expect(res.body.tailscale).toBe(1)
|
||||||
|
expect(res.body.hardware_acceleration).toBe(1)
|
||||||
|
expect(res.body.argus).toBe(0)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('returns 409 for duplicate vmid', async () => {
|
||||||
|
await request(app).post('/api/instances').send(base)
|
||||||
|
const res = await request(app).post('/api/instances').send({ ...base, name: 'other' })
|
||||||
|
expect(res.status).toBe(409)
|
||||||
|
expect(res.body.error).toMatch(/vmid/)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('returns 400 when name is missing', async () => {
|
||||||
|
const res = await request(app).post('/api/instances').send({ ...base, name: '' })
|
||||||
|
expect(res.status).toBe(400)
|
||||||
|
expect(res.body.errors).toBeInstanceOf(Array)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('returns 400 for vmid less than 1', async () => {
|
||||||
|
const res = await request(app).post('/api/instances').send({ ...base, vmid: 0 })
|
||||||
|
expect(res.status).toBe(400)
|
||||||
|
expect(res.body.errors).toBeInstanceOf(Array)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('returns 400 for invalid state', async () => {
|
||||||
|
const res = await request(app).post('/api/instances').send({ ...base, state: 'invalid' })
|
||||||
|
expect(res.status).toBe(400)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('returns 400 for invalid stack', async () => {
|
||||||
|
const res = await request(app).post('/api/instances').send({ ...base, stack: 'invalid' })
|
||||||
|
expect(res.status).toBe(400)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('trims whitespace from name', async () => {
|
||||||
|
const res = await request(app).post('/api/instances').send({ ...base, name: ' plex ' })
|
||||||
|
expect(res.status).toBe(201)
|
||||||
|
expect(res.body.name).toBe('plex')
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
// ── PUT /api/instances/:vmid ──────────────────────────────────────────────────
|
||||||
|
|
||||||
|
describe('PUT /api/instances/:vmid', () => {
|
||||||
|
it('updates fields and returns the updated record', async () => {
|
||||||
|
await request(app).post('/api/instances').send(base)
|
||||||
|
const res = await request(app).put('/api/instances/100').send({ ...base, name: 'updated', state: 'degraded' })
|
||||||
|
expect(res.status).toBe(200)
|
||||||
|
expect(res.body.name).toBe('updated')
|
||||||
|
expect(res.body.state).toBe('degraded')
|
||||||
|
})
|
||||||
|
|
||||||
|
it('can change the vmid', async () => {
|
||||||
|
await request(app).post('/api/instances').send(base)
|
||||||
|
await request(app).put('/api/instances/100').send({ ...base, vmid: 200 })
|
||||||
|
expect((await request(app).get('/api/instances/100')).status).toBe(404)
|
||||||
|
expect((await request(app).get('/api/instances/200')).status).toBe(200)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('returns 404 for unknown vmid', async () => {
|
||||||
|
const res = await request(app).put('/api/instances/999').send(base)
|
||||||
|
expect(res.status).toBe(404)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('returns 400 for validation errors', async () => {
|
||||||
|
await request(app).post('/api/instances').send(base)
|
||||||
|
const res = await request(app).put('/api/instances/100').send({ ...base, name: '' })
|
||||||
|
expect(res.status).toBe(400)
|
||||||
|
expect(res.body.errors).toBeInstanceOf(Array)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('returns 409 when new vmid conflicts with an existing instance', async () => {
|
||||||
|
await request(app).post('/api/instances').send({ ...base, vmid: 100, name: 'a' })
|
||||||
|
await request(app).post('/api/instances').send({ ...base, vmid: 200, name: 'b' })
|
||||||
|
const res = await request(app).put('/api/instances/100').send({ ...base, vmid: 200 })
|
||||||
|
expect(res.status).toBe(409)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
// ── DELETE /api/instances/:vmid ───────────────────────────────────────────────
|
||||||
|
|
||||||
|
describe('DELETE /api/instances/:vmid', () => {
|
||||||
|
it('deletes a development instance and returns 204', async () => {
|
||||||
|
await request(app).post('/api/instances').send({ ...base, stack: 'development', state: 'testing' })
|
||||||
|
const res = await request(app).delete('/api/instances/100')
|
||||||
|
expect(res.status).toBe(204)
|
||||||
|
expect((await request(app).get('/api/instances/100')).status).toBe(404)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('returns 422 when attempting to delete a production instance', async () => {
|
||||||
|
await request(app).post('/api/instances').send({ ...base, stack: 'production' })
|
||||||
|
const res = await request(app).delete('/api/instances/100')
|
||||||
|
expect(res.status).toBe(422)
|
||||||
|
expect(res.body.error).toMatch(/development/)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('returns 404 for unknown vmid', async () => {
|
||||||
|
const res = await request(app).delete('/api/instances/999')
|
||||||
|
expect(res.status).toBe(404)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('returns 400 for non-numeric vmid', async () => {
|
||||||
|
const res = await request(app).delete('/api/instances/abc')
|
||||||
|
expect(res.status).toBe(400)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
// ── GET /api/instances/:vmid/history ─────────────────────────────────────────
|
||||||
|
|
||||||
|
describe('GET /api/instances/:vmid/history', () => {
|
||||||
|
it('returns history events for a known vmid', async () => {
|
||||||
|
await request(app).post('/api/instances').send(base)
|
||||||
|
const res = await request(app).get('/api/instances/100/history')
|
||||||
|
expect(res.status).toBe(200)
|
||||||
|
expect(res.body).toBeInstanceOf(Array)
|
||||||
|
expect(res.body[0].field).toBe('created')
|
||||||
|
})
|
||||||
|
|
||||||
|
it('returns 404 for unknown vmid', async () => {
|
||||||
|
expect((await request(app).get('/api/instances/999/history')).status).toBe(404)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('returns 400 for non-numeric vmid', async () => {
|
||||||
|
expect((await request(app).get('/api/instances/abc/history')).status).toBe(400)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
// ── GET /api/export ───────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
describe('GET /api/export', () => {
|
||||||
|
it('returns 200 with instances array and attachment header', async () => {
|
||||||
|
await request(app).post('/api/instances').send(base)
|
||||||
|
const res = await request(app).get('/api/export')
|
||||||
|
expect(res.status).toBe(200)
|
||||||
|
expect(res.headers['content-disposition']).toMatch(/attachment/)
|
||||||
|
expect(res.body.instances).toHaveLength(1)
|
||||||
|
expect(res.body.instances[0].name).toBe('traefik')
|
||||||
|
})
|
||||||
|
|
||||||
|
it('returns empty instances array when no data', async () => {
|
||||||
|
const res = await request(app).get('/api/export')
|
||||||
|
expect(res.body.instances).toEqual([])
|
||||||
|
})
|
||||||
|
|
||||||
|
it('returns version 3', async () => {
|
||||||
|
const res = await request(app).get('/api/export')
|
||||||
|
expect(res.body.version).toBe(3)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('includes a history array', async () => {
|
||||||
|
await request(app).post('/api/instances').send(base)
|
||||||
|
const res = await request(app).get('/api/export')
|
||||||
|
expect(res.body.history).toBeInstanceOf(Array)
|
||||||
|
expect(res.body.history.some(e => e.field === 'created')).toBe(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('includes jobs and job_runs arrays', async () => {
|
||||||
|
createJob(testJob)
|
||||||
|
const res = await request(app).get('/api/export')
|
||||||
|
expect(res.body.jobs).toBeInstanceOf(Array)
|
||||||
|
expect(res.body.jobs).toHaveLength(1)
|
||||||
|
expect(res.body.jobs[0].key).toBe('tailscale_sync')
|
||||||
|
expect(res.body.job_runs).toBeInstanceOf(Array)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('exports raw job config without masking', async () => {
|
||||||
|
createJob(testJob)
|
||||||
|
const res = await request(app).get('/api/export')
|
||||||
|
expect(res.body.jobs[0].config).toContain('tskey-test')
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
// ── POST /api/import ──────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
describe('POST /api/import', () => {
|
||||||
|
it('replaces all instances and returns imported count', async () => {
|
||||||
|
await request(app).post('/api/instances').send(base)
|
||||||
|
const res = await request(app).post('/api/import')
|
||||||
|
.send({ instances: [{ ...base, vmid: 999, name: 'imported' }] })
|
||||||
|
expect(res.status).toBe(200)
|
||||||
|
expect(res.body.imported).toBe(1)
|
||||||
|
expect((await request(app).get('/api/instances')).body[0].name).toBe('imported')
|
||||||
|
})
|
||||||
|
|
||||||
|
it('returns 400 if instances is not an array', async () => {
|
||||||
|
expect((await request(app).post('/api/import').send({ instances: 'bad' })).status).toBe(400)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('returns 400 with per-row errors for invalid rows', async () => {
|
||||||
|
const res = await request(app).post('/api/import')
|
||||||
|
.send({ instances: [{ ...base, name: '', vmid: 1 }] })
|
||||||
|
expect(res.status).toBe(400)
|
||||||
|
expect(res.body.errors[0].index).toBe(0)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('returns 400 if body has no instances key', async () => {
|
||||||
|
expect((await request(app).post('/api/import').send({})).status).toBe(400)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('returns 400 (not 500) when a row is missing name', async () => {
|
||||||
|
const res = await request(app).post('/api/import')
|
||||||
|
.send({ instances: [{ ...base, name: undefined, vmid: 1 }] })
|
||||||
|
expect(res.status).toBe(400)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('restores history when history array is provided', async () => {
|
||||||
|
await request(app).post('/api/instances').send(base)
|
||||||
|
const exp = await request(app).get('/api/export')
|
||||||
|
await request(app).post('/api/instances').send({ ...base, vmid: 999, name: 'other' })
|
||||||
|
const res = await request(app).post('/api/import').send({
|
||||||
|
instances: exp.body.instances,
|
||||||
|
history: exp.body.history,
|
||||||
|
})
|
||||||
|
expect(res.status).toBe(200)
|
||||||
|
const hist = await request(app).get('/api/instances/100/history')
|
||||||
|
expect(hist.body.some(e => e.field === 'created')).toBe(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('succeeds with a v1 backup that has no history key', async () => {
|
||||||
|
const res = await request(app).post('/api/import')
|
||||||
|
.send({ instances: [{ ...base, vmid: 1, name: 'legacy' }] })
|
||||||
|
expect(res.status).toBe(200)
|
||||||
|
expect(res.body.imported).toBe(1)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('imports jobs and job_runs and returns imported_jobs count', async () => {
|
||||||
|
const exp = await request(app).get('/api/export')
|
||||||
|
createJob(testJob)
|
||||||
|
const fullExport = await request(app).get('/api/export')
|
||||||
|
const res = await request(app).post('/api/import').send({
|
||||||
|
instances: fullExport.body.instances,
|
||||||
|
history: fullExport.body.history,
|
||||||
|
jobs: fullExport.body.jobs,
|
||||||
|
job_runs: fullExport.body.job_runs,
|
||||||
|
})
|
||||||
|
expect(res.status).toBe(200)
|
||||||
|
expect(res.body.imported_jobs).toBe(1)
|
||||||
|
expect((await request(app).get('/api/jobs')).body).toHaveLength(1)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('leaves jobs untouched when no jobs key in payload', async () => {
|
||||||
|
createJob(testJob)
|
||||||
|
await request(app).post('/api/import')
|
||||||
|
.send({ instances: [{ ...base, vmid: 1, name: 'x' }] })
|
||||||
|
expect((await request(app).get('/api/jobs')).body).toHaveLength(1)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
// ── Static assets & SPA routing ───────────────────────────────────────────────
|
||||||
|
|
||||||
|
describe('static assets and SPA routing', () => {
|
||||||
|
it('serves index.html at root', async () => {
|
||||||
|
const res = await request(app).get('/')
|
||||||
|
expect(res.status).toBe(200)
|
||||||
|
expect(res.headers['content-type']).toMatch(/html/)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('serves index.html for deep SPA routes (e.g. /instance/117)', async () => {
|
||||||
|
const res = await request(app).get('/instance/117')
|
||||||
|
expect(res.status).toBe(200)
|
||||||
|
expect(res.headers['content-type']).toMatch(/html/)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('serves CSS with correct content-type (not sniffed as HTML)', async () => {
|
||||||
|
const res = await request(app).get('/css/app.css')
|
||||||
|
expect(res.status).toBe(200)
|
||||||
|
expect(res.headers['content-type']).toMatch(/text\/css/)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('does not set upgrade-insecure-requests in CSP (HTTP deployments must work)', async () => {
|
||||||
|
const res = await request(app).get('/')
|
||||||
|
const csp = res.headers['content-security-policy'] ?? ''
|
||||||
|
expect(csp).not.toContain('upgrade-insecure-requests')
|
||||||
|
})
|
||||||
|
|
||||||
|
it('allows inline event handlers in CSP (onclick attributes)', async () => {
|
||||||
|
const res = await request(app).get('/')
|
||||||
|
const csp = res.headers['content-security-policy'] ?? ''
|
||||||
|
// script-src-attr must not be 'none' — that blocks onclick handlers
|
||||||
|
expect(csp).not.toContain("script-src-attr 'none'")
|
||||||
|
})
|
||||||
|
|
||||||
|
it('index.html contains base href / for correct asset resolution on deep routes', async () => {
|
||||||
|
const res = await request(app).get('/')
|
||||||
|
expect(res.text).toContain('<base href="/">')
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
// ── Error handling — unexpected DB failures ───────────────────────────────────
|
||||||
|
|
||||||
|
const dbError = () => Object.assign(
|
||||||
|
new Error('attempt to write a readonly database'),
|
||||||
|
{ code: 'ERR_SQLITE_ERROR', errcode: 8 }
|
||||||
|
)
|
||||||
|
|
||||||
|
describe('error handling — unexpected DB failures', () => {
|
||||||
|
let consoleSpy
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
consoleSpy = vi.spyOn(console, 'error').mockImplementation(() => {})
|
||||||
|
})
|
||||||
|
|
||||||
|
afterEach(() => {
|
||||||
|
vi.restoreAllMocks()
|
||||||
|
})
|
||||||
|
|
||||||
|
it('POST returns 500 with friendly message when DB throws unexpectedly', async () => {
|
||||||
|
vi.spyOn(dbModule, 'createInstance').mockImplementationOnce(() => { throw dbError() })
|
||||||
|
const res = await request(app).post('/api/instances').send(base)
|
||||||
|
expect(res.status).toBe(500)
|
||||||
|
expect(res.body).toEqual({ error: 'internal server error' })
|
||||||
|
})
|
||||||
|
|
||||||
|
it('POST logs the error with route context when DB throws unexpectedly', async () => {
|
||||||
|
vi.spyOn(dbModule, 'createInstance').mockImplementationOnce(() => { throw dbError() })
|
||||||
|
await request(app).post('/api/instances').send(base)
|
||||||
|
expect(consoleSpy).toHaveBeenCalledWith(
|
||||||
|
expect.stringContaining('POST /api/instances'),
|
||||||
|
expect.any(Error)
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('PUT returns 500 with friendly message when DB throws unexpectedly', async () => {
|
||||||
|
await request(app).post('/api/instances').send(base)
|
||||||
|
vi.spyOn(dbModule, 'updateInstance').mockImplementationOnce(() => { throw dbError() })
|
||||||
|
const res = await request(app).put('/api/instances/100').send(base)
|
||||||
|
expect(res.status).toBe(500)
|
||||||
|
expect(res.body).toEqual({ error: 'internal server error' })
|
||||||
|
})
|
||||||
|
|
||||||
|
it('PUT logs the error with route context when DB throws unexpectedly', async () => {
|
||||||
|
await request(app).post('/api/instances').send(base)
|
||||||
|
vi.spyOn(dbModule, 'updateInstance').mockImplementationOnce(() => { throw dbError() })
|
||||||
|
await request(app).put('/api/instances/100').send(base)
|
||||||
|
expect(consoleSpy).toHaveBeenCalledWith(
|
||||||
|
expect.stringContaining('PUT /api/instances/:vmid'),
|
||||||
|
expect.any(Error)
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('DELETE returns 500 with friendly message when DB throws unexpectedly', async () => {
|
||||||
|
await request(app).post('/api/instances').send({ ...base, stack: 'development', state: 'testing' })
|
||||||
|
vi.spyOn(dbModule, 'deleteInstance').mockImplementationOnce(() => { throw dbError() })
|
||||||
|
const res = await request(app).delete('/api/instances/100')
|
||||||
|
expect(res.status).toBe(500)
|
||||||
|
expect(res.body).toEqual({ error: 'internal server error' })
|
||||||
|
})
|
||||||
|
|
||||||
|
it('DELETE logs the error with route context when DB throws unexpectedly', async () => {
|
||||||
|
await request(app).post('/api/instances').send({ ...base, stack: 'development', state: 'testing' })
|
||||||
|
vi.spyOn(dbModule, 'deleteInstance').mockImplementationOnce(() => { throw dbError() })
|
||||||
|
await request(app).delete('/api/instances/100')
|
||||||
|
expect(consoleSpy).toHaveBeenCalledWith(
|
||||||
|
expect.stringContaining('DELETE /api/instances/:vmid'),
|
||||||
|
expect.any(Error)
|
||||||
|
)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
const testJob = {
|
||||||
|
key: 'tailscale_sync', name: 'Tailscale Sync', description: 'Test job',
|
||||||
|
enabled: 0, schedule: 15,
|
||||||
|
config: JSON.stringify({ api_key: 'tskey-test', tailnet: 'example.com' }),
|
||||||
|
}
|
||||||
|
|
||||||
|
const patchmonJob = {
|
||||||
|
key: 'patchmon_sync', name: 'Patchmon Sync', description: 'Test patchmon job',
|
||||||
|
enabled: 0, schedule: 60,
|
||||||
|
config: JSON.stringify({ api_url: 'http://patchmon:3000/api/v1/api/hosts', api_token: 'secret-token' }),
|
||||||
|
}
|
||||||
|
|
||||||
|
// ── GET /api/jobs ─────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
describe('GET /api/jobs', () => {
|
||||||
|
it('returns empty array when no jobs', async () => {
|
||||||
|
const res = await request(app).get('/api/jobs')
|
||||||
|
expect(res.status).toBe(200)
|
||||||
|
expect(res.body).toEqual([])
|
||||||
|
})
|
||||||
|
|
||||||
|
it('returns jobs with masked api key', async () => {
|
||||||
|
createJob(testJob)
|
||||||
|
const res = await request(app).get('/api/jobs')
|
||||||
|
expect(res.body).toHaveLength(1)
|
||||||
|
expect(res.body[0].config.api_key).toBe('**REDACTED**')
|
||||||
|
})
|
||||||
|
|
||||||
|
it('returns jobs with masked api_token', async () => {
|
||||||
|
createJob(patchmonJob)
|
||||||
|
const res = await request(app).get('/api/jobs')
|
||||||
|
expect(res.body[0].config.api_token).toBe('**REDACTED**')
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
// ── GET /api/jobs/:id ─────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
describe('GET /api/jobs/:id', () => {
|
||||||
|
it('returns job with runs array', async () => {
|
||||||
|
createJob(testJob)
|
||||||
|
const id = (await request(app).get('/api/jobs')).body[0].id
|
||||||
|
const res = await request(app).get(`/api/jobs/${id}`)
|
||||||
|
expect(res.status).toBe(200)
|
||||||
|
expect(res.body.runs).toBeInstanceOf(Array)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('returns 404 for unknown id', async () => {
|
||||||
|
expect((await request(app).get('/api/jobs/999')).status).toBe(404)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('returns 400 for non-numeric id', async () => {
|
||||||
|
expect((await request(app).get('/api/jobs/abc')).status).toBe(400)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
// ── PUT /api/jobs/:id ─────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
describe('PUT /api/jobs/:id', () => {
|
||||||
|
it('updates enabled and schedule', async () => {
|
||||||
|
createJob(testJob)
|
||||||
|
const id = (await request(app).get('/api/jobs')).body[0].id
|
||||||
|
const res = await request(app).put(`/api/jobs/${id}`).send({ enabled: true, schedule: 30 })
|
||||||
|
expect(res.status).toBe(200)
|
||||||
|
expect(res.body.enabled).toBe(1)
|
||||||
|
expect(res.body.schedule).toBe(30)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('does not overwrite api_key when **REDACTED** is sent', async () => {
|
||||||
|
createJob(testJob)
|
||||||
|
const id = (await request(app).get('/api/jobs')).body[0].id
|
||||||
|
await request(app).put(`/api/jobs/${id}`).send({ config: { api_key: '**REDACTED**' } })
|
||||||
|
expect(dbModule.getJob(id).config).toContain('tskey-test')
|
||||||
|
})
|
||||||
|
|
||||||
|
it('returns 404 for unknown id', async () => {
|
||||||
|
expect((await request(app).put('/api/jobs/999').send({})).status).toBe(404)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
// ── POST /api/jobs/:id/run ────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
describe('POST /api/jobs/:id/run', () => {
|
||||||
|
afterEach(() => vi.unstubAllGlobals())
|
||||||
|
|
||||||
|
it('returns 404 for unknown id', async () => {
|
||||||
|
expect((await request(app).post('/api/jobs/999/run')).status).toBe(404)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('runs job, returns summary, and logs the run', async () => {
|
||||||
|
createJob(testJob)
|
||||||
|
const id = (await request(app).get('/api/jobs')).body[0].id
|
||||||
|
vi.stubGlobal('fetch', vi.fn().mockResolvedValueOnce({
|
||||||
|
ok: true,
|
||||||
|
json: async () => ({ devices: [] }),
|
||||||
|
}))
|
||||||
|
const res = await request(app).post(`/api/jobs/${id}/run`)
|
||||||
|
expect(res.status).toBe(200)
|
||||||
|
expect(res.body.summary).toBeDefined()
|
||||||
|
const detail = await request(app).get(`/api/jobs/${id}`)
|
||||||
|
expect(detail.body.runs).toHaveLength(1)
|
||||||
|
expect(detail.body.runs[0].status).toBe('success')
|
||||||
|
})
|
||||||
|
|
||||||
|
it('logs error run on failure', async () => {
|
||||||
|
createJob(testJob)
|
||||||
|
const id = (await request(app).get('/api/jobs')).body[0].id
|
||||||
|
vi.stubGlobal('fetch', vi.fn().mockRejectedValueOnce(new Error('network error')))
|
||||||
|
const res = await request(app).post(`/api/jobs/${id}/run`)
|
||||||
|
expect(res.status).toBe(500)
|
||||||
|
const detail = await request(app).get(`/api/jobs/${id}`)
|
||||||
|
expect(detail.body.runs[0].status).toBe('error')
|
||||||
|
})
|
||||||
|
|
||||||
|
it('patchmon_sync: marks instances present in host list as patchmon=1', async () => {
|
||||||
|
createJob(patchmonJob)
|
||||||
|
const id = (await request(app).get('/api/jobs')).body[0].id
|
||||||
|
vi.stubGlobal('fetch', vi.fn().mockResolvedValueOnce({
|
||||||
|
ok: true,
|
||||||
|
json: async () => [{ name: 'plex' }, { name: 'traefik' }],
|
||||||
|
}))
|
||||||
|
const res = await request(app).post(`/api/jobs/${id}/run`)
|
||||||
|
expect(res.status).toBe(200)
|
||||||
|
expect(res.body.summary).toMatch(/updated of/)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('patchmon_sync: returns 500 when API token is missing', async () => {
|
||||||
|
createJob({ ...patchmonJob, config: JSON.stringify({ api_url: 'http://patchmon:3000/api/v1/api/hosts', api_token: '' }) })
|
||||||
|
const id = (await request(app).get('/api/jobs')).body[0].id
|
||||||
|
const res = await request(app).post(`/api/jobs/${id}/run`)
|
||||||
|
expect(res.status).toBe(500)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('semaphore_sync: parses ansible inventory and updates instances', async () => {
|
||||||
|
const semaphoreJob = {
|
||||||
|
key: 'semaphore_sync', name: 'Semaphore Sync', description: 'test',
|
||||||
|
enabled: 0, schedule: 60,
|
||||||
|
config: JSON.stringify({ api_url: 'http://semaphore:3000/api/project/1/inventory/1', api_token: 'bearer-token' }),
|
||||||
|
}
|
||||||
|
createJob(semaphoreJob)
|
||||||
|
const id = (await request(app).get('/api/jobs')).body[0].id
|
||||||
|
vi.stubGlobal('fetch', vi.fn().mockResolvedValueOnce({
|
||||||
|
ok: true,
|
||||||
|
json: async () => ({ inventory: '[production]\nplex\nhomeassistant\n' }),
|
||||||
|
}))
|
||||||
|
const res = await request(app).post(`/api/jobs/${id}/run`)
|
||||||
|
expect(res.status).toBe(200)
|
||||||
|
expect(res.body.summary).toMatch(/updated of/)
|
||||||
|
})
|
||||||
|
})
|
||||||
556
tests/db.test.js
556
tests/db.test.js
@@ -1,250 +1,388 @@
|
|||||||
import { describe, it, expect, beforeEach } from 'vitest'
|
import { describe, it, expect, beforeEach } from 'vitest'
|
||||||
import initSqlJs from 'sql.js'
|
import {
|
||||||
|
_resetForTest,
|
||||||
|
getInstances, getInstance, getDistinctStacks,
|
||||||
|
createInstance, updateInstance, deleteInstance, importInstances, getInstanceHistory,
|
||||||
|
getConfig, setConfig,
|
||||||
|
getJobs, getJob, createJob, updateJob, createJobRun, completeJobRun, getJobRuns,
|
||||||
|
} from '../server/db.js'
|
||||||
|
|
||||||
// ── Schema (mirrors db.js) ────────────────────────────────────────────────────
|
beforeEach(() => _resetForTest());
|
||||||
|
|
||||||
const SCHEMA = `
|
// ── getInstances ──────────────────────────────────────────────────────────────
|
||||||
CREATE TABLE instances (
|
|
||||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
||||||
name TEXT NOT NULL,
|
|
||||||
state TEXT DEFAULT 'deployed',
|
|
||||||
stack TEXT DEFAULT '',
|
|
||||||
vmid INTEGER UNIQUE NOT NULL,
|
|
||||||
atlas INTEGER DEFAULT 0,
|
|
||||||
argus INTEGER DEFAULT 0,
|
|
||||||
semaphore INTEGER DEFAULT 0,
|
|
||||||
patchmon INTEGER DEFAULT 0,
|
|
||||||
tailscale INTEGER DEFAULT 0,
|
|
||||||
andromeda INTEGER DEFAULT 0,
|
|
||||||
tailscale_ip TEXT DEFAULT '',
|
|
||||||
hardware_acceleration INTEGER DEFAULT 0,
|
|
||||||
createdAt TEXT DEFAULT (datetime('now')),
|
|
||||||
updatedAt TEXT DEFAULT (datetime('now'))
|
|
||||||
)
|
|
||||||
`
|
|
||||||
|
|
||||||
// ── Helpers ───────────────────────────────────────────────────────────────────
|
|
||||||
|
|
||||||
let db
|
|
||||||
|
|
||||||
beforeEach(async () => {
|
|
||||||
const SQL = await initSqlJs()
|
|
||||||
db = new SQL.Database()
|
|
||||||
db.run(SCHEMA)
|
|
||||||
})
|
|
||||||
|
|
||||||
function rows(res) {
|
|
||||||
if (!res.length) return []
|
|
||||||
const cols = res[0].columns
|
|
||||||
return res[0].values.map(row => Object.fromEntries(cols.map((c, i) => [c, row[i]])))
|
|
||||||
}
|
|
||||||
|
|
||||||
function insert(overrides = {}) {
|
|
||||||
const defaults = {
|
|
||||||
name: 'test-instance', state: 'deployed', stack: 'production', vmid: 100,
|
|
||||||
atlas: 0, argus: 0, semaphore: 0, patchmon: 0,
|
|
||||||
tailscale: 0, andromeda: 0, tailscale_ip: '', hardware_acceleration: 0,
|
|
||||||
}
|
|
||||||
const d = { ...defaults, ...overrides }
|
|
||||||
db.run(
|
|
||||||
`INSERT INTO instances
|
|
||||||
(name, state, stack, vmid, atlas, argus, semaphore, patchmon, tailscale, andromeda, tailscale_ip, hardware_acceleration)
|
|
||||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
|
|
||||||
[d.name, d.state, d.stack, d.vmid, d.atlas, d.argus, d.semaphore,
|
|
||||||
d.patchmon, d.tailscale, d.andromeda, d.tailscale_ip, d.hardware_acceleration]
|
|
||||||
)
|
|
||||||
return d
|
|
||||||
}
|
|
||||||
|
|
||||||
function getInstances(filters = {}) {
|
|
||||||
let sql = 'SELECT * FROM instances WHERE 1=1'
|
|
||||||
const params = []
|
|
||||||
if (filters.search) {
|
|
||||||
sql += ' AND (name LIKE ? OR CAST(vmid AS TEXT) LIKE ? OR stack LIKE ?)'
|
|
||||||
const s = `%${filters.search}%`
|
|
||||||
params.push(s, s, s)
|
|
||||||
}
|
|
||||||
if (filters.state) { sql += ' AND state = ?'; params.push(filters.state) }
|
|
||||||
if (filters.stack) { sql += ' AND stack = ?'; params.push(filters.stack) }
|
|
||||||
sql += ' ORDER BY name ASC'
|
|
||||||
return rows(db.exec(sql, params))
|
|
||||||
}
|
|
||||||
|
|
||||||
function getInstance(vmid) {
|
|
||||||
const res = rows(db.exec('SELECT * FROM instances WHERE vmid = ?', [vmid]))
|
|
||||||
return res[0] ?? null
|
|
||||||
}
|
|
||||||
|
|
||||||
function getDistinctStacks() {
|
|
||||||
const res = db.exec(`SELECT DISTINCT stack FROM instances WHERE stack != '' ORDER BY stack`)
|
|
||||||
if (!res.length) return []
|
|
||||||
return res[0].values.map(r => r[0])
|
|
||||||
}
|
|
||||||
|
|
||||||
// ── Tests ─────────────────────────────────────────────────────────────────────
|
|
||||||
|
|
||||||
describe('getInstances', () => {
|
describe('getInstances', () => {
|
||||||
it('returns empty array when no instances exist', () => {
|
it('returns empty array when table is empty', () => {
|
||||||
expect(getInstances()).toEqual([])
|
expect(getInstances()).toEqual([]);
|
||||||
})
|
});
|
||||||
|
|
||||||
it('returns all instances sorted by name', () => {
|
it('returns all instances sorted by name', () => {
|
||||||
insert({ name: 'zebra', vmid: 1 })
|
createInstance({ name: 'zebra', state: 'deployed', stack: 'production', vmid: 1, atlas: 0, argus: 0, semaphore: 0, patchmon: 0, tailscale: 0, andromeda: 0, tailscale_ip: '', hardware_acceleration: 0 });
|
||||||
insert({ name: 'alpha', vmid: 2 })
|
createInstance({ name: 'alpha', state: 'deployed', stack: 'production', vmid: 2, atlas: 0, argus: 0, semaphore: 0, patchmon: 0, tailscale: 0, andromeda: 0, tailscale_ip: '', hardware_acceleration: 0 });
|
||||||
const result = getInstances()
|
const result = getInstances();
|
||||||
expect(result).toHaveLength(2)
|
expect(result[0].name).toBe('alpha');
|
||||||
expect(result[0].name).toBe('alpha')
|
expect(result[1].name).toBe('zebra');
|
||||||
expect(result[1].name).toBe('zebra')
|
});
|
||||||
})
|
|
||||||
|
|
||||||
it('filters by state', () => {
|
it('filters by state', () => {
|
||||||
insert({ name: 'a', vmid: 1, state: 'deployed' })
|
createInstance({ name: 'a', state: 'deployed', stack: 'production', vmid: 1, atlas: 0, argus: 0, semaphore: 0, patchmon: 0, tailscale: 0, andromeda: 0, tailscale_ip: '', hardware_acceleration: 0 });
|
||||||
insert({ name: 'b', vmid: 2, state: 'degraded' })
|
createInstance({ name: 'b', state: 'degraded', stack: 'production', vmid: 2, atlas: 0, argus: 0, semaphore: 0, patchmon: 0, tailscale: 0, andromeda: 0, tailscale_ip: '', hardware_acceleration: 0 });
|
||||||
insert({ name: 'c', vmid: 3, state: 'testing' })
|
createInstance({ name: 'c', state: 'testing', stack: 'development', vmid: 3, atlas: 0, argus: 0, semaphore: 0, patchmon: 0, tailscale: 0, andromeda: 0, tailscale_ip: '', hardware_acceleration: 0 });
|
||||||
expect(getInstances({ state: 'deployed' })).toHaveLength(1)
|
expect(getInstances({ state: 'deployed' })).toHaveLength(1);
|
||||||
expect(getInstances({ state: 'degraded' })).toHaveLength(1)
|
expect(getInstances({ state: 'degraded' })).toHaveLength(1);
|
||||||
expect(getInstances({ state: 'testing' })).toHaveLength(1)
|
expect(getInstances({ state: 'testing' })).toHaveLength(1);
|
||||||
})
|
});
|
||||||
|
|
||||||
it('filters by stack', () => {
|
it('filters by stack', () => {
|
||||||
insert({ name: 'a', vmid: 1, stack: 'production' })
|
createInstance({ name: 'a', state: 'deployed', stack: 'production', vmid: 1, atlas: 0, argus: 0, semaphore: 0, patchmon: 0, tailscale: 0, andromeda: 0, tailscale_ip: '', hardware_acceleration: 0 });
|
||||||
insert({ name: 'b', vmid: 2, stack: 'development' })
|
createInstance({ name: 'b', state: 'testing', stack: 'development', vmid: 2, atlas: 0, argus: 0, semaphore: 0, patchmon: 0, tailscale: 0, andromeda: 0, tailscale_ip: '', hardware_acceleration: 0 });
|
||||||
expect(getInstances({ stack: 'production' })).toHaveLength(1)
|
expect(getInstances({ stack: 'production' })).toHaveLength(1);
|
||||||
expect(getInstances({ stack: 'development' })).toHaveLength(1)
|
expect(getInstances({ stack: 'development' })).toHaveLength(1);
|
||||||
})
|
});
|
||||||
|
|
||||||
it('searches by name', () => {
|
it('searches by name', () => {
|
||||||
insert({ name: 'plex', vmid: 1 })
|
createInstance({ name: 'plex', state: 'deployed', stack: 'production', vmid: 1, atlas: 0, argus: 0, semaphore: 0, patchmon: 0, tailscale: 0, andromeda: 0, tailscale_ip: '', hardware_acceleration: 0 });
|
||||||
insert({ name: 'gitea', vmid: 2 })
|
createInstance({ name: 'gitea', state: 'deployed', stack: 'production', vmid: 2, atlas: 0, argus: 0, semaphore: 0, patchmon: 0, tailscale: 0, andromeda: 0, tailscale_ip: '', hardware_acceleration: 0 });
|
||||||
expect(getInstances({ search: 'ple' })).toHaveLength(1)
|
expect(getInstances({ search: 'ple' })).toHaveLength(1);
|
||||||
expect(getInstances({ search: 'ple' })[0].name).toBe('plex')
|
expect(getInstances({ search: 'ple' })[0].name).toBe('plex');
|
||||||
})
|
});
|
||||||
|
|
||||||
it('searches by vmid', () => {
|
it('searches by vmid', () => {
|
||||||
insert({ name: 'a', vmid: 137 })
|
createInstance({ name: 'a', state: 'deployed', stack: 'production', vmid: 137, atlas: 0, argus: 0, semaphore: 0, patchmon: 0, tailscale: 0, andromeda: 0, tailscale_ip: '', hardware_acceleration: 0 });
|
||||||
insert({ name: 'b', vmid: 200 })
|
createInstance({ name: 'b', state: 'deployed', stack: 'production', vmid: 200, atlas: 0, argus: 0, semaphore: 0, patchmon: 0, tailscale: 0, andromeda: 0, tailscale_ip: '', hardware_acceleration: 0 });
|
||||||
expect(getInstances({ search: '137' })).toHaveLength(1)
|
expect(getInstances({ search: '137' })).toHaveLength(1);
|
||||||
})
|
});
|
||||||
|
|
||||||
it('searches by stack', () => {
|
it('combines filters', () => {
|
||||||
insert({ name: 'a', vmid: 1, stack: 'production' })
|
createInstance({ name: 'plex', state: 'deployed', stack: 'production', vmid: 1, atlas: 0, argus: 0, semaphore: 0, patchmon: 0, tailscale: 0, andromeda: 0, tailscale_ip: '', hardware_acceleration: 0 });
|
||||||
insert({ name: 'b', vmid: 2, stack: 'development' })
|
createInstance({ name: 'plex2', state: 'degraded', stack: 'production', vmid: 2, atlas: 0, argus: 0, semaphore: 0, patchmon: 0, tailscale: 0, andromeda: 0, tailscale_ip: '', hardware_acceleration: 0 });
|
||||||
expect(getInstances({ search: 'prod' })).toHaveLength(1)
|
expect(getInstances({ search: 'plex', state: 'deployed' })).toHaveLength(1);
|
||||||
})
|
});
|
||||||
|
});
|
||||||
|
|
||||||
it('combines search and state filters', () => {
|
// ── getInstance ───────────────────────────────────────────────────────────────
|
||||||
insert({ name: 'plex', vmid: 1, state: 'deployed' })
|
|
||||||
insert({ name: 'plex2', vmid: 2, state: 'degraded' })
|
|
||||||
expect(getInstances({ search: 'plex', state: 'deployed' })).toHaveLength(1)
|
|
||||||
})
|
|
||||||
|
|
||||||
it('returns empty array when no results match', () => {
|
|
||||||
insert({ name: 'plex', vmid: 1 })
|
|
||||||
expect(getInstances({ search: 'zzz' })).toEqual([])
|
|
||||||
})
|
|
||||||
})
|
|
||||||
|
|
||||||
describe('getInstance', () => {
|
describe('getInstance', () => {
|
||||||
it('returns the instance with the given vmid', () => {
|
it('returns the instance with the given vmid', () => {
|
||||||
insert({ name: 'plex', vmid: 117 })
|
createInstance({ name: 'plex', state: 'deployed', stack: 'production', vmid: 117, atlas: 0, argus: 0, semaphore: 0, patchmon: 0, tailscale: 0, andromeda: 0, tailscale_ip: '', hardware_acceleration: 0 });
|
||||||
const inst = getInstance(117)
|
const inst = getInstance(117);
|
||||||
expect(inst).not.toBeNull()
|
expect(inst).not.toBeNull();
|
||||||
expect(inst.name).toBe('plex')
|
expect(inst.name).toBe('plex');
|
||||||
expect(inst.vmid).toBe(117)
|
expect(inst.vmid).toBe(117);
|
||||||
})
|
});
|
||||||
|
|
||||||
it('returns null for an unknown vmid', () => {
|
it('returns null for unknown vmid', () => {
|
||||||
expect(getInstance(999)).toBeNull()
|
expect(getInstance(999)).toBeNull();
|
||||||
})
|
});
|
||||||
})
|
});
|
||||||
|
|
||||||
|
// ── getDistinctStacks ─────────────────────────────────────────────────────────
|
||||||
|
|
||||||
describe('getDistinctStacks', () => {
|
describe('getDistinctStacks', () => {
|
||||||
it('returns empty array when no instances exist', () => {
|
it('returns empty array when table is empty', () => {
|
||||||
expect(getDistinctStacks()).toEqual([])
|
expect(getDistinctStacks()).toEqual([]);
|
||||||
})
|
});
|
||||||
|
|
||||||
it('returns unique stacks sorted alphabetically', () => {
|
it('returns unique stacks sorted alphabetically', () => {
|
||||||
insert({ vmid: 1, stack: 'production' })
|
createInstance({ name: 'a', state: 'deployed', stack: 'production', vmid: 1, atlas: 0, argus: 0, semaphore: 0, patchmon: 0, tailscale: 0, andromeda: 0, tailscale_ip: '', hardware_acceleration: 0 });
|
||||||
insert({ vmid: 2, stack: 'development' })
|
createInstance({ name: 'b', state: 'testing', stack: 'development', vmid: 2, atlas: 0, argus: 0, semaphore: 0, patchmon: 0, tailscale: 0, andromeda: 0, tailscale_ip: '', hardware_acceleration: 0 });
|
||||||
insert({ vmid: 3, stack: 'production' })
|
createInstance({ name: 'c', state: 'deployed', stack: 'production', vmid: 3, atlas: 0, argus: 0, semaphore: 0, patchmon: 0, tailscale: 0, andromeda: 0, tailscale_ip: '', hardware_acceleration: 0 });
|
||||||
expect(getDistinctStacks()).toEqual(['development', 'production'])
|
expect(getDistinctStacks()).toEqual(['development', 'production']);
|
||||||
})
|
});
|
||||||
|
});
|
||||||
|
|
||||||
it('excludes blank stack values', () => {
|
// ── createInstance ────────────────────────────────────────────────────────────
|
||||||
insert({ vmid: 1, stack: '' })
|
|
||||||
insert({ vmid: 2, stack: 'production' })
|
|
||||||
expect(getDistinctStacks()).toEqual(['production'])
|
|
||||||
})
|
|
||||||
})
|
|
||||||
|
|
||||||
describe('createInstance', () => {
|
describe('createInstance', () => {
|
||||||
it('inserts a new instance', () => {
|
const base = { state: 'deployed', stack: 'production', atlas: 0, argus: 0, semaphore: 0, patchmon: 0, tailscale: 0, andromeda: 0, tailscale_ip: '', hardware_acceleration: 0 };
|
||||||
insert({ name: 'traefik', vmid: 100, stack: 'production', state: 'deployed' })
|
|
||||||
const inst = getInstance(100)
|
it('inserts a new instance and sets timestamps', () => {
|
||||||
expect(inst.name).toBe('traefik')
|
createInstance({ ...base, name: 'traefik', vmid: 100 });
|
||||||
expect(inst.stack).toBe('production')
|
const inst = getInstance(100);
|
||||||
expect(inst.state).toBe('deployed')
|
expect(inst.name).toBe('traefik');
|
||||||
})
|
expect(inst.created_at).not.toBeNull();
|
||||||
|
expect(inst.updated_at).not.toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
it('stores service flags correctly', () => {
|
it('stores service flags correctly', () => {
|
||||||
insert({ vmid: 1, atlas: 1, argus: 0, tailscale: 1, hardware_acceleration: 1 })
|
createInstance({ ...base, name: 'plex', vmid: 1, atlas: 1, tailscale: 1, hardware_acceleration: 1 });
|
||||||
const inst = getInstance(1)
|
const inst = getInstance(1);
|
||||||
expect(inst.atlas).toBe(1)
|
expect(inst.atlas).toBe(1);
|
||||||
expect(inst.argus).toBe(0)
|
expect(inst.argus).toBe(0);
|
||||||
expect(inst.tailscale).toBe(1)
|
expect(inst.tailscale).toBe(1);
|
||||||
expect(inst.hardware_acceleration).toBe(1)
|
expect(inst.hardware_acceleration).toBe(1);
|
||||||
})
|
});
|
||||||
|
|
||||||
it('rejects duplicate vmid', () => {
|
it('rejects duplicate vmid', () => {
|
||||||
insert({ vmid: 100 })
|
createInstance({ ...base, name: 'a', vmid: 100 });
|
||||||
expect(() => insert({ name: 'other', vmid: 100 })).toThrow()
|
expect(() => createInstance({ ...base, name: 'b', vmid: 100 })).toThrow();
|
||||||
})
|
});
|
||||||
|
|
||||||
it('sets createdAt and updatedAt on insert', () => {
|
it('rejects invalid state', () => {
|
||||||
insert({ vmid: 1 })
|
expect(() => createInstance({ ...base, name: 'a', vmid: 1, state: 'invalid' })).toThrow();
|
||||||
const inst = getInstance(1)
|
});
|
||||||
expect(inst.createdAt).not.toBeNull()
|
|
||||||
expect(inst.updatedAt).not.toBeNull()
|
it('rejects invalid stack', () => {
|
||||||
})
|
expect(() => createInstance({ ...base, name: 'a', vmid: 1, stack: 'invalid' })).toThrow();
|
||||||
})
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// ── updateInstance ────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
describe('updateInstance', () => {
|
describe('updateInstance', () => {
|
||||||
it('updates fields on an existing instance', () => {
|
const base = { state: 'deployed', stack: 'production', atlas: 0, argus: 0, semaphore: 0, patchmon: 0, tailscale: 0, andromeda: 0, tailscale_ip: '', hardware_acceleration: 0 };
|
||||||
insert({ name: 'old-name', vmid: 100, state: 'testing', stack: 'development' })
|
|
||||||
const before = getInstance(100)
|
|
||||||
db.run(
|
|
||||||
`UPDATE instances SET name=?, state=?, stack=?, updatedAt=datetime('now') WHERE id=?`,
|
|
||||||
['new-name', 'deployed', 'production', before.id]
|
|
||||||
)
|
|
||||||
const after = getInstance(100)
|
|
||||||
expect(after.name).toBe('new-name')
|
|
||||||
expect(after.state).toBe('deployed')
|
|
||||||
expect(after.stack).toBe('production')
|
|
||||||
})
|
|
||||||
|
|
||||||
it('updates updatedAt on write', () => {
|
it('updates fields and refreshes updated_at', () => {
|
||||||
insert({ vmid: 1 })
|
createInstance({ ...base, name: 'old', vmid: 100 });
|
||||||
const before = getInstance(1)
|
updateInstance(100, { ...base, name: 'new', vmid: 100, state: 'degraded' });
|
||||||
db.run(`UPDATE instances SET name=?, updatedAt=datetime('now') WHERE id=?`, ['updated', before.id])
|
const inst = getInstance(100);
|
||||||
const after = getInstance(1)
|
expect(inst.name).toBe('new');
|
||||||
expect(after.updatedAt).not.toBeNull()
|
expect(inst.state).toBe('degraded');
|
||||||
})
|
});
|
||||||
})
|
|
||||||
|
it('can change vmid', () => {
|
||||||
|
createInstance({ ...base, name: 'a', vmid: 100 });
|
||||||
|
updateInstance(100, { ...base, name: 'a', vmid: 200 });
|
||||||
|
expect(getInstance(100)).toBeNull();
|
||||||
|
expect(getInstance(200)).not.toBeNull();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// ── deleteInstance ────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
describe('deleteInstance', () => {
|
describe('deleteInstance', () => {
|
||||||
|
const base = { state: 'deployed', stack: 'production', atlas: 0, argus: 0, semaphore: 0, patchmon: 0, tailscale: 0, andromeda: 0, tailscale_ip: '', hardware_acceleration: 0 };
|
||||||
|
|
||||||
it('removes the instance', () => {
|
it('removes the instance', () => {
|
||||||
insert({ vmid: 1 })
|
createInstance({ ...base, name: 'a', vmid: 1 });
|
||||||
const inst = getInstance(1)
|
deleteInstance(1);
|
||||||
db.run('DELETE FROM instances WHERE id = ?', [inst.id])
|
expect(getInstance(1)).toBeNull();
|
||||||
expect(getInstance(1)).toBeNull()
|
});
|
||||||
})
|
|
||||||
|
|
||||||
it('only removes the targeted instance', () => {
|
it('only removes the targeted instance', () => {
|
||||||
insert({ name: 'a', vmid: 1 })
|
createInstance({ ...base, name: 'a', vmid: 1 });
|
||||||
insert({ name: 'b', vmid: 2 })
|
createInstance({ ...base, name: 'b', vmid: 2 });
|
||||||
const inst = getInstance(1)
|
deleteInstance(1);
|
||||||
db.run('DELETE FROM instances WHERE id = ?', [inst.id])
|
expect(getInstance(1)).toBeNull();
|
||||||
expect(getInstance(1)).toBeNull()
|
expect(getInstance(2)).not.toBeNull();
|
||||||
expect(getInstance(2)).not.toBeNull()
|
});
|
||||||
})
|
|
||||||
})
|
it('clears history for the deleted instance', () => {
|
||||||
|
createInstance({ ...base, name: 'a', vmid: 1 });
|
||||||
|
deleteInstance(1);
|
||||||
|
expect(getInstanceHistory(1)).toHaveLength(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('does not clear history for other instances', () => {
|
||||||
|
createInstance({ ...base, name: 'a', vmid: 1 });
|
||||||
|
createInstance({ ...base, name: 'b', vmid: 2 });
|
||||||
|
deleteInstance(1);
|
||||||
|
expect(getInstanceHistory(2).length).toBeGreaterThan(0);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// ── importInstances ───────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
describe('importInstances', () => {
|
||||||
|
const base = { state: 'deployed', stack: 'production', atlas: 0, argus: 0, semaphore: 0, patchmon: 0, tailscale: 0, andromeda: 0, tailscale_ip: '', hardware_acceleration: 0 };
|
||||||
|
|
||||||
|
it('replaces all existing instances with the imported set', () => {
|
||||||
|
createInstance({ ...base, name: 'old', vmid: 1 });
|
||||||
|
importInstances([{ ...base, name: 'new', vmid: 2 }]);
|
||||||
|
expect(getInstance(1)).toBeNull();
|
||||||
|
expect(getInstance(2)).not.toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('clears all instances when passed an empty array', () => {
|
||||||
|
createInstance({ ...base, name: 'a', vmid: 1 });
|
||||||
|
importInstances([]);
|
||||||
|
expect(getInstances()).toEqual([]);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('clears history for all replaced instances', () => {
|
||||||
|
createInstance({ ...base, name: 'old', vmid: 1 });
|
||||||
|
importInstances([{ ...base, name: 'new', vmid: 2 }]);
|
||||||
|
expect(getInstanceHistory(1)).toHaveLength(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('restores history rows when provided', () => {
|
||||||
|
importInstances(
|
||||||
|
[{ ...base, name: 'a', vmid: 1 }],
|
||||||
|
[{ vmid: 1, field: 'created', old_value: null, new_value: null, changed_at: '2026-01-01 00:00:00' }]
|
||||||
|
);
|
||||||
|
const h = getInstanceHistory(1);
|
||||||
|
expect(h.some(e => e.field === 'created')).toBe(true);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// ── instance history ─────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
describe('instance history', () => {
|
||||||
|
const base = { state: 'deployed', stack: 'production', atlas: 0, argus: 0, semaphore: 0, patchmon: 0, tailscale: 0, andromeda: 0, tailscale_ip: '', hardware_acceleration: 0 };
|
||||||
|
|
||||||
|
it('logs a created event when an instance is created', () => {
|
||||||
|
createInstance({ ...base, name: 'a', vmid: 1 });
|
||||||
|
const h = getInstanceHistory(1);
|
||||||
|
expect(h).toHaveLength(1);
|
||||||
|
expect(h[0].field).toBe('created');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('logs changed fields when an instance is updated', () => {
|
||||||
|
createInstance({ ...base, name: 'a', vmid: 1 });
|
||||||
|
updateInstance(1, { ...base, name: 'a', vmid: 1, state: 'degraded' });
|
||||||
|
const h = getInstanceHistory(1);
|
||||||
|
const stateEvt = h.find(e => e.field === 'state');
|
||||||
|
expect(stateEvt).toBeDefined();
|
||||||
|
expect(stateEvt.old_value).toBe('deployed');
|
||||||
|
expect(stateEvt.new_value).toBe('degraded');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('logs no events when nothing changes on update', () => {
|
||||||
|
createInstance({ ...base, name: 'a', vmid: 1 });
|
||||||
|
updateInstance(1, { ...base, name: 'a', vmid: 1 });
|
||||||
|
const h = getInstanceHistory(1).filter(e => e.field !== 'created');
|
||||||
|
expect(h).toHaveLength(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('records history under the new vmid when vmid changes', () => {
|
||||||
|
createInstance({ ...base, name: 'a', vmid: 1 });
|
||||||
|
updateInstance(1, { ...base, name: 'a', vmid: 2 });
|
||||||
|
expect(getInstanceHistory(2).some(e => e.field === 'vmid')).toBe(true);
|
||||||
|
expect(getInstanceHistory(1).filter(e => e.field !== 'created')).toHaveLength(0);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// ── Test environment boot isolation ───────────────────────────────────────────
|
||||||
|
|
||||||
|
describe('test environment boot isolation', () => {
|
||||||
|
it('vitest runs with NODE_ENV=test', () => {
|
||||||
|
// Vitest sets NODE_ENV=test automatically. This is the guard condition
|
||||||
|
// that prevents the boot init() from opening the real database file.
|
||||||
|
expect(process.env.NODE_ENV).toBe('test');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('db module loads cleanly in parallel workers without locking the real db file', () => {
|
||||||
|
// Regression: the module-level init(DEFAULT_PATH) used to run unconditionally,
|
||||||
|
// causing "database is locked" when multiple test workers imported db.js at
|
||||||
|
// the same time. process.exit(1) then killed the worker mid-suite.
|
||||||
|
// Fix: boot init is skipped when NODE_ENV=test. _resetForTest() handles setup.
|
||||||
|
// Reaching this line proves the module loaded without calling process.exit.
|
||||||
|
expect(() => _resetForTest()).not.toThrow();
|
||||||
|
expect(getInstances()).toEqual([]);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// ── getConfig / setConfig ─────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
describe('getConfig / setConfig', () => {
|
||||||
|
it('returns defaultVal when key does not exist', () => {
|
||||||
|
expect(getConfig('missing', 'fallback')).toBe('fallback');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns empty string by default', () => {
|
||||||
|
expect(getConfig('missing')).toBe('');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('stores and retrieves a value', () => {
|
||||||
|
setConfig('tailscale_api_key', 'tskey-test');
|
||||||
|
expect(getConfig('tailscale_api_key')).toBe('tskey-test');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('overwrites an existing key', () => {
|
||||||
|
setConfig('tailscale_enabled', '0');
|
||||||
|
setConfig('tailscale_enabled', '1');
|
||||||
|
expect(getConfig('tailscale_enabled')).toBe('1');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('config is cleared by _resetForTest', () => {
|
||||||
|
setConfig('tailscale_api_key', 'tskey-test');
|
||||||
|
_resetForTest();
|
||||||
|
expect(getConfig('tailscale_api_key')).toBe('');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// ── jobs ──────────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
const baseJob = {
|
||||||
|
key: 'test_job', name: 'Test Job', description: 'desc',
|
||||||
|
enabled: 0, schedule: 15, config: '{}',
|
||||||
|
};
|
||||||
|
|
||||||
|
describe('jobs', () => {
|
||||||
|
it('returns empty array when no jobs', () => {
|
||||||
|
expect(getJobs()).toEqual([]);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('createJob + getJobs returns the job', () => {
|
||||||
|
createJob(baseJob);
|
||||||
|
expect(getJobs()).toHaveLength(1);
|
||||||
|
expect(getJobs()[0].name).toBe('Test Job');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('getJob returns null for unknown id', () => {
|
||||||
|
expect(getJob(999)).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('updateJob changes enabled and schedule', () => {
|
||||||
|
createJob(baseJob);
|
||||||
|
const id = getJobs()[0].id;
|
||||||
|
updateJob(id, { enabled: 1, schedule: 30, config: '{}' });
|
||||||
|
expect(getJob(id).enabled).toBe(1);
|
||||||
|
expect(getJob(id).schedule).toBe(30);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('getJobs includes last_status null when no runs', () => {
|
||||||
|
createJob(baseJob);
|
||||||
|
expect(getJobs()[0].last_status).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('getJobs reflects last_status after a run', () => {
|
||||||
|
createJob(baseJob);
|
||||||
|
const id = getJobs()[0].id;
|
||||||
|
const runId = createJobRun(id);
|
||||||
|
completeJobRun(runId, 'success', 'ok');
|
||||||
|
expect(getJobs()[0].last_status).toBe('success');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// ── job_runs ──────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
describe('job_runs', () => {
|
||||||
|
it('createJobRun returns a positive id', () => {
|
||||||
|
createJob(baseJob);
|
||||||
|
const id = getJobs()[0].id;
|
||||||
|
expect(createJobRun(id)).toBeGreaterThan(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('new run has status running and no ended_at', () => {
|
||||||
|
createJob(baseJob);
|
||||||
|
const id = getJobs()[0].id;
|
||||||
|
const runId = createJobRun(id);
|
||||||
|
const runs = getJobRuns(id);
|
||||||
|
expect(runs[0].status).toBe('running');
|
||||||
|
expect(runs[0].ended_at).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('completeJobRun sets status, result, and ended_at', () => {
|
||||||
|
createJob(baseJob);
|
||||||
|
const id = getJobs()[0].id;
|
||||||
|
const runId = createJobRun(id);
|
||||||
|
completeJobRun(runId, 'success', '2 updated of 8');
|
||||||
|
const run = getJobRuns(id)[0];
|
||||||
|
expect(run.status).toBe('success');
|
||||||
|
expect(run.result).toBe('2 updated of 8');
|
||||||
|
expect(run.ended_at).not.toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('getJobRuns returns newest first', () => {
|
||||||
|
createJob(baseJob);
|
||||||
|
const id = getJobs()[0].id;
|
||||||
|
const r1 = createJobRun(id);
|
||||||
|
const r2 = createJobRun(id);
|
||||||
|
completeJobRun(r1, 'success', 'first');
|
||||||
|
completeJobRun(r2, 'error', 'second');
|
||||||
|
const runs = getJobRuns(id);
|
||||||
|
expect(runs[0].id).toBe(r2);
|
||||||
|
expect(runs[1].id).toBe(r1);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|||||||
@@ -1,4 +1,7 @@
|
|||||||
|
// @vitest-environment jsdom
|
||||||
import { describe, it, expect } from 'vitest'
|
import { describe, it, expect } from 'vitest'
|
||||||
|
import { readFileSync } from 'fs'
|
||||||
|
import { join } from 'path'
|
||||||
|
|
||||||
// ── esc() ─────────────────────────────────────────────────────────────────────
|
// ── esc() ─────────────────────────────────────────────────────────────────────
|
||||||
// Mirrors the implementation in ui.js exactly (DOM-based).
|
// Mirrors the implementation in ui.js exactly (DOM-based).
|
||||||
@@ -55,16 +58,22 @@ describe('esc', () => {
|
|||||||
|
|
||||||
// ── fmtDate() ─────────────────────────────────────────────────────────────────
|
// ── fmtDate() ─────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
function fmtDate(d) {
|
function parseUtc(d) {
|
||||||
|
if (typeof d !== 'string') return new Date(d)
|
||||||
|
const hasZone = d.endsWith('Z') || /[+-]\d{2}:\d{2}$/.test(d)
|
||||||
|
return new Date(hasZone ? d : d.replace(' ', 'T') + 'Z')
|
||||||
|
}
|
||||||
|
|
||||||
|
function fmtDate(d, tz = 'UTC') {
|
||||||
if (!d) return '—'
|
if (!d) return '—'
|
||||||
try {
|
try {
|
||||||
return new Date(d).toLocaleDateString('en-US', { year: 'numeric', month: 'short', day: 'numeric' })
|
return parseUtc(d).toLocaleDateString('en-US', { year: 'numeric', month: 'short', day: 'numeric', timeZone: tz })
|
||||||
} catch (e) { return d }
|
} catch (e) { return d }
|
||||||
}
|
}
|
||||||
|
|
||||||
describe('fmtDate', () => {
|
describe('fmtDate', () => {
|
||||||
it('formats a valid ISO date string', () => {
|
it('formats a valid ISO date string', () => {
|
||||||
const result = fmtDate('2024-03-15T00:00:00')
|
const result = fmtDate('2024-03-15T12:00:00Z')
|
||||||
expect(result).toMatch(/Mar/)
|
expect(result).toMatch(/Mar/)
|
||||||
expect(result).toMatch(/15/)
|
expect(result).toMatch(/15/)
|
||||||
expect(result).toMatch(/2024/)
|
expect(result).toMatch(/2024/)
|
||||||
@@ -85,24 +94,42 @@ describe('fmtDate', () => {
|
|||||||
|
|
||||||
// ── fmtDateFull() ─────────────────────────────────────────────────────────────
|
// ── fmtDateFull() ─────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
function fmtDateFull(d) {
|
function fmtDateFull(d, tz = 'UTC') {
|
||||||
if (!d) return '—'
|
if (!d) return '—'
|
||||||
try {
|
try {
|
||||||
return new Date(d).toLocaleString('en-US', {
|
return parseUtc(d).toLocaleString('en-US', {
|
||||||
year: 'numeric', month: 'short', day: 'numeric',
|
year: 'numeric', month: 'short', day: 'numeric',
|
||||||
hour: '2-digit', minute: '2-digit',
|
hour: '2-digit', minute: '2-digit',
|
||||||
|
timeZone: tz, timeZoneName: 'short',
|
||||||
})
|
})
|
||||||
} catch (e) { return d }
|
} catch (e) { return d }
|
||||||
}
|
}
|
||||||
|
|
||||||
describe('fmtDateFull', () => {
|
describe('fmtDateFull', () => {
|
||||||
it('includes date and time components', () => {
|
it('includes date and time components', () => {
|
||||||
const result = fmtDateFull('2024-03-15T14:30:00')
|
const result = fmtDateFull('2024-03-15T14:30:00Z')
|
||||||
expect(result).toMatch(/Mar/)
|
expect(result).toMatch(/Mar/)
|
||||||
expect(result).toMatch(/2024/)
|
expect(result).toMatch(/2024/)
|
||||||
expect(result).toMatch(/\d{1,2}:\d{2}/)
|
expect(result).toMatch(/\d{1,2}:\d{2}/)
|
||||||
})
|
})
|
||||||
|
|
||||||
|
it('includes the timezone abbreviation', () => {
|
||||||
|
expect(fmtDateFull('2024-03-15T14:30:00Z', 'UTC')).toMatch(/UTC/)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('converts to the given timezone', () => {
|
||||||
|
// 2024-03-15 18:30 UTC = 2024-03-15 14:30 EDT (UTC-4 in March)
|
||||||
|
const result = fmtDateFull('2024-03-15T18:30:00Z', 'America/New_York')
|
||||||
|
expect(result).toMatch(/2:30/)
|
||||||
|
expect(result).toMatch(/EDT/)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('treats SQLite-format timestamps (space, no Z) as UTC', () => {
|
||||||
|
// SQLite datetime('now') → 'YYYY-MM-DD HH:MM:SS', no timezone marker.
|
||||||
|
// Must parse identically to the same moment expressed as ISO UTC.
|
||||||
|
expect(fmtDateFull('2024-03-15 18:30:00', 'UTC')).toBe(fmtDateFull('2024-03-15T18:30:00Z', 'UTC'))
|
||||||
|
})
|
||||||
|
|
||||||
it('returns — for null', () => {
|
it('returns — for null', () => {
|
||||||
expect(fmtDateFull(null)).toBe('—')
|
expect(fmtDateFull(null)).toBe('—')
|
||||||
})
|
})
|
||||||
@@ -111,3 +138,111 @@ describe('fmtDateFull', () => {
|
|||||||
expect(fmtDateFull('')).toBe('—')
|
expect(fmtDateFull('')).toBe('—')
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
|
// ── versionLabel() ───────────────────────────────────────────────────────────
|
||||||
|
// Mirrors the logic in app.js — semver strings get a v prefix, dev strings don't.
|
||||||
|
|
||||||
|
function versionLabel(v) {
|
||||||
|
return /^\d/.test(v) ? `v${v}` : v
|
||||||
|
}
|
||||||
|
|
||||||
|
describe('version label formatting', () => {
|
||||||
|
it('prepends v for semver strings', () => {
|
||||||
|
expect(versionLabel('1.1.2')).toBe('v1.1.2')
|
||||||
|
expect(versionLabel('2.0.0')).toBe('v2.0.0')
|
||||||
|
})
|
||||||
|
|
||||||
|
it('does not prepend v for dev build strings', () => {
|
||||||
|
expect(versionLabel('dev-abc1234')).toBe('dev-abc1234')
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
// ── fmtHistVal() ─────────────────────────────────────────────────────────────
|
||||||
|
// Mirrors the logic in ui.js — formats history field values for display.
|
||||||
|
|
||||||
|
const BOOL_FIELDS = ['atlas','argus','semaphore','patchmon','tailscale','andromeda','hardware_acceleration']
|
||||||
|
|
||||||
|
function fmtHistVal(field, val) {
|
||||||
|
if (val == null || val === '') return '—'
|
||||||
|
if (BOOL_FIELDS.includes(field)) return val === '1' ? 'on' : 'off'
|
||||||
|
return val
|
||||||
|
}
|
||||||
|
|
||||||
|
describe('fmtHistVal', () => {
|
||||||
|
it('returns — for null', () => {
|
||||||
|
expect(fmtHistVal('state', null)).toBe('—')
|
||||||
|
})
|
||||||
|
|
||||||
|
it('returns — for empty string', () => {
|
||||||
|
expect(fmtHistVal('state', '')).toBe('—')
|
||||||
|
})
|
||||||
|
|
||||||
|
it('returns on/off for boolean service fields', () => {
|
||||||
|
expect(fmtHistVal('atlas', '1')).toBe('on')
|
||||||
|
expect(fmtHistVal('atlas', '0')).toBe('off')
|
||||||
|
expect(fmtHistVal('hardware_acceleration', '1')).toBe('on')
|
||||||
|
})
|
||||||
|
|
||||||
|
it('returns the value as-is for non-boolean fields', () => {
|
||||||
|
expect(fmtHistVal('state', 'deployed')).toBe('deployed')
|
||||||
|
expect(fmtHistVal('name', 'plex')).toBe('plex')
|
||||||
|
expect(fmtHistVal('tailscale_ip', '100.64.0.1')).toBe('100.64.0.1')
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
// ── stateClass() ─────────────────────────────────────────────────────────────
// Mirrors the logic in ui.js — maps state values to timeline CSS classes.

function stateClass(field, val) {
  // Only the 'state' column carries timeline colouring.
  if (field !== 'state') return ''
  switch (val) {
    case 'deployed':
      return 'tl-deployed'
    case 'testing':
      return 'tl-testing'
    case 'degraded':
      return 'tl-degraded'
    default:
      // Unknown state values get no class.
      return ''
  }
}
|
||||||
|
|
||||||
|
describe('stateClass', () => {
  it('returns empty string for non-state fields', () => {
    expect(stateClass('name', 'plex')).toBe('')
    expect(stateClass('stack', 'production')).toBe('')
  })

  it('returns the correct colour class for each state value', () => {
    const expected = {
      deployed: 'tl-deployed',
      testing: 'tl-testing',
      degraded: 'tl-degraded',
    }
    for (const [state, cls] of Object.entries(expected)) {
      expect(stateClass('state', state)).toBe(cls)
    }
  })

  it('returns empty string for unknown state values', () => {
    expect(stateClass('state', 'unknown')).toBe('')
  })
})
|
||||||
|
|
||||||
|
// ── CSS regressions ───────────────────────────────────────────────────────────

// Stylesheet is read once at module load; all assertions below match against it.
const css = readFileSync(join(__dirname, '../css/app.css'), 'utf8')

describe('CSS regressions', () => {
  // Regression: badges rendered left-aligned inside the card's flex-end column.
  // Without text-align: center, short labels (e.g. "deployed") appear
  // left-justified inside their pill rather than centred.
  it('.badge has text-align: center so state labels are not left-skewed on cards', () => {
    expect(css).toMatch(/\.badge\s*\{[^}]*text-align\s*:\s*center/s)
  })
})
|
||||||
|
|
||||||
|
// ── CI workflow regressions ───────────────────────────────────────────────────

// Workflow file is read once at module load; all assertions below match against it.
const ciYml = readFileSync(join(__dirname, '../.gitea/workflows/ci.yml'), 'utf8')

describe('CI workflow regressions', () => {
  // Regression: dev image showed semver instead of dev-<sha> because
  // BUILD_VERSION was never passed to docker build.
  it('build-dev job passes BUILD_VERSION build arg', () => {
    expect(ciYml).toContain('BUILD_VERSION')
  })

  // Regression: ${GITEA_SHA::7} expands to "" on Gitea runners — nav showed "dev-".
  // git rev-parse --short HEAD works regardless of which env vars the runner sets.
  it('short SHA is computed with git rev-parse, not $GITEA_SHA (which is empty)', () => {
    expect(ciYml).toContain('git rev-parse --short HEAD')
    expect(ciYml).not.toContain('GITEA_SHA')
  })
})
|
||||||
|
|||||||
@@ -2,6 +2,6 @@ import { defineConfig } from 'vitest/config'
|
|||||||
|
|
||||||
export default defineConfig({
|
export default defineConfig({
|
||||||
test: {
|
test: {
|
||||||
environment: 'jsdom',
|
environment: 'node',
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
|
|||||||
Reference in New Issue
Block a user