Compare commits
96 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 07cef73fae | |||
| 1a84edc064 | |||
| bfb2c26821 | |||
| a985268987 | |||
| 218cdb08c5 | |||
| 2855cc7f81 | |||
| 07d2e215e4 | |||
| 8ef839d6d0 | |||
| 7af88328c8 | |||
| 096e2afb3d | |||
| e3d089a71f | |||
| 668e7c34bb | |||
| e796b4f400 | |||
| a4b5c20993 | |||
| d17f364fc5 | |||
| 5f79eec3dd | |||
| ed98bb57c0 | |||
| 120b61a423 | |||
| 074f0600af | |||
| e4f9407827 | |||
| fde5ce7dc1 | |||
| 20df10b333 | |||
| c906511bfc | |||
| 745e5920ad | |||
| 90e0a98914 | |||
| cba4b73798 | |||
| 0d567472a9 | |||
| 9f6b2ece52 | |||
| e3911157e9 | |||
| 0589288dfe | |||
| 8ead7687e5 | |||
| 0e1e9b6699 | |||
| 3c008c5bce | |||
| 1582c28b28 | |||
| bcd934f5b1 | |||
| 4c9acd20c7 | |||
| 520fb98d96 | |||
| 800184d2be | |||
| 82c314f85c | |||
| 2fba532ec7 | |||
| 9177578aaf | |||
| 94c4a0af51 | |||
| ec60d53767 | |||
| ad81d7ace7 | |||
| badd542bd7 | |||
| 7c31ee3327 | |||
| 0ecfa7dbc9 | |||
| f16fb3e088 | |||
| cb01573cdf | |||
| b48d5fb836 | |||
| 6e124576cb | |||
| 1f328e026d | |||
| 71c2c68fbc | |||
| 8bcf8229db | |||
| 6e1e9f7153 | |||
| 1fbb74d1ef | |||
| 617a5b5800 | |||
| 0985d9d481 | |||
| 2af6c56558 | |||
| af207339a4 | |||
| cd16b7ea28 | |||
| 20d8a13375 | |||
| f72aaa52f8 | |||
| dd47d5006e | |||
| 10e25e1803 | |||
| afbdefa549 | |||
| 1a62e2fdd9 | |||
| 1271c061fd | |||
| 7b2a996c21 | |||
| 3233d65db0 | |||
| f1e192c5d4 | |||
| 3037381084 | |||
| e54c1d4848 | |||
| 3ae3f98df5 | |||
| 65d6514603 | |||
| bc44bcbde9 | |||
| cae0f2222a | |||
| 28833a7ec6 | |||
| 6ba02bf17d | |||
| bfe71b2511 | |||
| 0f2a37cb39 | |||
| 73f4eabbc7 | |||
| 515ff8ddb3 | |||
| 08c12c9394 | |||
| 4ce7df4649 | |||
| 6c04a30c3a | |||
| c6cd8098fd | |||
| 15ed329743 | |||
| 1412b2e0b7 | |||
| 30b037ff9c | |||
| 7a5b5d7afc | |||
| 3383bee968 | |||
| 0c30e4bd29 | |||
| 01f83d25f6 | |||
| 79adc365d8 | |||
| 6e40413385 |
@@ -1,7 +1,9 @@
|
||||
{
|
||||
"permissions": {
|
||||
"allow": [
|
||||
"Bash(npm test:*)"
|
||||
"Bash(npm test:*)",
|
||||
"Bash(npm install:*)",
|
||||
"Bash(find /c/Users/josh1/Documents/Code/Catalyst -type f \\\\\\(-name *.test.js -o -name *.spec.js -o -name .env* -o -name *.config.js \\\\\\))"
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,8 +1,10 @@
|
||||
.git
|
||||
.gitea
|
||||
.gitignore
|
||||
Dockerfile
|
||||
.dockerignore
|
||||
docker-compose.yml
|
||||
node_modules
|
||||
tests
|
||||
vitest.config.js
|
||||
data
|
||||
|
||||
@@ -1,84 +0,0 @@
|
||||
name: Build
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main]
|
||||
tags:
|
||||
- 'v*'
|
||||
|
||||
env:
|
||||
IMAGE: ${{ vars.REGISTRY_HOST }}/${{ gitea.repository_owner }}/catalyst
|
||||
|
||||
jobs:
|
||||
test:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: lts/*
|
||||
cache: npm
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm ci
|
||||
|
||||
- name: Run tests
|
||||
run: npm test
|
||||
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
needs: test
|
||||
if: startsWith(gitea.ref, 'refs/tags/v')
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Docker metadata
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: ${{ env.IMAGE }}
|
||||
tags: |
|
||||
type=semver,pattern={{version}}
|
||||
type=semver,pattern={{major}}.{{minor}}
|
||||
type=sha,prefix=,format=short
|
||||
type=raw,value=latest,enable={{is_default_branch}}
|
||||
|
||||
- name: Log in to Gitea registry
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ${{ vars.REGISTRY_HOST }}
|
||||
username: ${{ gitea.actor }}
|
||||
password: ${{ secrets.TOKEN }}
|
||||
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
context: .
|
||||
push: true
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
|
||||
release:
|
||||
runs-on: ubuntu-latest
|
||||
needs: build
|
||||
if: startsWith(gitea.ref, 'refs/tags/v')
|
||||
|
||||
steps:
|
||||
- name: Create release
|
||||
run: |
|
||||
curl -sf -X POST \
|
||||
-H "Authorization: token ${{ secrets.TOKEN }}" \
|
||||
-H "Content-Type: application/json" \
|
||||
"${{ gitea.server_url }}/api/v1/repos/${{ gitea.repository }}/releases" \
|
||||
-d "{
|
||||
\"tag_name\": \"${{ gitea.ref_name }}\",
|
||||
\"name\": \"Catalyst ${{ gitea.ref_name }}\",
|
||||
\"body\": \"### Image\n\n\`${{ env.IMAGE }}:${{ gitea.ref_name }}\`\",
|
||||
\"draft\": false,
|
||||
\"prerelease\": false
|
||||
}"
|
||||
53
.gitea/workflows/ci.yml
Normal file
53
.gitea/workflows/ci.yml
Normal file
@@ -0,0 +1,53 @@
|
||||
name: CI
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [dev, main]
|
||||
pull_request:
|
||||
branches: [dev, main]
|
||||
|
||||
env:
|
||||
IMAGE: ${{ vars.REGISTRY_HOST }}/${{ gitea.repository_owner }}/catalyst
|
||||
|
||||
jobs:
|
||||
test:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 'lts/*'
|
||||
|
||||
- run: npm ci
|
||||
|
||||
- run: npm test
|
||||
|
||||
build-dev:
|
||||
runs-on: ubuntu-latest
|
||||
needs: test
|
||||
if: github.event_name == 'push' && github.ref == 'refs/heads/dev'
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Log in to registry
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ${{ vars.REGISTRY_HOST }}
|
||||
username: ${{ gitea.actor }}
|
||||
password: ${{ secrets.TOKEN }}
|
||||
|
||||
- name: Compute short SHA
|
||||
run: echo "SHORT_SHA=$(git rev-parse --short HEAD)" >> $GITEA_ENV
|
||||
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
context: .
|
||||
push: true
|
||||
build-args: BUILD_VERSION=dev-${{ env.SHORT_SHA }}
|
||||
tags: |
|
||||
${{ env.IMAGE }}:dev
|
||||
${{ env.IMAGE }}:dev-${{ gitea.sha }}
|
||||
109
.gitea/workflows/release.yml
Normal file
109
.gitea/workflows/release.yml
Normal file
@@ -0,0 +1,109 @@
|
||||
name: Release
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main]
|
||||
|
||||
env:
|
||||
IMAGE: ${{ vars.REGISTRY_HOST }}/${{ gitea.repository_owner }}/catalyst
|
||||
|
||||
jobs:
|
||||
release:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 'lts/*'
|
||||
|
||||
- run: npm ci
|
||||
- run: npm test
|
||||
|
||||
- name: Read version
|
||||
run: |
|
||||
VERSION=$(node -p "require('./package.json').version")
|
||||
echo "VERSION=${VERSION}" >> $GITEA_ENV
|
||||
|
||||
- name: Assert tag does not exist
|
||||
run: |
|
||||
if git ls-remote --tags origin "refs/tags/v${{ env.VERSION }}" | grep -q .; then
|
||||
echo "ERROR: tag v${{ env.VERSION }} already exists — bump version in package.json before merging to main."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Create and push tag
|
||||
run: |
|
||||
git config user.name "gitea-actions"
|
||||
git config user.email "actions@gitea"
|
||||
git tag "v${{ env.VERSION }}"
|
||||
git push origin "v${{ env.VERSION }}"
|
||||
|
||||
- name: Generate release notes
|
||||
run: |
|
||||
LAST_TAG=$(git describe --tags --abbrev=0 HEAD^ 2>/dev/null || echo "")
|
||||
if [ -n "$LAST_TAG" ]; then
|
||||
git log "${LAST_TAG}..HEAD" --pretty=format:"- %s" --no-merges > /tmp/release_notes.txt
|
||||
else
|
||||
git log --pretty=format:"- %s" --no-merges > /tmp/release_notes.txt
|
||||
fi
|
||||
|
||||
- name: Docker metadata
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: ${{ env.IMAGE }}
|
||||
tags: |
|
||||
type=semver,pattern={{version}},value=v${{ env.VERSION }}
|
||||
type=semver,pattern={{major}}.{{minor}},value=v${{ env.VERSION }}
|
||||
type=sha,prefix=,format=short
|
||||
type=raw,value=latest
|
||||
|
||||
- name: Log in to registry
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ${{ vars.REGISTRY_HOST }}
|
||||
username: ${{ gitea.actor }}
|
||||
password: ${{ secrets.TOKEN }}
|
||||
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
context: .
|
||||
push: true
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
|
||||
- name: Create Gitea release
|
||||
run: |
|
||||
cat > /tmp/make_release.py << 'PYEOF'
|
||||
import json, os
|
||||
v = os.environ['VERSION']
|
||||
img = os.environ['IMAGE']
|
||||
raw = open('/tmp/release_notes.txt').read().strip()
|
||||
feats, fixes = [], []
|
||||
for line in raw.splitlines():
|
||||
msg = line.lstrip('- ').strip()
|
||||
if msg.startswith('feat:'):
|
||||
feats.append('- ' + msg[5:].strip())
|
||||
elif msg.startswith('fix:'):
|
||||
fixes.append('- ' + msg[4:].strip())
|
||||
sections = []
|
||||
if feats:
|
||||
sections.append('### New Features\n\n' + '\n'.join(feats))
|
||||
if fixes:
|
||||
sections.append('### Bug Fixes\n\n' + '\n'.join(fixes))
|
||||
notes = '\n\n'.join(sections) or '_No changes_'
|
||||
body = notes + '\n\n### Image\n\n`' + img + ':' + v + '`'
|
||||
payload = {'tag_name': 'v'+v, 'name': 'Catalyst v'+v, 'body': body, 'draft': False, 'prerelease': False}
|
||||
open('/tmp/release_body.json', 'w').write(json.dumps(payload))
|
||||
PYEOF
|
||||
python3 /tmp/make_release.py
|
||||
curl -sf -X POST \
|
||||
-H "Authorization: token ${{ secrets.TOKEN }}" \
|
||||
-H "Content-Type: application/json" \
|
||||
"${{ gitea.server_url }}/api/v1/repos/${{ gitea.repository }}/releases" \
|
||||
--data @/tmp/release_body.json
|
||||
4
.gitignore
vendored
4
.gitignore
vendored
@@ -1,2 +1,4 @@
|
||||
node_modules/
|
||||
js/version.js
|
||||
data/*.db
|
||||
data/*.db-shm
|
||||
data/*.db-wal
|
||||
|
||||
28
Dockerfile
28
Dockerfile
@@ -1,6 +1,22 @@
|
||||
FROM nginx:alpine
|
||||
COPY nginx.conf /etc/nginx/conf.d/default.conf
|
||||
COPY . /usr/share/nginx/html
|
||||
RUN awk -F'"' '/"version"/{printf "const VERSION = \"%s\";\n", $4; exit}' \
|
||||
/usr/share/nginx/html/package.json \
|
||||
> /usr/share/nginx/html/js/version.js
|
||||
FROM node:lts-alpine
|
||||
RUN addgroup -S app && adduser -S app -G app
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY package*.json ./
|
||||
RUN npm ci --omit=dev
|
||||
|
||||
COPY . .
|
||||
ARG BUILD_VERSION=""
|
||||
RUN if [ -n "$BUILD_VERSION" ]; then \
|
||||
printf 'const VERSION = "%s";\n' "$BUILD_VERSION" > js/version.js; \
|
||||
else \
|
||||
awk -F'"' '/"version"/{printf "const VERSION = \"%s\";\n", $4; exit}' \
|
||||
package.json > js/version.js; \
|
||||
fi
|
||||
|
||||
RUN mkdir -p /app/data && chown -R app:app /app
|
||||
USER app
|
||||
|
||||
EXPOSE 3000
|
||||
CMD ["node", "server/server.js"]
|
||||
|
||||
349
README.md
349
README.md
@@ -1,134 +1,243 @@
|
||||
# Catalyst
|
||||
|
||||
A lightweight instance registry for tracking self-hosted infrastructure. No backend, no framework — just a browser, a SQLite database compiled to WebAssembly, and a static file server. :)
|
||||
|
||||
## Structure
|
||||
|
||||
```
|
||||
index.html Entry point
|
||||
css/app.css Styles
|
||||
js/
|
||||
config.js Service definitions and seed data
|
||||
db.js Data layer
|
||||
ui.js Rendering, modals, notifications
|
||||
app.js Router
|
||||
```
|
||||
|
||||
## Data layer
|
||||
|
||||
All reads and writes go through five functions in `js/db.js`. This is the boundary that would be replaced when wiring Catalyst to a real backend — nothing else in the codebase touches data directly.
|
||||
|
||||
### `getInstances(filters?)`
|
||||
|
||||
Returns an array of instances, sorted by name. All filters are optional.
|
||||
|
||||
```js
|
||||
getInstances()
|
||||
getInstances({ search: 'plex' })
|
||||
getInstances({ state: 'degraded' })
|
||||
getInstances({ stack: 'production' })
|
||||
getInstances({ search: 'home', state: 'deployed', stack: 'production' })
|
||||
```
|
||||
|
||||
`search` matches against `name`, `vmid`, and `stack`.
|
||||
|
||||
### `getInstance(vmid)`
|
||||
|
||||
Returns a single instance by VMID, or `null` if not found.
|
||||
|
||||
```js
|
||||
getInstance(137) // → { id, name, vmid, state, stack, ...services, createdAt, updatedAt }
|
||||
```
|
||||
|
||||
### `getDistinctStacks()`
|
||||
|
||||
Returns a sorted array of unique stack names present in the registry. Used to populate the stack filter dynamically.
|
||||
|
||||
```js
|
||||
getDistinctStacks() // → ['development', 'production']
|
||||
```
|
||||
|
||||
### `createInstance(data)`
|
||||
|
||||
Inserts a new instance. Returns `{ ok: true }` on success or `{ ok: false, error }` on failure (e.g. duplicate VMID).
|
||||
|
||||
```js
|
||||
createInstance({
|
||||
name: 'plex',
|
||||
vmid: 117,
|
||||
state: 'deployed', // 'deployed' | 'testing' | 'degraded'
|
||||
stack: 'production',
|
||||
tailscale_ip: '100.64.0.1',
|
||||
atlas: 1,
|
||||
argus: 1,
|
||||
semaphore: 0,
|
||||
patchmon: 1,
|
||||
tailscale: 1,
|
||||
andromeda: 0,
|
||||
hardware_acceleration: 1,
|
||||
})
|
||||
```
|
||||
|
||||
### `updateInstance(id, data)`
|
||||
|
||||
Updates an existing instance by internal `id`. Accepts the same shape as `createInstance`. Returns `{ ok: true }` or `{ ok: false, error }`.
|
||||
|
||||
### `deleteInstance(id)`
|
||||
|
||||
Deletes an instance by internal `id`. Only instances on the `development` stack can be deleted — this is enforced in the UI before `deleteInstance` is ever called.
|
||||
A self-hosted infrastructure registry for homelab Proxmox environments. Track virtual machines across stacks, monitor service health, and maintain a full audit log of every configuration change.
|
||||
|
||||
---
|
||||
|
||||
## Instance shape
|
||||
## Features
|
||||
|
||||
| Field | Type | Notes |
|
||||
- **Dashboard** — filterable, searchable instance list with state and stack badges
|
||||
- **Detail pages** — per-instance view with service flags, Tailscale IP, and a full change timeline
|
||||
- **Audit log** — every field change is recorded with before/after values and a timestamp
|
||||
- **Full CRUD** — add, edit, and delete instances via a clean modal interface
|
||||
- **Production safeguard** — only development instances can be deleted; production instances must be demoted first
|
||||
- **Export / import** — JSON backup and restore via the settings modal
|
||||
- **REST API** — every operation is a plain HTTP call
|
||||
- **Persistent storage** — SQLite on a Docker named volume; survives restarts and upgrades
|
||||
- **Zero native dependencies** — SQLite via Node's built-in `node:sqlite`; no compilation, no binaries
|
||||
|
||||
---
|
||||
|
||||
## Quick start
|
||||
|
||||
```bash
|
||||
docker compose up -d
|
||||
```
|
||||
|
||||
Open [http://localhost:3000](http://localhost:3000).
|
||||
|
||||
### Environment variables
|
||||
|
||||
| Variable | Default | Description |
|
||||
|---|---|---|
|
||||
| `id` | integer | Internal autoincrement ID |
|
||||
| `vmid` | integer | Unique. Used as the public identifier and in URLs (`/instance/137`) |
|
||||
| `name` | string | Display name |
|
||||
| `state` | string | `deployed`, `testing`, or `degraded` |
|
||||
| `stack` | string | `production` or `development` |
|
||||
| `tailscale_ip` | string | Optional |
|
||||
| `atlas` | 0 \| 1 | |
|
||||
| `argus` | 0 \| 1 | |
|
||||
| `semaphore` | 0 \| 1 | |
|
||||
| `patchmon` | 0 \| 1 | |
|
||||
| `tailscale` | 0 \| 1 | |
|
||||
| `andromeda` | 0 \| 1 | |
|
||||
| `hardware_acceleration` | 0 \| 1 | |
|
||||
| `createdAt` | ISO string | Set on insert |
|
||||
| `updatedAt` | ISO string | Updated on every write |
|
||||
| `PORT` | `3000` | HTTP port the server binds to |
|
||||
| `DB_PATH` | `data/catalyst.db` | Path to the SQLite database file |
|
||||
|
||||
---
|
||||
|
||||
## REST API
|
||||
|
||||
All endpoints are under `/api`. Request and response bodies are JSON.
|
||||
|
||||
### Instances
|
||||
|
||||
#### `GET /api/instances`
|
||||
|
||||
Returns all instances sorted by name. All query parameters are optional.
|
||||
|
||||
| Parameter | Type | Description |
|
||||
|---|---|---|
|
||||
| `search` | string | Partial match on `name`, `vmid`, or `stack` |
|
||||
| `state` | string | Exact match: `deployed`, `testing`, `degraded` |
|
||||
| `stack` | string | Exact match: `production`, `development` |
|
||||
|
||||
```
|
||||
GET /api/instances?search=plex&state=deployed
|
||||
```
|
||||
|
||||
```json
|
||||
[
|
||||
{
|
||||
"vmid": 117,
|
||||
"name": "plex",
|
||||
"state": "deployed",
|
||||
"stack": "production",
|
||||
"tailscale_ip": "100.64.0.1",
|
||||
"atlas": 1, "argus": 1, "semaphore": 0,
|
||||
"patchmon": 1, "tailscale": 1, "andromeda": 0,
|
||||
"hardware_acceleration": 1,
|
||||
"created_at": "2024-01-15T10:30:00",
|
||||
"updated_at": "2024-03-10T14:22:00"
|
||||
}
|
||||
]
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
#### `GET /api/instances/stacks`
|
||||
|
||||
Returns a sorted array of distinct stack names present in the registry.
|
||||
|
||||
```
|
||||
GET /api/instances/stacks
|
||||
→ ["development", "production"]
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
#### `GET /api/instances/:vmid`
|
||||
|
||||
Returns a single instance by VMID.
|
||||
|
||||
| Status | Condition |
|
||||
|---|---|
|
||||
| `200` | Instance found |
|
||||
| `400` | VMID is not a valid integer |
|
||||
| `404` | No instance with that VMID |
|
||||
|
||||
---
|
||||
|
||||
#### `GET /api/instances/:vmid/history`
|
||||
|
||||
Returns the audit log for an instance — newest events first.
|
||||
|
||||
| Status | Condition |
|
||||
|---|---|
|
||||
| `200` | History returned (may be empty array) |
|
||||
| `400` | VMID is not a valid integer |
|
||||
| `404` | No instance with that VMID |
|
||||
|
||||
```json
|
||||
[
|
||||
{
|
||||
"id": 3,
|
||||
"vmid": 117,
|
||||
"field": "state",
|
||||
"old_value": "testing",
|
||||
"new_value": "deployed",
|
||||
"changed_at": "2024-03-10T14:22:00"
|
||||
},
|
||||
{
|
||||
"id": 1,
|
||||
"vmid": 117,
|
||||
"field": "created",
|
||||
"old_value": null,
|
||||
"new_value": null,
|
||||
"changed_at": "2024-01-15T10:30:00"
|
||||
}
|
||||
]
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
#### `POST /api/instances`
|
||||
|
||||
Creates a new instance. Returns the created record.
|
||||
|
||||
| Status | Condition |
|
||||
|---|---|
|
||||
| `201` | Created successfully |
|
||||
| `400` | Validation error — see `errors` array in response |
|
||||
| `409` | VMID already exists |
|
||||
|
||||
**Request body:**
|
||||
|
||||
| Field | Type | Required | Notes |
|
||||
|---|---|---|---|
|
||||
| `name` | string | yes | |
|
||||
| `vmid` | integer | yes | Must be > 0 and unique |
|
||||
| `state` | string | yes | `deployed`, `testing`, or `degraded` |
|
||||
| `stack` | string | yes | `production` or `development` |
|
||||
| `tailscale_ip` | string | no | Valid IPv4 or empty string |
|
||||
| `atlas` | 0\|1 | no | |
|
||||
| `argus` | 0\|1 | no | |
|
||||
| `semaphore` | 0\|1 | no | |
|
||||
| `patchmon` | 0\|1 | no | |
|
||||
| `tailscale` | 0\|1 | no | |
|
||||
| `andromeda` | 0\|1 | no | |
|
||||
| `hardware_acceleration` | 0\|1 | no | |
|
||||
|
||||
---
|
||||
|
||||
#### `PUT /api/instances/:vmid`
|
||||
|
||||
Replaces all fields on an existing instance. Accepts the same body shape as `POST`. The `vmid` in the body may differ from the URL — this is how you change a VMID.
|
||||
|
||||
| Status | Condition |
|
||||
|---|---|
|
||||
| `200` | Updated successfully |
|
||||
| `400` | Validation error |
|
||||
| `404` | No instance with that VMID |
|
||||
| `409` | New VMID conflicts with an existing instance |
|
||||
|
||||
---
|
||||
|
||||
#### `DELETE /api/instances/:vmid`
|
||||
|
||||
Deletes an instance. Only instances on the `development` stack may be deleted.
|
||||
|
||||
| Status | Condition |
|
||||
|---|---|
|
||||
| `204` | Deleted successfully |
|
||||
| `400` | VMID is not a valid integer |
|
||||
| `404` | No instance with that VMID |
|
||||
| `422` | Instance is on the `production` stack |
|
||||
|
||||
---
|
||||
|
||||
### Backup
|
||||
|
||||
#### `GET /api/export`
|
||||
|
||||
Downloads a JSON backup of all instances as a file attachment.
|
||||
|
||||
```json
|
||||
{
|
||||
"version": 1,
|
||||
"exported_at": "2024-03-10T14:22:00.000Z",
|
||||
"instances": [ ... ]
|
||||
}
|
||||
```
|
||||
|
||||
#### `POST /api/import`
|
||||
|
||||
Replaces all instances from a JSON backup. Validates every row before committing — if any row is invalid the entire import is rejected.
|
||||
|
||||
| Status | Condition |
|
||||
|---|---|
|
||||
| `200` | Import successful — returns `{ "imported": N }` |
|
||||
| `400` | Body missing `instances` array, or validation errors |
|
||||
|
||||
---
|
||||
|
||||
## Development
|
||||
|
||||
```bash
|
||||
npm install
|
||||
npm test # run all tests once
|
||||
npm run test:watch # watch mode
|
||||
npm start # start the server on :3000
|
||||
```
|
||||
|
||||
Tests are split across three files:
|
||||
|
||||
| File | What it covers |
|
||||
|---|---|
|
||||
| `tests/db.test.js` | SQLite data layer — CRUD, constraints, filters, history logging |
|
||||
| `tests/api.test.js` | HTTP API — all endpoints, status codes, error cases |
|
||||
| `tests/helpers.test.js` | UI helpers — `esc()` XSS contract, date formatting, history formatters |
|
||||
|
||||
---
|
||||
|
||||
## Versioning
|
||||
|
||||
Catalyst uses [semantic versioning](https://semver.org). The version in `package.json` is the source of truth and must match the release tag.
|
||||
Catalyst uses [semantic versioning](https://semver.org). `package.json` is the single source of truth.
|
||||
|
||||
| Change | Bump | Example |
|
||||
|---|---|---|
|
||||
| Bug fix | patch | `1.0.0` → `1.0.1` |
|
||||
| New feature, backward compatible | minor | `1.0.0` → `1.1.0` |
|
||||
| Breaking change | major | `1.0.0` → `2.0.0` |
|
||||
| Change | Bump |
|
||||
|---|---|
|
||||
| Bug fix | patch |
|
||||
| New feature, backward compatible | minor |
|
||||
| Breaking change | major |
|
||||
|
||||
### Cutting a release
|
||||
|
||||
**1. Bump the version in `package.json`**
|
||||
```json
|
||||
"version": "1.1.0"
|
||||
```
|
||||
|
||||
**2. Commit, tag, and push**
|
||||
```bash
|
||||
git add package.json
|
||||
git commit -m "chore: release v1.1.0"
|
||||
git tag v1.1.0
|
||||
git push && git push --tags
|
||||
```
|
||||
|
||||
Pushing the tag triggers the full pipeline: tests → build → release.
|
||||
|
||||
- The image is tagged `:1.1.0`, `:1.1`, and `:latest` in the Gitea registry
|
||||
- A Gitea release is created at `v1.1.0` with the image reference in the release notes
|
||||
|
||||
Pushes to `main` without a tag still run tests and build a `:latest` image — no release is created.
|
||||
Pushing a tag triggers the CI pipeline: **test → build → release**.
|
||||
Docker images are tagged `:x.y.z`, `:x.y`, and `:latest`.
|
||||
|
||||
145
css/app.css
145
css/app.css
@@ -25,6 +25,10 @@
|
||||
--mono: 'JetBrains Mono', 'IBM Plex Mono', monospace;
|
||||
}
|
||||
|
||||
html {
|
||||
zoom: 1.1;
|
||||
}
|
||||
|
||||
html, body {
|
||||
height: 100%;
|
||||
background: var(--bg);
|
||||
@@ -70,6 +74,19 @@ nav {
|
||||
|
||||
.nav-sep { flex: 1; }
|
||||
|
||||
.nav-btn {
|
||||
background: none;
|
||||
border: 1px solid var(--border2);
|
||||
color: var(--text2);
|
||||
border-radius: 6px;
|
||||
padding: 4px 8px;
|
||||
font-size: 14px;
|
||||
cursor: pointer;
|
||||
margin-left: 10px;
|
||||
line-height: 1;
|
||||
}
|
||||
.nav-btn:hover { border-color: var(--accent); color: var(--accent); }
|
||||
|
||||
.nav-divider { color: var(--border2); }
|
||||
|
||||
.nav-status {
|
||||
@@ -289,6 +306,7 @@ select:focus { border-color: var(--accent); }
|
||||
border-radius: 3px;
|
||||
letter-spacing: 0.08em;
|
||||
text-transform: uppercase;
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
.badge.deployed { background: var(--accent2); color: var(--accent); }
|
||||
@@ -360,16 +378,25 @@ select:focus { border-color: var(--accent); }
|
||||
}
|
||||
|
||||
.detail-sub {
|
||||
font-size: 12px;
|
||||
color: var(--text3);
|
||||
margin-top: 6px;
|
||||
font-size: 13px;
|
||||
margin-top: 8px;
|
||||
display: flex;
|
||||
gap: 16px;
|
||||
align-items: center;
|
||||
gap: 0;
|
||||
}
|
||||
|
||||
.detail-sub span { display: flex; gap: 4px; }
|
||||
.detail-sub .lbl { color: var(--text3); }
|
||||
.detail-sub .val { color: var(--text2); }
|
||||
.detail-sub > span {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 6px;
|
||||
}
|
||||
.detail-sub > span + span {
|
||||
margin-left: 12px;
|
||||
padding-left: 12px;
|
||||
border-left: 1px solid var(--border);
|
||||
}
|
||||
.detail-sub .lbl { color: var(--text3); font-size: 11px; }
|
||||
.detail-sub .val { color: var(--text); }
|
||||
|
||||
.detail-actions { display: flex; gap: 8px; }
|
||||
|
||||
@@ -614,6 +641,58 @@ select:focus { border-color: var(--accent); }
|
||||
|
||||
.confirm-actions { display: flex; justify-content: flex-end; gap: 10px; }
|
||||
|
||||
/* ── HISTORY TIMELINE ── */
|
||||
.tl-item {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: space-between;
|
||||
gap: 24px;
|
||||
padding: 9px 0;
|
||||
border-bottom: 1px solid var(--border);
|
||||
}
|
||||
.tl-item:last-child { border-bottom: none; }
|
||||
.tl-event { display: flex; align-items: center; gap: 7px; font-size: 13px; min-width: 0; }
|
||||
.tl-label { color: var(--text2); }
|
||||
.tl-sep { color: var(--text3); user-select: none; }
|
||||
.tl-old { color: var(--text3); text-decoration: line-through; font-size: 12px; }
|
||||
.tl-arrow { color: var(--text3); font-size: 11px; }
|
||||
.tl-new { color: var(--text); font-weight: 500; }
|
||||
.tl-time { color: var(--text3); font-size: 11px; white-space: nowrap; flex-shrink: 0; }
|
||||
.tl-deployed { color: var(--accent); }
|
||||
.tl-testing { color: var(--amber); }
|
||||
.tl-degraded { color: var(--red); }
|
||||
.tl-created .tl-event { color: var(--accent); font-weight: 500; }
|
||||
.tl-empty { color: var(--text3); font-size: 12px; padding: 8px 0; }
|
||||
|
||||
/* ── SETTINGS MODAL ── */
|
||||
#settings-modal .modal-body { padding-top: 0; }
|
||||
.settings-section { padding: 16px 0; border-bottom: 1px solid var(--border); }
|
||||
.settings-section:last-child { border-bottom: none; padding-bottom: 0; }
|
||||
.settings-section-title {
|
||||
font-size: 10px;
|
||||
font-weight: 600;
|
||||
text-transform: uppercase;
|
||||
letter-spacing: 0.1em;
|
||||
color: var(--text3);
|
||||
margin-bottom: 8px;
|
||||
}
|
||||
.settings-desc { font-size: 12px; color: var(--text2); margin: 0 0 14px; line-height: 1.6; }
|
||||
.settings-row { display: flex; align-items: center; gap: 12px; }
|
||||
.settings-label { font-size: 13px; color: var(--text2); white-space: nowrap; min-width: 80px; }
|
||||
.settings-select { flex: 1; }
|
||||
.import-row { display: flex; gap: 10px; align-items: center; }
|
||||
.import-file-input { flex: 1; }
|
||||
|
||||
.btn-secondary {
|
||||
background: var(--bg3);
|
||||
border-color: var(--border2);
|
||||
color: var(--text);
|
||||
}
|
||||
.btn-secondary:hover { border-color: var(--accent); color: var(--accent); }
|
||||
|
||||
.btn-danger { background: var(--red2); border-color: var(--red); color: var(--text); }
|
||||
.btn-danger:hover { background: var(--red); }
|
||||
|
||||
/* ── SCROLLBAR ── */
|
||||
::-webkit-scrollbar { width: 6px; }
|
||||
::-webkit-scrollbar-track { background: var(--bg); }
|
||||
@@ -633,3 +712,55 @@ select:focus { border-color: var(--accent); }
|
||||
0%, 100% { opacity: 1; }
|
||||
50% { opacity: 0; }
|
||||
}
|
||||
|
||||
/* ── MOBILE ── */
|
||||
@media (max-width: 640px) {
|
||||
/* Reset desktop zoom — mobile browsers handle scaling themselves */
|
||||
html { zoom: 1; }
|
||||
|
||||
/* Nav */
|
||||
nav { padding: 0 16px; }
|
||||
|
||||
/* Dashboard header */
|
||||
.dash-header { padding: 18px 16px 14px; }
|
||||
|
||||
/* Stats bar */
|
||||
.stat-cell { padding: 10px 16px; }
|
||||
|
||||
/* Toolbar — search full-width on first row, filters + button below */
|
||||
.toolbar { flex-wrap: wrap; padding: 10px 16px; gap: 8px; }
|
||||
.search-wrap { max-width: 100%; }
|
||||
.toolbar-right { margin-left: 0; width: 100%; justify-content: flex-end; }
|
||||
|
||||
/* Instance grid — single column */
|
||||
.instance-grid {
|
||||
grid-template-columns: 1fr;
|
||||
padding: 12px 16px;
|
||||
gap: 8px;
|
||||
}
|
||||
|
||||
/* Detail page */
|
||||
.detail-page { padding: 16px; }
|
||||
|
||||
/* Detail header — stack title block above actions */
|
||||
.detail-header { flex-direction: column; align-items: flex-start; gap: 14px; }
|
||||
|
||||
/* Detail sub — wrap items when they don't fit */
|
||||
.detail-sub { flex-wrap: wrap; row-gap: 4px; }
|
||||
|
||||
/* Detail grid — single column */
|
||||
.detail-grid { grid-template-columns: 1fr; }
|
||||
|
||||
/* Toggle grid — 2 columns instead of 3 */
|
||||
.toggle-grid { grid-template-columns: 1fr 1fr; }
|
||||
|
||||
/* Confirm box — no fixed width on mobile */
|
||||
.confirm-box { width: auto; max-width: calc(100vw - 32px); padding: 18px; }
|
||||
|
||||
/* History timeline — stack timestamp above event */
|
||||
.tl-item { flex-direction: column; align-items: flex-start; gap: 3px; }
|
||||
.tl-time { order: -1; }
|
||||
|
||||
/* Toast — stretch across bottom */
|
||||
.toast { right: 16px; left: 16px; bottom: 16px; }
|
||||
}
|
||||
|
||||
0
data/.gitkeep
Normal file
0
data/.gitkeep
Normal file
@@ -3,4 +3,11 @@ services:
|
||||
image: ${REGISTRY:-gitea.thewrightserver.net/josh}/catalyst:${TAG:-latest}
|
||||
restart: unless-stopped
|
||||
ports:
|
||||
- "${PORT:-3000}:80"
|
||||
- "${PORT:-3000}:3000"
|
||||
volumes:
|
||||
- catalyst-data:/app/data
|
||||
environment:
|
||||
- NODE_ENV=production
|
||||
|
||||
volumes:
|
||||
catalyst-data:
|
||||
|
||||
37
index.html
37
index.html
@@ -3,6 +3,7 @@
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<base href="/">
|
||||
<title>Catalyst</title>
|
||||
<link rel="preconnect" href="https://fonts.googleapis.com">
|
||||
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
|
||||
@@ -21,6 +22,7 @@
|
||||
<span class="nav-divider">·</span>
|
||||
<span id="nav-version"></span>
|
||||
</div>
|
||||
<button class="nav-btn" onclick="openSettingsModal()" title="Settings">⚙</button>
|
||||
</nav>
|
||||
|
||||
<main>
|
||||
@@ -90,7 +92,7 @@
|
||||
<div class="services-grid" id="detail-services"></div>
|
||||
</div>
|
||||
<div class="detail-section full">
|
||||
<div class="section-title">timestamps</div>
|
||||
<div class="section-title">history</div>
|
||||
<div id="detail-timestamps"></div>
|
||||
</div>
|
||||
</div>
|
||||
@@ -170,13 +172,44 @@
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- SETTINGS MODAL -->
|
||||
<div id="settings-modal" class="modal-overlay">
|
||||
<div class="modal">
|
||||
<div class="modal-header">
|
||||
<span class="modal-title">Settings</span>
|
||||
<button class="modal-close" onclick="closeSettingsModal()">✕</button>
|
||||
</div>
|
||||
<div class="modal-body">
|
||||
<div class="settings-section">
|
||||
<div class="settings-section-title">Display</div>
|
||||
<div class="settings-row">
|
||||
<label class="settings-label" for="tz-select">Timezone</label>
|
||||
<select id="tz-select" class="form-input settings-select"></select>
|
||||
</div>
|
||||
</div>
|
||||
<div class="settings-section">
|
||||
<div class="settings-section-title">Export</div>
|
||||
<p class="settings-desc">Download all instance data as a JSON backup file.</p>
|
||||
<button class="btn btn-secondary" onclick="exportDB()">Export Database</button>
|
||||
</div>
|
||||
<div class="settings-section">
|
||||
<div class="settings-section-title">Import</div>
|
||||
<p class="settings-desc">Restore from a backup file. This replaces all current instances.</p>
|
||||
<div class="import-row">
|
||||
<input type="file" id="import-file" accept=".json" class="form-input import-file-input">
|
||||
<button class="btn btn-danger" onclick="importDB()">Import</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- TOAST -->
|
||||
<div class="toast" id="toast">
|
||||
<div class="toast-dot"></div>
|
||||
<span id="toast-msg"></span>
|
||||
</div>
|
||||
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/sql.js/1.10.2/sql-wasm.js"></script>
|
||||
<script src="js/version.js" onerror="window.VERSION=null"></script>
|
||||
<script src="js/config.js"></script>
|
||||
<script src="js/db.js"></script>
|
||||
|
||||
@@ -21,6 +21,7 @@ function handleRoute() {
|
||||
renderDetailPage(parseInt(m[1], 10));
|
||||
} else {
|
||||
document.getElementById('page-dashboard').classList.add('active');
|
||||
renderDashboard();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -37,9 +38,9 @@ window.addEventListener('popstate', e => {
|
||||
|
||||
// ── Bootstrap ─────────────────────────────────────────────────────────────────
|
||||
|
||||
if (VERSION) document.getElementById('nav-version').textContent = `v${VERSION}`;
|
||||
if (VERSION) {
|
||||
const label = /^\d/.test(VERSION) ? `v${VERSION}` : VERSION;
|
||||
document.getElementById('nav-version').textContent = label;
|
||||
}
|
||||
|
||||
initDB().then(() => {
|
||||
renderDashboard();
|
||||
handleRoute();
|
||||
});
|
||||
|
||||
19
js/config.js
19
js/config.js
@@ -1,21 +1,6 @@
|
||||
// Services shown as dots on instance cards (all tracked services)
|
||||
// Services shown as dots on instance cards
|
||||
const CARD_SERVICES = ['atlas', 'argus', 'semaphore', 'patchmon', 'tailscale', 'andromeda'];
|
||||
|
||||
// Services shown in the detail page service grid
|
||||
// (tailscale is shown separately under "network" alongside its IP)
|
||||
// (tailscale lives in the network section alongside its IP)
|
||||
const DETAIL_SERVICES = ['atlas', 'argus', 'semaphore', 'patchmon', 'andromeda'];
|
||||
|
||||
const SQL_JS_CDN = 'https://cdnjs.cloudflare.com/ajax/libs/sql.js/1.10.2/';
|
||||
|
||||
const STORAGE_KEY = 'catalyst_db';
|
||||
|
||||
const SEED = [
|
||||
{ name: 'plex', state: 'deployed', stack: 'production', vmid: 117, atlas: true, argus: true, semaphore: false, patchmon: true, tailscale: true, andromeda: false, tailscale_ip: '100.64.0.1', hardware_acceleration: true },
|
||||
{ name: 'foldergram', state: 'testing', stack: 'development', vmid: 137, atlas: false, argus: false, semaphore: false, patchmon: false, tailscale: false, andromeda: false, tailscale_ip: '', hardware_acceleration: false },
|
||||
{ name: 'homeassistant', state: 'deployed', stack: 'production', vmid: 102, atlas: true, argus: true, semaphore: true, patchmon: true, tailscale: true, andromeda: false, tailscale_ip: '100.64.0.5', hardware_acceleration: false },
|
||||
{ name: 'gitea', state: 'deployed', stack: 'production', vmid: 110, atlas: true, argus: false, semaphore: true, patchmon: true, tailscale: true, andromeda: false, tailscale_ip: '100.64.0.8', hardware_acceleration: false },
|
||||
{ name: 'postgres-primary', state: 'degraded', stack: 'production', vmid: 201, atlas: true, argus: true, semaphore: false, patchmon: true, tailscale: false, andromeda: true, tailscale_ip: '', hardware_acceleration: false },
|
||||
{ name: 'nextcloud', state: 'testing', stack: 'development', vmid: 144, atlas: false, argus: false, semaphore: false, patchmon: false, tailscale: true, andromeda: false, tailscale_ip: '100.64.0.12', hardware_acceleration: false },
|
||||
{ name: 'traefik', state: 'deployed', stack: 'production', vmid: 100, atlas: true, argus: true, semaphore: false, patchmon: true, tailscale: true, andromeda: false, tailscale_ip: '100.64.0.2', hardware_acceleration: false },
|
||||
{ name: 'monitoring-stack', state: 'testing', stack: 'development', vmid: 155, atlas: false, argus: false, semaphore: true, patchmon: false, tailscale: false, andromeda: false, tailscale_ip: '', hardware_acceleration: false },
|
||||
];
|
||||
|
||||
183
js/db.js
183
js/db.js
@@ -1,159 +1,62 @@
|
||||
let db = null;
|
||||
// API client — replaces the sql.js database layer.
|
||||
// Swap these fetch() calls for any other transport when needed.
|
||||
|
||||
// ── Persistence ──────────────────────────────────────────────────────────────
|
||||
const BASE = '/api';
|
||||
|
||||
function saveToStorage() {
|
||||
try {
|
||||
const data = db.export(); // Uint8Array
|
||||
let binary = '';
|
||||
const chunk = 8192;
|
||||
for (let i = 0; i < data.length; i += chunk) {
|
||||
binary += String.fromCharCode(...data.subarray(i, i + chunk));
|
||||
}
|
||||
localStorage.setItem(STORAGE_KEY, btoa(binary));
|
||||
} catch (e) {
|
||||
console.warn('catalyst: failed to persist database', e);
|
||||
}
|
||||
async function api(path, options = {}) {
|
||||
const res = await fetch(BASE + path, options);
|
||||
if (res.status === 204) return null;
|
||||
return res.json().then(data => ({ ok: res.ok, status: res.status, data }));
|
||||
}
|
||||
|
||||
function loadFromStorage() {
|
||||
try {
|
||||
const stored = localStorage.getItem(STORAGE_KEY);
|
||||
if (!stored) return null;
|
||||
const binary = atob(stored);
|
||||
const buf = new Uint8Array(binary.length);
|
||||
for (let i = 0; i < binary.length; i++) buf[i] = binary.charCodeAt(i);
|
||||
return buf;
|
||||
} catch (e) {
|
||||
console.warn('catalyst: failed to load database from storage', e);
|
||||
return null;
|
||||
}
|
||||
// ── Queries ───────────────────────────────────────────────────────────────────
|
||||
|
||||
async function getInstances(filters = {}) {
|
||||
const params = new URLSearchParams(
|
||||
Object.entries(filters).filter(([, v]) => v)
|
||||
);
|
||||
const res = await fetch(`${BASE}/instances?${params}`);
|
||||
return res.json();
|
||||
}
|
||||
|
||||
// ── Init ─────────────────────────────────────────────────────────────────────
|
||||
|
||||
async function initDB() {
|
||||
const SQL = await initSqlJs({ locateFile: f => SQL_JS_CDN + f });
|
||||
|
||||
const saved = loadFromStorage();
|
||||
if (saved) {
|
||||
db = new SQL.Database(saved);
|
||||
return;
|
||||
async function getInstance(vmid) {
|
||||
const res = await fetch(`${BASE}/instances/${vmid}`);
|
||||
if (res.status === 404) return null;
|
||||
return res.json();
|
||||
}
|
||||
|
||||
db = new SQL.Database();
|
||||
db.run(`
|
||||
CREATE TABLE instances (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
name TEXT NOT NULL,
|
||||
state TEXT DEFAULT 'deployed',
|
||||
stack TEXT DEFAULT '',
|
||||
vmid INTEGER UNIQUE NOT NULL,
|
||||
atlas INTEGER DEFAULT 0,
|
||||
argus INTEGER DEFAULT 0,
|
||||
semaphore INTEGER DEFAULT 0,
|
||||
patchmon INTEGER DEFAULT 0,
|
||||
tailscale INTEGER DEFAULT 0,
|
||||
andromeda INTEGER DEFAULT 0,
|
||||
tailscale_ip TEXT DEFAULT '',
|
||||
hardware_acceleration INTEGER DEFAULT 0,
|
||||
createdAt TEXT DEFAULT (datetime('now')),
|
||||
updatedAt TEXT DEFAULT (datetime('now'))
|
||||
)
|
||||
`);
|
||||
|
||||
const stmt = db.prepare(`
|
||||
INSERT INTO instances
|
||||
(name, state, stack, vmid, atlas, argus, semaphore, patchmon, tailscale, andromeda, tailscale_ip, hardware_acceleration)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
`);
|
||||
|
||||
SEED.forEach(s => stmt.run([
|
||||
s.name, s.state, s.stack, s.vmid,
|
||||
+s.atlas, +s.argus, +s.semaphore, +s.patchmon,
|
||||
+s.tailscale, +s.andromeda, s.tailscale_ip, +s.hardware_acceleration,
|
||||
]));
|
||||
|
||||
stmt.free();
|
||||
saveToStorage();
|
||||
}
|
||||
|
||||
// ── Queries ──────────────────────────────────────────────────────────────────
|
||||
|
||||
function getInstances(filters = {}) {
|
||||
let sql = 'SELECT * FROM instances WHERE 1=1';
|
||||
const params = [];
|
||||
|
||||
if (filters.search) {
|
||||
sql += ' AND (name LIKE ? OR CAST(vmid AS TEXT) LIKE ? OR stack LIKE ?)';
|
||||
const s = `%${filters.search}%`;
|
||||
params.push(s, s, s);
|
||||
}
|
||||
if (filters.state) { sql += ' AND state = ?'; params.push(filters.state); }
|
||||
if (filters.stack) { sql += ' AND stack = ?'; params.push(filters.stack); }
|
||||
|
||||
sql += ' ORDER BY name ASC';
|
||||
|
||||
const res = db.exec(sql, params);
|
||||
if (!res.length) return [];
|
||||
const cols = res[0].columns;
|
||||
return res[0].values.map(row => Object.fromEntries(cols.map((c, i) => [c, row[i]])));
|
||||
}
|
||||
|
||||
function getInstance(vmid) {
|
||||
const res = db.exec('SELECT * FROM instances WHERE vmid = ?', [vmid]);
|
||||
if (!res.length) return null;
|
||||
const cols = res[0].columns;
|
||||
return Object.fromEntries(cols.map((c, i) => [c, res[0].values[0][i]]));
|
||||
}
|
||||
|
||||
function getDistinctStacks() {
|
||||
const res = db.exec(`SELECT DISTINCT stack FROM instances WHERE stack != '' ORDER BY stack`);
|
||||
if (!res.length) return [];
|
||||
return res[0].values.map(row => row[0]);
|
||||
async function getDistinctStacks() {
|
||||
const res = await fetch(`${BASE}/instances/stacks`);
|
||||
return res.json();
|
||||
}
|
||||
|
||||
// ── Mutations ─────────────────────────────────────────────────────────────────
|
||||
|
||||
function createInstance(data) {
|
||||
try {
|
||||
db.run(
|
||||
`INSERT INTO instances
|
||||
(name, state, stack, vmid, atlas, argus, semaphore, patchmon, tailscale, andromeda, tailscale_ip, hardware_acceleration)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
|
||||
[data.name, data.state, data.stack, data.vmid,
|
||||
data.atlas, data.argus, data.semaphore, data.patchmon,
|
||||
data.tailscale, data.andromeda, data.tailscale_ip, data.hardware_acceleration]
|
||||
);
|
||||
saveToStorage();
|
||||
async function createInstance(data) {
|
||||
const { ok, data: body } = await api('/instances', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify(data),
|
||||
});
|
||||
if (!ok) return { ok: false, error: body.error ?? body.errors?.[0] ?? 'error creating instance' };
|
||||
return { ok: true };
|
||||
} catch (e) {
|
||||
return { ok: false, error: e.message };
|
||||
}
|
||||
}
|
||||
|
||||
function updateInstance(id, data) {
|
||||
try {
|
||||
db.run(
|
||||
`UPDATE instances SET
|
||||
name=?, state=?, stack=?, vmid=?,
|
||||
atlas=?, argus=?, semaphore=?, patchmon=?,
|
||||
tailscale=?, andromeda=?, tailscale_ip=?, hardware_acceleration=?,
|
||||
updatedAt=datetime('now')
|
||||
WHERE id=?`,
|
||||
[data.name, data.state, data.stack, data.vmid,
|
||||
data.atlas, data.argus, data.semaphore, data.patchmon,
|
||||
data.tailscale, data.andromeda, data.tailscale_ip, data.hardware_acceleration,
|
||||
id]
|
||||
);
|
||||
saveToStorage();
|
||||
async function updateInstance(vmid, data) {
|
||||
const { ok, data: body } = await api(`/instances/${vmid}`, {
|
||||
method: 'PUT',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify(data),
|
||||
});
|
||||
if (!ok) return { ok: false, error: body.error ?? body.errors?.[0] ?? 'error updating instance' };
|
||||
return { ok: true };
|
||||
} catch (e) {
|
||||
return { ok: false, error: e.message };
|
||||
}
|
||||
}
|
||||
|
||||
function deleteInstance(id) {
|
||||
db.run('DELETE FROM instances WHERE id = ?', [id]);
|
||||
saveToStorage();
|
||||
async function deleteInstance(vmid) {
|
||||
await api(`/instances/${vmid}`, { method: 'DELETE' });
|
||||
}
|
||||
|
||||
async function getInstanceHistory(vmid) {
|
||||
const res = await fetch(`${BASE}/instances/${vmid}/history`);
|
||||
return res.json();
|
||||
}
|
||||
|
||||
234
js/ui.js
234
js/ui.js
@@ -1,8 +1,36 @@
|
||||
// Module-level UI state
|
||||
let editingId = null;
|
||||
let editingVmid = null;
|
||||
let currentVmid = null;
|
||||
let toastTimer = null;
|
||||
|
||||
// ── Timezone ──────────────────────────────────────────────────────────────────
|
||||
|
||||
const TIMEZONES = [
|
||||
{ label: 'UTC', tz: 'UTC' },
|
||||
{ label: 'Hawaii (HST)', tz: 'Pacific/Honolulu' },
|
||||
{ label: 'Alaska (AKT)', tz: 'America/Anchorage' },
|
||||
{ label: 'Pacific (PT)', tz: 'America/Los_Angeles' },
|
||||
{ label: 'Mountain (MT)', tz: 'America/Denver' },
|
||||
{ label: 'Central (CT)', tz: 'America/Chicago' },
|
||||
{ label: 'Eastern (ET)', tz: 'America/New_York' },
|
||||
{ label: 'Atlantic (AT)', tz: 'America/Halifax' },
|
||||
{ label: 'London (GMT/BST)', tz: 'Europe/London' },
|
||||
{ label: 'Paris / Berlin (CET)', tz: 'Europe/Paris' },
|
||||
{ label: 'Helsinki (EET)', tz: 'Europe/Helsinki' },
|
||||
{ label: 'Istanbul (TRT)', tz: 'Europe/Istanbul' },
|
||||
{ label: 'Dubai (GST)', tz: 'Asia/Dubai' },
|
||||
{ label: 'India (IST)', tz: 'Asia/Kolkata' },
|
||||
{ label: 'Singapore (SGT)', tz: 'Asia/Singapore' },
|
||||
{ label: 'China (CST)', tz: 'Asia/Shanghai' },
|
||||
{ label: 'Japan / Korea (JST/KST)', tz: 'Asia/Tokyo' },
|
||||
{ label: 'Sydney (AEST)', tz: 'Australia/Sydney' },
|
||||
{ label: 'Auckland (NZST)', tz: 'Pacific/Auckland' },
|
||||
];
|
||||
|
||||
function getTimezone() {
|
||||
return localStorage.getItem('catalyst_tz') || 'UTC';
|
||||
}
|
||||
|
||||
// ── Helpers ───────────────────────────────────────────────────────────────────
|
||||
|
||||
function esc(str) {
|
||||
@@ -11,24 +39,32 @@ function esc(str) {
|
||||
return d.innerHTML;
|
||||
}
|
||||
|
||||
// SQLite datetime('now') → 'YYYY-MM-DD HH:MM:SS' (UTC, no timezone marker).
|
||||
// Appending 'Z' tells JS to parse it as UTC rather than local time.
|
||||
function parseUtc(d) {
|
||||
if (typeof d !== 'string') return new Date(d);
|
||||
const hasZone = d.endsWith('Z') || /[+-]\d{2}:\d{2}$/.test(d);
|
||||
return new Date(hasZone ? d : d.replace(' ', 'T') + 'Z');
|
||||
}
|
||||
|
||||
function fmtDate(d) {
|
||||
if (!d) return '—';
|
||||
try {
|
||||
return new Date(d).toLocaleDateString('en-US', { year: 'numeric', month: 'short', day: 'numeric' });
|
||||
return parseUtc(d).toLocaleDateString('en-US', { year: 'numeric', month: 'short', day: 'numeric', timeZone: getTimezone() });
|
||||
} catch (e) { return d; }
|
||||
}
|
||||
|
||||
function fmtDateFull(d) {
|
||||
if (!d) return '—';
|
||||
try {
|
||||
return new Date(d).toLocaleString('en-US', { year: 'numeric', month: 'short', day: 'numeric', hour: '2-digit', minute: '2-digit' });
|
||||
return parseUtc(d).toLocaleString('en-US', { year: 'numeric', month: 'short', day: 'numeric', hour: '2-digit', minute: '2-digit', timeZone: getTimezone(), timeZoneName: 'short' });
|
||||
} catch (e) { return d; }
|
||||
}
|
||||
|
||||
// ── Dashboard ─────────────────────────────────────────────────────────────────
|
||||
|
||||
function renderDashboard() {
|
||||
const all = getInstances();
|
||||
async function renderDashboard() {
|
||||
const all = await getInstances();
|
||||
document.getElementById('nav-count').textContent = `${all.length} instance${all.length !== 1 ? 's' : ''}`;
|
||||
|
||||
const states = {};
|
||||
@@ -39,18 +75,18 @@ function renderDashboard() {
|
||||
<div class="stat-cell"><div class="stat-label">deployed</div><div class="stat-value">${states['deployed'] || 0}</div></div>
|
||||
<div class="stat-cell"><div class="stat-label">testing</div><div class="stat-value amber">${states['testing'] || 0}</div></div>
|
||||
<div class="stat-cell"><div class="stat-label">degraded</div><div class="stat-value red">${states['degraded'] || 0}</div></div>
|
||||
<div class="stat-cell"><div class="stat-label">stacks</div><div class="stat-value">${getDistinctStacks().length}</div></div>
|
||||
`;
|
||||
|
||||
populateStackFilter();
|
||||
filterInstances();
|
||||
await populateStackFilter();
|
||||
await filterInstances();
|
||||
}
|
||||
|
||||
function populateStackFilter() {
|
||||
async function populateStackFilter() {
|
||||
const select = document.getElementById('filter-stack');
|
||||
const current = select.value;
|
||||
select.innerHTML = '<option value="">all stacks</option>';
|
||||
getDistinctStacks().forEach(s => {
|
||||
const stacks = await getDistinctStacks();
|
||||
stacks.forEach(s => {
|
||||
const opt = document.createElement('option');
|
||||
opt.value = s;
|
||||
opt.textContent = s;
|
||||
@@ -59,11 +95,11 @@ function populateStackFilter() {
|
||||
});
|
||||
}
|
||||
|
||||
function filterInstances() {
|
||||
async function filterInstances() {
|
||||
const search = document.getElementById('search-input').value;
|
||||
const state = document.getElementById('filter-state').value;
|
||||
const stack = document.getElementById('filter-stack').value;
|
||||
const instances = getInstances({ search, state, stack });
|
||||
const instances = await getInstances({ search, state, stack });
|
||||
const grid = document.getElementById('instance-grid');
|
||||
|
||||
if (!instances.length) {
|
||||
@@ -76,7 +112,6 @@ function filterInstances() {
|
||||
`<div class="svc-dot ${inst[s] ? 'on' : ''}" title="${s}"></div>`
|
||||
).join('');
|
||||
const activeCount = CARD_SERVICES.filter(s => inst[s]).length;
|
||||
|
||||
return `
|
||||
<div class="instance-card state-${esc(inst.state)}" onclick="navigate('instance', ${inst.vmid})">
|
||||
<div class="card-top">
|
||||
@@ -100,21 +135,50 @@ function filterInstances() {
|
||||
|
||||
// ── Detail Page ───────────────────────────────────────────────────────────────
|
||||
|
||||
function renderDetailPage(vmid) {
|
||||
const inst = getInstance(vmid);
|
||||
const BOOL_FIELDS = ['atlas','argus','semaphore','patchmon','tailscale','andromeda','hardware_acceleration'];
|
||||
|
||||
const FIELD_LABELS = {
|
||||
name: 'name',
|
||||
state: 'state',
|
||||
stack: 'stack',
|
||||
vmid: 'vmid',
|
||||
tailscale_ip: 'tailscale ip',
|
||||
atlas: 'atlas',
|
||||
argus: 'argus',
|
||||
semaphore: 'semaphore',
|
||||
patchmon: 'patchmon',
|
||||
tailscale: 'tailscale',
|
||||
andromeda: 'andromeda',
|
||||
hardware_acceleration: 'hw acceleration',
|
||||
};
|
||||
|
||||
function stateClass(field, val) {
|
||||
if (field !== 'state') return '';
|
||||
return { deployed: 'tl-deployed', testing: 'tl-testing', degraded: 'tl-degraded' }[val] ?? '';
|
||||
}
|
||||
|
||||
function fmtHistVal(field, val) {
|
||||
if (val == null || val === '') return '—';
|
||||
if (BOOL_FIELDS.includes(field)) return val === '1' ? 'on' : 'off';
|
||||
return esc(val);
|
||||
}
|
||||
|
||||
async function renderDetailPage(vmid) {
|
||||
const [inst, history, all] = await Promise.all([getInstance(vmid), getInstanceHistory(vmid), getInstances()]);
|
||||
if (!inst) { navigate('dashboard'); return; }
|
||||
currentVmid = vmid;
|
||||
document.getElementById('nav-count').textContent = `${all.length} instance${all.length !== 1 ? 's' : ''}`;
|
||||
|
||||
document.getElementById('detail-vmid-crumb').textContent = vmid;
|
||||
document.getElementById('detail-name').textContent = inst.name;
|
||||
document.getElementById('detail-vmid-sub').textContent = inst.vmid;
|
||||
document.getElementById('detail-id-sub').textContent = inst.id;
|
||||
document.getElementById('detail-created-sub').textContent = fmtDate(inst.createdAt);
|
||||
document.getElementById('detail-created-sub').textContent = fmtDate(inst.created_at);
|
||||
|
||||
document.getElementById('detail-identity').innerHTML = `
|
||||
<div class="kv-row"><span class="kv-key">name</span><span class="kv-val highlight">${esc(inst.name)}</span></div>
|
||||
<div class="kv-row"><span class="kv-key">state</span><span class="kv-val"><span class="badge ${esc(inst.state)}">${esc(inst.state)}</span></span></div>
|
||||
<div class="kv-row"><span class="kv-key">stack</span><span class="kv-val highlight">${esc(inst.stack) || '—'}</span></div>
|
||||
<div class="kv-row"><span class="kv-key">stack</span><span class="kv-val"><span class="badge ${esc(inst.stack)}">${esc(inst.stack) || '—'}</span></span></div>
|
||||
<div class="kv-row"><span class="kv-key">vmid</span><span class="kv-val highlight">${inst.vmid}</span></div>
|
||||
<div class="kv-row"><span class="kv-key">internal id</span><span class="kv-val">${inst.id}</span></div>
|
||||
`;
|
||||
@@ -134,10 +198,30 @@ function renderDetailPage(vmid) {
|
||||
</div>
|
||||
`).join('');
|
||||
|
||||
document.getElementById('detail-timestamps').innerHTML = `
|
||||
<div class="kv-row"><span class="kv-key">created</span><span class="kv-val">${fmtDateFull(inst.createdAt)}</span></div>
|
||||
<div class="kv-row"><span class="kv-key">updated</span><span class="kv-val">${fmtDateFull(inst.updatedAt)}</span></div>
|
||||
`;
|
||||
document.getElementById('detail-timestamps').innerHTML = history.length
|
||||
? history.map(e => {
|
||||
if (e.field === 'created') return `
|
||||
<div class="tl-item tl-created">
|
||||
<span class="tl-event">instance created</span>
|
||||
<span class="tl-time">${fmtDateFull(e.changed_at)}</span>
|
||||
</div>`;
|
||||
const label = FIELD_LABELS[e.field] ?? esc(e.field);
|
||||
const newCls = (e.field === 'state' || e.field === 'stack')
|
||||
? `badge ${esc(e.new_value)}`
|
||||
: `tl-new ${stateClass(e.field, e.new_value)}`;
|
||||
return `
|
||||
<div class="tl-item">
|
||||
<div class="tl-event">
|
||||
<span class="tl-label">${label}</span>
|
||||
<span class="tl-sep">·</span>
|
||||
<span class="tl-old">${fmtHistVal(e.field, e.old_value)}</span>
|
||||
<span class="tl-arrow">→</span>
|
||||
<span class="${newCls}">${fmtHistVal(e.field, e.new_value)}</span>
|
||||
</div>
|
||||
<span class="tl-time">${fmtDateFull(e.changed_at)}</span>
|
||||
</div>`;
|
||||
}).join('')
|
||||
: '<div class="tl-empty">no history yet</div>';
|
||||
|
||||
document.getElementById('detail-edit-btn').onclick = () => openEditModal(inst.vmid);
|
||||
document.getElementById('detail-delete-btn').onclick = () => confirmDeleteDialog(inst);
|
||||
@@ -146,16 +230,16 @@ function renderDetailPage(vmid) {
|
||||
// ── Modal ─────────────────────────────────────────────────────────────────────
|
||||
|
||||
function openNewModal() {
|
||||
editingId = null;
|
||||
editingVmid = null;
|
||||
document.getElementById('modal-title').textContent = 'new instance';
|
||||
clearForm();
|
||||
document.getElementById('instance-modal').classList.add('open');
|
||||
}
|
||||
|
||||
function openEditModal(vmid) {
|
||||
const inst = getInstance(vmid);
|
||||
async function openEditModal(vmid) {
|
||||
const inst = await getInstance(vmid);
|
||||
if (!inst) return;
|
||||
editingId = inst.id;
|
||||
editingVmid = inst.vmid;
|
||||
document.getElementById('modal-title').textContent = `edit / ${inst.name}`;
|
||||
document.getElementById('f-name').value = inst.name;
|
||||
document.getElementById('f-vmid').value = inst.vmid;
|
||||
@@ -186,19 +270,18 @@ function clearForm() {
|
||||
.forEach(id => { document.getElementById(id).checked = false; });
|
||||
}
|
||||
|
||||
function saveInstance() {
|
||||
async function saveInstance() {
|
||||
const name = document.getElementById('f-name').value.trim();
|
||||
const vmid = parseInt(document.getElementById('f-vmid').value, 10);
|
||||
const state = document.getElementById('f-state').value;
|
||||
const stack = document.getElementById('f-stack').value;
|
||||
const tip = document.getElementById('f-tailscale-ip').value.trim();
|
||||
|
||||
if (!name) { showToast('name is required', 'error'); return; }
|
||||
if (!vmid || vmid < 1) { showToast('a valid vmid is required', 'error'); return; }
|
||||
|
||||
const data = {
|
||||
name, state, stack, vmid,
|
||||
tailscale_ip: tip,
|
||||
tailscale_ip: document.getElementById('f-tailscale-ip').value.trim(),
|
||||
atlas: +document.getElementById('f-atlas').checked,
|
||||
argus: +document.getElementById('f-argus').checked,
|
||||
semaphore: +document.getElementById('f-semaphore').checked,
|
||||
@@ -208,20 +291,19 @@ function saveInstance() {
|
||||
hardware_acceleration: +document.getElementById('f-hardware-accel').checked,
|
||||
};
|
||||
|
||||
const result = editingId ? updateInstance(editingId, data) : createInstance(data);
|
||||
const result = editingVmid
|
||||
? await updateInstance(editingVmid, data)
|
||||
: await createInstance(data);
|
||||
|
||||
if (!result.ok) {
|
||||
showToast(result.error.includes('UNIQUE') ? 'vmid already exists' : 'error saving instance', 'error');
|
||||
return;
|
||||
}
|
||||
if (!result.ok) { showToast(result.error, 'error'); return; }
|
||||
|
||||
showToast(`${name} ${editingId ? 'updated' : 'created'}`, 'success');
|
||||
showToast(`${name} ${editingVmid ? 'updated' : 'created'}`, 'success');
|
||||
closeModal();
|
||||
|
||||
if (currentVmid && document.getElementById('page-detail').classList.contains('active')) {
|
||||
renderDetailPage(vmid);
|
||||
await renderDetailPage(vmid);
|
||||
} else {
|
||||
renderDashboard();
|
||||
await renderDashboard();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -232,11 +314,10 @@ function confirmDeleteDialog(inst) {
|
||||
showToast(`demote ${inst.name} to development before deleting`, 'error');
|
||||
return;
|
||||
}
|
||||
|
||||
document.getElementById('confirm-title').textContent = `delete ${inst.name}?`;
|
||||
document.getElementById('confirm-msg').textContent =
|
||||
`This will permanently remove instance "${inst.name}" (vmid: ${inst.vmid}) from Catalyst. This action cannot be undone.`;
|
||||
document.getElementById('confirm-ok').onclick = () => doDelete(inst.id, inst.name);
|
||||
document.getElementById('confirm-ok').onclick = () => doDelete(inst.vmid, inst.name);
|
||||
document.getElementById('confirm-overlay').classList.add('open');
|
||||
}
|
||||
|
||||
@@ -244,9 +325,9 @@ function closeConfirm() {
|
||||
document.getElementById('confirm-overlay').classList.remove('open');
|
||||
}
|
||||
|
||||
function doDelete(id, name) {
|
||||
deleteInstance(id);
|
||||
async function doDelete(vmid, name) {
|
||||
closeConfirm();
|
||||
await deleteInstance(vmid);
|
||||
showToast(`${name} deleted`, 'success');
|
||||
navigate('dashboard');
|
||||
}
|
||||
@@ -261,18 +342,87 @@ function showToast(msg, type = 'success') {
|
||||
toastTimer = setTimeout(() => t.classList.remove('show'), 3000);
|
||||
}
|
||||
|
||||
// ── Global keyboard handler ───────────────────────────────────────────────────
|
||||
// ── Settings Modal ────────────────────────────────────────────────────────────
|
||||
|
||||
function openSettingsModal() {
|
||||
const sel = document.getElementById('tz-select');
|
||||
if (!sel.options.length) {
|
||||
for (const { label, tz } of TIMEZONES) {
|
||||
const opt = document.createElement('option');
|
||||
opt.value = tz;
|
||||
opt.textContent = label;
|
||||
sel.appendChild(opt);
|
||||
}
|
||||
}
|
||||
sel.value = getTimezone();
|
||||
document.getElementById('settings-modal').classList.add('open');
|
||||
}
|
||||
|
||||
function closeSettingsModal() {
|
||||
document.getElementById('settings-modal').classList.remove('open');
|
||||
document.getElementById('import-file').value = '';
|
||||
}
|
||||
|
||||
async function exportDB() {
|
||||
const res = await fetch('/api/export');
|
||||
const blob = await res.blob();
|
||||
const url = URL.createObjectURL(blob);
|
||||
const a = document.createElement('a');
|
||||
a.href = url;
|
||||
a.download = `catalyst-backup-${new Date().toISOString().slice(0, 10)}.json`;
|
||||
a.click();
|
||||
URL.revokeObjectURL(url);
|
||||
}
|
||||
|
||||
async function importDB() {
|
||||
const file = document.getElementById('import-file').files[0];
|
||||
if (!file) { showToast('Select a backup file first', 'error'); return; }
|
||||
document.getElementById('confirm-title').textContent = 'Replace all instances?';
|
||||
document.getElementById('confirm-msg').textContent =
|
||||
`This will delete all current instances and replace them with the contents of "${file.name}". This cannot be undone.`;
|
||||
document.getElementById('confirm-overlay').classList.add('open');
|
||||
document.getElementById('confirm-ok').onclick = async () => {
|
||||
closeConfirm();
|
||||
try {
|
||||
const { instances, history = [] } = JSON.parse(await file.text());
|
||||
const res = await fetch('/api/import', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ instances, history }),
|
||||
});
|
||||
const data = await res.json();
|
||||
if (!res.ok) { showToast(data.error ?? 'Import failed', 'error'); return; }
|
||||
showToast(`Imported ${data.imported} instance${data.imported !== 1 ? 's' : ''}`, 'success');
|
||||
closeSettingsModal();
|
||||
renderDashboard();
|
||||
} catch {
|
||||
showToast('Invalid backup file', 'error');
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
// ── Keyboard / backdrop ───────────────────────────────────────────────────────
|
||||
|
||||
document.addEventListener('keydown', e => {
|
||||
if (e.key !== 'Escape') return;
|
||||
if (document.getElementById('instance-modal').classList.contains('open')) { closeModal(); return; }
|
||||
if (document.getElementById('confirm-overlay').classList.contains('open')) { closeConfirm(); return; }
|
||||
if (document.getElementById('settings-modal').classList.contains('open')) { closeSettingsModal(); return; }
|
||||
});
|
||||
|
||||
// Close modals on backdrop click
|
||||
document.getElementById('instance-modal').addEventListener('click', e => {
|
||||
if (e.target === document.getElementById('instance-modal')) closeModal();
|
||||
});
|
||||
document.getElementById('confirm-overlay').addEventListener('click', e => {
|
||||
if (e.target === document.getElementById('confirm-overlay')) closeConfirm();
|
||||
});
|
||||
document.getElementById('settings-modal').addEventListener('click', e => {
|
||||
if (e.target === document.getElementById('settings-modal')) closeSettingsModal();
|
||||
});
|
||||
|
||||
document.getElementById('tz-select').addEventListener('change', e => {
|
||||
localStorage.setItem('catalyst_tz', e.target.value);
|
||||
const m = window.location.pathname.match(/^\/instance\/(\d+)/);
|
||||
if (m) renderDetailPage(parseInt(m[1], 10));
|
||||
else renderDashboard();
|
||||
});
|
||||
|
||||
1
js/version.js
Normal file
1
js/version.js
Normal file
@@ -0,0 +1 @@
|
||||
const VERSION = "1.4.0";
|
||||
19
nginx.conf
19
nginx.conf
@@ -1,19 +0,0 @@
|
||||
server {
|
||||
listen 80;
|
||||
root /usr/share/nginx/html;
|
||||
index index.html;
|
||||
|
||||
# SPA fallback for client-side routing
|
||||
location / {
|
||||
try_files $uri $uri/ /index.html;
|
||||
}
|
||||
|
||||
location ~* \.(css|js)$ {
|
||||
expires 1y;
|
||||
add_header Cache-Control "public, immutable";
|
||||
}
|
||||
|
||||
location = /index.html {
|
||||
add_header Cache-Control "no-store";
|
||||
}
|
||||
}
|
||||
1482
package-lock.json
generated
1482
package-lock.json
generated
File diff suppressed because it is too large
Load Diff
13
package.json
13
package.json
@@ -1,15 +1,20 @@
|
||||
{
|
||||
"name": "catalyst",
|
||||
"version": "1.0.3",
|
||||
"version": "1.4.0",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"start": "node server/server.js",
|
||||
"test": "vitest run",
|
||||
"test:watch": "vitest",
|
||||
"version:write": "node -e \"const {version}=JSON.parse(require('fs').readFileSync('package.json','utf8'));require('fs').writeFileSync('js/version.js','const VERSION = \\\"'+version+'\\\";\\n');\""
|
||||
},
|
||||
"dependencies": {
|
||||
"express": "^4.18.0",
|
||||
"helmet": "^8.1.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"vitest": "^2.0.0",
|
||||
"sql.js": "^1.10.2",
|
||||
"jsdom": "^25.0.0"
|
||||
"jsdom": "^25.0.0",
|
||||
"supertest": "^7.0.0",
|
||||
"vitest": "^3.2.4"
|
||||
}
|
||||
}
|
||||
|
||||
212
server/db.js
Normal file
212
server/db.js
Normal file
@@ -0,0 +1,212 @@
|
||||
import { DatabaseSync } from 'node:sqlite';
|
||||
import { mkdirSync } from 'fs';
|
||||
import { dirname, join } from 'path';
|
||||
import { fileURLToPath } from 'url';
|
||||
|
||||
const __dirname = dirname(fileURLToPath(import.meta.url));
|
||||
const DEFAULT_PATH = join(__dirname, '../data/catalyst.db');
|
||||
|
||||
let db;
|
||||
|
||||
function init(path) {
|
||||
if (path !== ':memory:') {
|
||||
mkdirSync(dirname(path), { recursive: true });
|
||||
}
|
||||
db = new DatabaseSync(path);
|
||||
db.exec('PRAGMA journal_mode = WAL');
|
||||
db.exec('PRAGMA foreign_keys = ON');
|
||||
db.exec('PRAGMA synchronous = NORMAL');
|
||||
createSchema();
|
||||
if (path !== ':memory:') seed();
|
||||
}
|
||||
|
||||
// Create all tables and indexes if they do not already exist (idempotent).
// `instances` is keyed for lookups by vmid (UNIQUE) and filtered by
// state/stack; `instance_history` stores one row per field change, keyed
// by vmid rather than by instances.id (history survives vmid-based joins
// but is deleted explicitly alongside its instance — see deleteInstance).
function createSchema() {
  db.exec(`
    CREATE TABLE IF NOT EXISTS instances (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      name TEXT NOT NULL CHECK(length(name) BETWEEN 1 AND 100),
      state TEXT NOT NULL DEFAULT 'deployed'
        CHECK(state IN ('deployed','testing','degraded')),
      stack TEXT NOT NULL DEFAULT 'development'
        CHECK(stack IN ('production','development')),
      vmid INTEGER NOT NULL UNIQUE CHECK(vmid > 0),
      atlas INTEGER NOT NULL DEFAULT 0 CHECK(atlas IN (0,1)),
      argus INTEGER NOT NULL DEFAULT 0 CHECK(argus IN (0,1)),
      semaphore INTEGER NOT NULL DEFAULT 0 CHECK(semaphore IN (0,1)),
      patchmon INTEGER NOT NULL DEFAULT 0 CHECK(patchmon IN (0,1)),
      tailscale INTEGER NOT NULL DEFAULT 0 CHECK(tailscale IN (0,1)),
      andromeda INTEGER NOT NULL DEFAULT 0 CHECK(andromeda IN (0,1)),
      tailscale_ip TEXT NOT NULL DEFAULT '',
      hardware_acceleration INTEGER NOT NULL DEFAULT 0 CHECK(hardware_acceleration IN (0,1)),
      created_at TEXT NOT NULL DEFAULT (datetime('now')),
      updated_at TEXT NOT NULL DEFAULT (datetime('now'))
    );
    CREATE INDEX IF NOT EXISTS idx_instances_state ON instances(state);
    CREATE INDEX IF NOT EXISTS idx_instances_stack ON instances(stack);

    CREATE TABLE IF NOT EXISTS instance_history (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      vmid INTEGER NOT NULL,
      field TEXT NOT NULL,
      old_value TEXT,
      new_value TEXT,
      changed_at TEXT NOT NULL DEFAULT (datetime('now'))
    );
    CREATE INDEX IF NOT EXISTS idx_history_vmid ON instance_history(vmid);
  `);
}
|
||||
|
||||
// Demo dataset inserted once into a fresh file-backed database (see seed()).
// Keys match the instances table columns exactly; service flags are 0/1 ints.
const SEED = [
  { name: 'plex', state: 'deployed', stack: 'production', vmid: 117, atlas: 1, argus: 1, semaphore: 0, patchmon: 1, tailscale: 1, andromeda: 0, tailscale_ip: '100.64.0.1', hardware_acceleration: 1 },
  { name: 'foldergram', state: 'testing', stack: 'development', vmid: 137, atlas: 0, argus: 0, semaphore: 0, patchmon: 0, tailscale: 0, andromeda: 0, tailscale_ip: '', hardware_acceleration: 0 },
  { name: 'homeassistant', state: 'deployed', stack: 'production', vmid: 102, atlas: 1, argus: 1, semaphore: 1, patchmon: 1, tailscale: 1, andromeda: 0, tailscale_ip: '100.64.0.5', hardware_acceleration: 0 },
  { name: 'gitea', state: 'deployed', stack: 'production', vmid: 110, atlas: 1, argus: 0, semaphore: 1, patchmon: 1, tailscale: 1, andromeda: 0, tailscale_ip: '100.64.0.8', hardware_acceleration: 0 },
  { name: 'postgres-primary', state: 'degraded', stack: 'production', vmid: 201, atlas: 1, argus: 1, semaphore: 0, patchmon: 1, tailscale: 0, andromeda: 1, tailscale_ip: '', hardware_acceleration: 0 },
  { name: 'nextcloud', state: 'testing', stack: 'development', vmid: 144, atlas: 0, argus: 0, semaphore: 0, patchmon: 0, tailscale: 1, andromeda: 0, tailscale_ip: '100.64.0.12', hardware_acceleration: 0 },
  { name: 'traefik', state: 'deployed', stack: 'production', vmid: 100, atlas: 1, argus: 1, semaphore: 0, patchmon: 1, tailscale: 1, andromeda: 0, tailscale_ip: '100.64.0.2', hardware_acceleration: 0 },
  { name: 'monitoring-stack', state: 'testing', stack: 'development', vmid: 155, atlas: 0, argus: 0, semaphore: 1, patchmon: 0, tailscale: 0, andromeda: 0, tailscale_ip: '', hardware_acceleration: 0 },
];
|
||||
|
||||
// Insert the demo dataset into an empty database. Idempotent: a non-empty
// instances table is left untouched, so user data is never overwritten.
function seed() {
  const count = db.prepare('SELECT COUNT(*) as n FROM instances').get().n;
  if (count > 0) return;
  const insert = db.prepare(`
    INSERT INTO instances
      (name, state, stack, vmid, atlas, argus, semaphore, patchmon,
       tailscale, andromeda, tailscale_ip, hardware_acceleration)
    VALUES
      (@name, @state, @stack, @vmid, @atlas, @argus, @semaphore, @patchmon,
       @tailscale, @andromeda, @tailscale_ip, @hardware_acceleration)
  `);
  db.exec('BEGIN');
  try {
    for (const s of SEED) insert.run(s);
    db.exec('COMMIT');
  } catch (e) {
    // Fix: a failed insert used to leave the connection inside an open
    // transaction, making every later BEGIN fail with "cannot start a
    // transaction within a transaction". Roll back and surface the error.
    db.exec('ROLLBACK');
    throw e;
  }
}
|
||||
|
||||
// ── Queries ───────────────────────────────────────────────────────────────────
|
||||
|
||||
/**
 * List instances, optionally filtered.
 * @param {{search?: string, state?: string, stack?: string}} [filters]
 * @returns {object[]} matching rows, sorted by name ascending
 */
export function getInstances(filters = {}) {
  const { search, state, stack } = filters;
  // Assemble the query incrementally; `WHERE 1=1` lets every filter append
  // a plain `AND …` clause without special-casing the first one.
  const clauses = ['SELECT * FROM instances WHERE 1=1'];
  const bindings = {};
  if (search) {
    clauses.push('AND (name LIKE @search OR CAST(vmid AS TEXT) LIKE @search OR stack LIKE @search)');
    bindings.search = `%${search}%`;
  }
  if (state) {
    clauses.push('AND state = @state');
    bindings.state = state;
  }
  if (stack) {
    clauses.push('AND stack = @stack');
    bindings.stack = stack;
  }
  clauses.push('ORDER BY name ASC');
  return db.prepare(clauses.join(' ')).all(bindings);
}
|
||||
|
||||
/**
 * Fetch a single instance by its vmid.
 * @param {number} vmid
 * @returns {object|null} the row, or null when no instance matches
 */
export function getInstance(vmid) {
  const row = db.prepare('SELECT * FROM instances WHERE vmid = ?').get(vmid);
  return row === undefined ? null : row;
}
|
||||
|
||||
/**
 * All distinct non-empty stack names, sorted alphabetically.
 * @returns {string[]}
 */
export function getDistinctStacks() {
  const rows = db
    .prepare(`SELECT DISTINCT stack FROM instances WHERE stack != '' ORDER BY stack`)
    .all();
  return rows.map((row) => row.stack);
}
|
||||
|
||||
// ── Mutations ─────────────────────────────────────────────────────────────────
|
||||
|
||||
// Columns diffed by updateInstance(); one instance_history row is written
// per field whose value changed. Array order only controls the order in
// which change events are recorded.
const HISTORY_FIELDS = [
  'name', 'state', 'stack', 'vmid', 'tailscale_ip',
  'atlas', 'argus', 'semaphore', 'patchmon', 'tailscale', 'andromeda',
  'hardware_acceleration',
];
|
||||
|
||||
/**
 * Insert a new instance row and record a synthetic 'created' history event.
 * @param {object} data - normalised row (see routes.js normalise())
 * @throws when vmid is duplicated or a CHECK constraint fails
 */
export function createInstance(data) {
  const insert = db.prepare(`
    INSERT INTO instances
      (name, state, stack, vmid, atlas, argus, semaphore, patchmon,
       tailscale, andromeda, tailscale_ip, hardware_acceleration)
    VALUES
      (@name, @state, @stack, @vmid, @atlas, @argus, @semaphore, @patchmon,
       @tailscale, @andromeda, @tailscale_ip, @hardware_acceleration)
  `);
  insert.run(data);
  // The 'created' marker gives every instance a non-empty history trail.
  const logCreated = db.prepare(
    `INSERT INTO instance_history (vmid, field, old_value, new_value) VALUES (?, 'created', NULL, NULL)`
  );
  logCreated.run(data.vmid);
}
|
||||
|
||||
/**
 * Update the instance currently identified by `vmid` and record a history
 * event for every field whose value changed. `data.vmid` may differ from
 * `vmid` (the vmid itself is editable); history events are written under
 * the NEW vmid.
 *
 * @param {number} vmid - current vmid of the row to update
 * @param {object} data - normalised full replacement row
 */
export function updateInstance(vmid, data) {
  const old = getInstance(vmid);
  // Fix: an unknown vmid used to crash below with a TypeError when reading
  // old[field]. Routes 404 before calling this, so the guard is defensive;
  // a missing row is now a no-op.
  if (!old) return;
  db.prepare(`
    UPDATE instances SET
      name=@name, state=@state, stack=@stack, vmid=@newVmid,
      atlas=@atlas, argus=@argus, semaphore=@semaphore, patchmon=@patchmon,
      tailscale=@tailscale, andromeda=@andromeda, tailscale_ip=@tailscale_ip,
      hardware_acceleration=@hardware_acceleration, updated_at=datetime('now')
    WHERE vmid=@vmid
  `).run({ ...data, newVmid: data.vmid, vmid });
  const newVmid = data.vmid;
  const insertEvt = db.prepare(
    `INSERT INTO instance_history (vmid, field, old_value, new_value) VALUES (?, ?, ?, ?)`
  );
  // Compare as strings so 0 vs '0' and null vs '' diff consistently with
  // how values are stored in the TEXT old_value/new_value columns.
  for (const field of HISTORY_FIELDS) {
    const oldVal = String(old[field] ?? '');
    const newVal = String(field === 'vmid' ? newVmid : (data[field] ?? ''));
    if (oldVal !== newVal) insertEvt.run(newVmid, field, oldVal, newVal);
  }
}
|
||||
|
||||
/**
 * Delete an instance and its history, atomically.
 * @param {number} vmid
 */
export function deleteInstance(vmid) {
  // Fix: the two deletes were not transactional — a failure between them
  // could orphan the instance row while its history was already gone.
  db.exec('BEGIN');
  try {
    db.prepare('DELETE FROM instance_history WHERE vmid = ?').run(vmid);
    db.prepare('DELETE FROM instances WHERE vmid = ?').run(vmid);
    db.exec('COMMIT');
  } catch (e) {
    db.exec('ROLLBACK');
    throw e;
  }
}
|
||||
|
||||
/**
 * Replace the entire dataset with a backup payload, atomically: either the
 * whole import applies or the previous data survives untouched.
 *
 * @param {object[]} rows - normalised instance rows
 * @param {object[]} [historyRows] - optional history events (v2 backups)
 * @throws on constraint violations (e.g. duplicate vmids within the backup)
 */
export function importInstances(rows, historyRows = []) {
  db.exec('BEGIN');
  try {
    db.exec('DELETE FROM instance_history');
    db.exec('DELETE FROM instances');
    const insert = db.prepare(`
      INSERT INTO instances
        (name, state, stack, vmid, atlas, argus, semaphore, patchmon,
         tailscale, andromeda, tailscale_ip, hardware_acceleration)
      VALUES
        (@name, @state, @stack, @vmid, @atlas, @argus, @semaphore, @patchmon,
         @tailscale, @andromeda, @tailscale_ip, @hardware_acceleration)
    `);
    for (const row of rows) insert.run(row);
    if (historyRows.length) {
      const insertHist = db.prepare(
        `INSERT INTO instance_history (vmid, field, old_value, new_value, changed_at) VALUES (?, ?, ?, ?, ?)`
      );
      for (const h of historyRows) insertHist.run(h.vmid, h.field, h.old_value ?? null, h.new_value ?? null, h.changed_at);
    }
    db.exec('COMMIT');
  } catch (e) {
    // Fix: on failure the old code left the connection inside an open
    // transaction AND the previous dataset already deleted. Rolling back
    // restores the old data and keeps later writes working.
    db.exec('ROLLBACK');
    throw e;
  }
}
|
||||
|
||||
/**
 * History events for one instance, newest first.
 * @param {number} vmid
 * @returns {object[]}
 */
export function getInstanceHistory(vmid) {
  const stmt = db.prepare(
    'SELECT * FROM instance_history WHERE vmid = ? ORDER BY changed_at DESC'
  );
  return stmt.all(vmid);
}
|
||||
|
||||
/**
 * Every history event across all instances, grouped by vmid and then
 * ordered chronologically — the shape the export endpoint serialises.
 * @returns {object[]}
 */
export function getAllHistory() {
  const stmt = db.prepare('SELECT * FROM instance_history ORDER BY vmid, changed_at');
  return stmt.all();
}
|
||||
|
||||
// ── Test helpers ──────────────────────────────────────────────────────────────
|
||||
|
||||
/**
 * Test-only helper: close any open connection and start over with a fresh
 * in-memory database (empty schema, no seed data).
 */
export function _resetForTest() {
  if (db) {
    db.close();
  }
  init(':memory:');
}
|
||||
|
||||
// ── Boot ──────────────────────────────────────────────────────────────────────
|
||||
// Skipped in test environment — parallel Vitest workers would race to open
|
||||
// the same file, causing "database is locked". _resetForTest() in beforeEach
|
||||
// handles initialisation for every test worker using :memory: instead.
|
||||
|
||||
if (process.env.NODE_ENV !== 'test') {
  // DB_PATH lets deployments relocate the database file; defaults to
  // ../data/catalyst.db next to the server sources.
  const DB_PATH = process.env.DB_PATH ?? DEFAULT_PATH;
  try {
    init(DB_PATH);
  } catch (e) {
    // Fail fast at import time: the server cannot do anything useful
    // without its database, so exit non-zero for the process supervisor.
    console.error('[catalyst] fatal: could not open database at', DB_PATH);
    console.error('[catalyst] ensure the data directory exists and is writable by the server process.');
    console.error(e);
    process.exit(1);
  }
}
|
||||
162
server/routes.js
Normal file
162
server/routes.js
Normal file
@@ -0,0 +1,162 @@
|
||||
import { Router } from 'express';
|
||||
import {
|
||||
getInstances, getInstance, getDistinctStacks,
|
||||
createInstance, updateInstance, deleteInstance, importInstances, getInstanceHistory, getAllHistory,
|
||||
} from './db.js';
|
||||
|
||||
// Single API router; server.js mounts it under the /api prefix.
export const router = Router();
|
||||
|
||||
// ── Validation ────────────────────────────────────────────────────────────────
|
||||
|
||||
const VALID_STATES = ['deployed', 'testing', 'degraded'];
const VALID_STACKS = ['production', 'development'];
const SERVICE_KEYS = ['atlas', 'argus', 'semaphore', 'patchmon', 'tailscale', 'andromeda'];

// True when `ip` is four dot-separated decimal octets, each in 0..255.
function isValidIPv4(ip) {
  const octets = ip.split('.');
  if (octets.length !== 4) return false;
  return octets.every((o) => /^\d{1,3}$/.test(o) && Number(o) <= 255);
}

/**
 * Validate a request body for create/update.
 * @param {object} body - raw (un-normalised) request body
 * @returns {string[]} human-readable errors; empty array when valid
 */
function validate(body) {
  const errors = [];
  if (!body.name || typeof body.name !== 'string' || !body.name.trim())
    errors.push('name is required');
  if (!Number.isInteger(body.vmid) || body.vmid < 1)
    errors.push('vmid must be a positive integer');
  if (!VALID_STATES.includes(body.state))
    errors.push(`state must be one of: ${VALID_STATES.join(', ')}`);
  if (!VALID_STACKS.includes(body.stack))
    errors.push(`stack must be one of: ${VALID_STACKS.join(', ')}`);
  const ip = (body.tailscale_ip ?? '').trim();
  // Fix: the previous /^(\d{1,3}\.){3}\d{1,3}$/ accepted octets > 255
  // (e.g. '999.0.0.1'), contradicting the error message below.
  if (ip && !isValidIPv4(ip))
    errors.push('tailscale_ip must be a valid IPv4 address or empty');
  return errors;
}

/**
 * Map a database error onto an HTTP response: UNIQUE violations (duplicate
 * vmid) → 409, CHECK violations → 400, anything else → logged 500.
 * @param {string} context - label for the server log
 * @param {Error} e
 * @param {object} res - Express response
 */
function handleDbError(context, e, res) {
  if (e.message.includes('UNIQUE')) return res.status(409).json({ error: 'vmid already exists' });
  if (e.message.includes('CHECK')) return res.status(400).json({ error: 'invalid field value' });
  console.error(context, e);
  res.status(500).json({ error: 'internal server error' });
}

/**
 * Coerce a raw request body into a row matching the instances schema:
 * trimmed strings, 0/1 integer service flags, and only known columns.
 * @param {object} body
 * @returns {object} normalised row
 */
function normalise(body) {
  const row = {
    name: (body.name ?? '').trim(),
    state: body.state,
    stack: body.stack,
    vmid: body.vmid,
    tailscale_ip: (body.tailscale_ip ?? '').trim(),
    hardware_acceleration: body.hardware_acceleration ? 1 : 0,
  };
  for (const svc of SERVICE_KEYS) row[svc] = body[svc] ? 1 : 0;
  return row;
}
|
||||
|
||||
// ── Routes ────────────────────────────────────────────────────────────────────
|
||||
|
||||
// GET /api/instances/stacks — registered before /:vmid so the literal
// segment 'stacks' is not swallowed by the vmid parameter.
router.get('/instances/stacks', (_req, res) => {
  const stacks = getDistinctStacks();
  res.json(stacks);
});
|
||||
|
||||
// GET /api/instances — list with optional ?search= / ?state= / ?stack=
// query filters (combined with AND semantics).
router.get('/instances', (req, res) => {
  const filters = {
    search: req.query.search,
    state: req.query.state,
    stack: req.query.stack,
  };
  res.json(getInstances(filters));
});
|
||||
|
||||
// GET /api/instances/:vmid/history — change events for one instance.
router.get('/instances/:vmid/history', (req, res) => {
  const id = parseInt(req.params.vmid, 10);
  if (!id) {
    return res.status(400).json({ error: 'invalid vmid' });
  }
  if (!getInstance(id)) {
    return res.status(404).json({ error: 'instance not found' });
  }
  res.json(getInstanceHistory(id));
});
|
||||
|
||||
// GET /api/instances/:vmid — fetch a single instance.
router.get('/instances/:vmid', (req, res) => {
  const id = parseInt(req.params.vmid, 10);
  if (!id) {
    return res.status(400).json({ error: 'invalid vmid' });
  }
  const found = getInstance(id);
  if (found === null) {
    return res.status(404).json({ error: 'instance not found' });
  }
  res.json(found);
});
|
||||
|
||||
// POST /api/instances — create; 201 with the stored row on success.
router.post('/instances', (req, res) => {
  const validationErrors = validate(req.body);
  if (validationErrors.length > 0) {
    return res.status(400).json({ errors: validationErrors });
  }
  try {
    const row = normalise(req.body);
    createInstance(row);
    // Re-read so the response includes DB-generated fields (id, timestamps).
    res.status(201).json(getInstance(row.vmid));
  } catch (e) {
    handleDbError('POST /api/instances', e, res);
  }
});
|
||||
|
||||
// PUT /api/instances/:vmid — full replacement update.
router.put('/instances/:vmid', (req, res) => {
  const id = parseInt(req.params.vmid, 10);
  if (!id) {
    return res.status(400).json({ error: 'invalid vmid' });
  }
  if (!getInstance(id)) {
    return res.status(404).json({ error: 'instance not found' });
  }
  const validationErrors = validate(req.body);
  if (validationErrors.length > 0) {
    return res.status(400).json({ errors: validationErrors });
  }
  try {
    const row = normalise(req.body);
    updateInstance(id, row);
    // The vmid itself may have changed, so re-read via the new value.
    res.json(getInstance(row.vmid));
  } catch (e) {
    handleDbError('PUT /api/instances/:vmid', e, res);
  }
});
|
||||
|
||||
// GET /api/export — v2 backup: instances plus full history, served as a
// dated JSON attachment.
router.get('/export', (_req, res) => {
  const instances = getInstances();
  const history = getAllHistory();
  const today = new Date().toISOString().slice(0, 10);
  res.setHeader('Content-Disposition', `attachment; filename="catalyst-backup-${today}.json"`);
  res.json({ version: 2, exported_at: new Date().toISOString(), instances, history });
});
|
||||
|
||||
// POST /api/import — replace the whole dataset with a backup payload.
// Accepts both v1 backups (instances only) and v2 (instances + history).
router.post('/import', (req, res) => {
  const { instances, history = [] } = req.body ?? {};
  if (!Array.isArray(instances)) {
    return res.status(400).json({ error: 'body must contain an instances array' });
  }
  // Validate every row up front so a bad backup fails with 400 before
  // anything is written.
  const errors = [];
  for (const [i, row] of instances.entries()) {
    const errs = validate(normalise(row));
    if (errs.length) errors.push({ index: i, errors: errs });
  }
  if (errors.length) return res.status(400).json({ errors });
  try {
    importInstances(instances.map(normalise), Array.isArray(history) ? history : []);
    res.json({ imported: instances.length });
  } catch (e) {
    // Fix: this route previously inlined a bare 500, unlike every other
    // write route. Using handleDbError makes duplicate vmids inside the
    // backup surface as 409 and CHECK violations as 400, with 500 reserved
    // for genuinely unexpected failures.
    handleDbError('POST /api/import', e, res);
  }
});
|
||||
|
||||
// DELETE /api/instances/:vmid
|
||||
router.delete('/instances/:vmid', (req, res) => {
|
||||
const vmid = parseInt(req.params.vmid, 10);
|
||||
if (!vmid) return res.status(400).json({ error: 'invalid vmid' });
|
||||
|
||||
const instance = getInstance(vmid);
|
||||
if (!instance) return res.status(404).json({ error: 'instance not found' });
|
||||
if (instance.stack !== 'development')
|
||||
return res.status(422).json({ error: 'only development instances can be deleted' });
|
||||
|
||||
try {
|
||||
deleteInstance(vmid);
|
||||
res.status(204).end();
|
||||
} catch (e) {
|
||||
handleDbError('DELETE /api/instances/:vmid', e, res);
|
||||
}
|
||||
});
|
||||
51
server/server.js
Normal file
51
server/server.js
Normal file
@@ -0,0 +1,51 @@
|
||||
import express from 'express';
|
||||
import helmet from 'helmet';
|
||||
import { fileURLToPath } from 'url';
|
||||
import { dirname, join } from 'path';
|
||||
import { router } from './routes.js';
|
||||
|
||||
const __dirname = dirname(fileURLToPath(import.meta.url));
// PORT may arrive as a string from the environment; app.listen accepts both.
const PORT = process.env.PORT ?? 3000;

// Exported so tests can exercise the app through supertest without binding
// a real port (see the boot guard at the bottom of this file).
export const app = express();

// Security headers. Registration order matters: helmet must run before any
// route so every response carries the headers.
app.use(helmet({
  contentSecurityPolicy: {
    useDefaults: false, // explicit — upgrade-insecure-requests breaks HTTP deployments
    directives: {
      'default-src': ["'self'"],
      'base-uri': ["'self'"],
      'font-src': ["'self'", 'https://fonts.gstatic.com'],
      'form-action': ["'self'"],
      'frame-ancestors': ["'self'"],
      'img-src': ["'self'", 'data:'],
      'object-src': ["'none'"],
      'script-src': ["'self'"],
      'script-src-attr': ["'unsafe-inline'"], // allow onclick handlers
      'style-src': ["'self'", 'https://fonts.googleapis.com'],
    },
  },
}));
app.use(express.json());

// API
app.use('/api', router);

// Static files — serves the front-end from the repository root.
app.use(express.static(join(__dirname, '..')));

// SPA fallback — all non-API, non-asset routes serve index.html
app.get('*', (req, res) => {
  res.sendFile(join(__dirname, '../index.html'));
});

// Error handler — must be registered last (four-argument signature).
app.use((err, _req, res, _next) => {
  console.error(err);
  res.status(500).json({ error: 'internal server error' });
});

// Boot — only when run directly, not when imported by tests
if (process.argv[1] === fileURLToPath(import.meta.url)) {
  app.listen(PORT, () => console.log(`catalyst on :${PORT}`));
}
|
||||
455
tests/api.test.js
Normal file
455
tests/api.test.js
Normal file
@@ -0,0 +1,455 @@
|
||||
import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest'
|
||||
import request from 'supertest'
|
||||
import { app } from '../server/server.js'
|
||||
import { _resetForTest } from '../server/db.js'
|
||||
import * as dbModule from '../server/db.js'
|
||||
|
||||
// Every test starts from a fresh, empty in-memory database.
beforeEach(() => _resetForTest())

// Minimal valid create payload; individual tests spread-override fields.
const base = {
  name: 'traefik',
  vmid: 100,
  state: 'deployed',
  stack: 'production',
  atlas: 0, argus: 0, semaphore: 0, patchmon: 0, tailscale: 0, andromeda: 0,
  tailscale_ip: '',
  hardware_acceleration: 0,
}
|
||||
|
||||
// ── GET /api/instances ────────────────────────────────────────────────────────
|
||||
|
||||
// Listing: default sort, each filter alone, and filter combination.
describe('GET /api/instances', () => {
  it('returns empty array when no instances exist', async () => {
    const res = await request(app).get('/api/instances')
    expect(res.status).toBe(200)
    expect(res.body).toEqual([])
  })

  it('returns all instances sorted by name', async () => {
    await request(app).post('/api/instances').send({ ...base, vmid: 1, name: 'zebra' })
    await request(app).post('/api/instances').send({ ...base, vmid: 2, name: 'alpha' })
    const res = await request(app).get('/api/instances')
    expect(res.status).toBe(200)
    expect(res.body).toHaveLength(2)
    expect(res.body[0].name).toBe('alpha')
    expect(res.body[1].name).toBe('zebra')
  })

  it('filters by state', async () => {
    await request(app).post('/api/instances').send({ ...base, vmid: 1, name: 'a', state: 'deployed' })
    await request(app).post('/api/instances').send({ ...base, vmid: 2, name: 'b', state: 'degraded' })
    const res = await request(app).get('/api/instances?state=deployed')
    expect(res.body).toHaveLength(1)
    expect(res.body[0].name).toBe('a')
  })

  it('filters by stack', async () => {
    await request(app).post('/api/instances').send({ ...base, vmid: 1, name: 'a', stack: 'production' })
    await request(app).post('/api/instances').send({ ...base, vmid: 2, name: 'b', stack: 'development', state: 'testing' })
    const res = await request(app).get('/api/instances?stack=development')
    expect(res.body).toHaveLength(1)
    expect(res.body[0].name).toBe('b')
  })

  it('searches by name substring', async () => {
    await request(app).post('/api/instances').send({ ...base, vmid: 1, name: 'plex' })
    await request(app).post('/api/instances').send({ ...base, vmid: 2, name: 'gitea' })
    const res = await request(app).get('/api/instances?search=ple')
    expect(res.body).toHaveLength(1)
    expect(res.body[0].name).toBe('plex')
  })

  it('searches by vmid', async () => {
    await request(app).post('/api/instances').send({ ...base, vmid: 137, name: 'a' })
    await request(app).post('/api/instances').send({ ...base, vmid: 200, name: 'b' })
    const res = await request(app).get('/api/instances?search=137')
    expect(res.body).toHaveLength(1)
    expect(res.body[0].vmid).toBe(137)
  })

  it('combines search and state filters', async () => {
    await request(app).post('/api/instances').send({ ...base, vmid: 1, name: 'plex', state: 'deployed' })
    await request(app).post('/api/instances').send({ ...base, vmid: 2, name: 'plex2', state: 'degraded' })
    const res = await request(app).get('/api/instances?search=plex&state=deployed')
    expect(res.body).toHaveLength(1)
    expect(res.body[0].name).toBe('plex')
  })
})
|
||||
|
||||
// ── GET /api/instances/stacks ─────────────────────────────────────────────────
|
||||
|
||||
// Stacks endpoint: deduplication and alphabetical ordering.
describe('GET /api/instances/stacks', () => {
  it('returns empty array when no instances exist', async () => {
    const res = await request(app).get('/api/instances/stacks')
    expect(res.status).toBe(200)
    expect(res.body).toEqual([])
  })

  it('returns unique stacks sorted alphabetically', async () => {
    await request(app).post('/api/instances').send({ ...base, vmid: 1, name: 'a', stack: 'production' })
    await request(app).post('/api/instances').send({ ...base, vmid: 2, name: 'b', stack: 'development', state: 'testing' })
    await request(app).post('/api/instances').send({ ...base, vmid: 3, name: 'c', stack: 'production' })
    const res = await request(app).get('/api/instances/stacks')
    expect(res.body).toEqual(['development', 'production'])
  })
})
|
||||
|
||||
// ── GET /api/instances/:vmid ──────────────────────────────────────────────────
|
||||
|
||||
// Single-instance fetch: found, not found, and malformed vmid.
describe('GET /api/instances/:vmid', () => {
  it('returns the instance for a known vmid', async () => {
    await request(app).post('/api/instances').send({ ...base, vmid: 117, name: 'plex' })
    const res = await request(app).get('/api/instances/117')
    expect(res.status).toBe(200)
    expect(res.body.name).toBe('plex')
    expect(res.body.vmid).toBe(117)
  })

  it('returns 404 for unknown vmid', async () => {
    const res = await request(app).get('/api/instances/999')
    expect(res.status).toBe(404)
    expect(res.body.error).toBeDefined()
  })

  it('returns 400 for non-numeric vmid', async () => {
    const res = await request(app).get('/api/instances/abc')
    expect(res.status).toBe(400)
  })
})
|
||||
|
||||
// ── POST /api/instances ───────────────────────────────────────────────────────
|
||||
|
||||
// Creation: happy path, flag persistence, duplicate vmid, and validation.
describe('POST /api/instances', () => {
  it('creates an instance and returns 201 with the created record', async () => {
    const res = await request(app).post('/api/instances').send(base)
    expect(res.status).toBe(201)
    expect(res.body.name).toBe('traefik')
    expect(res.body.vmid).toBe(100)
    expect(res.body.created_at).not.toBeNull()
    expect(res.body.updated_at).not.toBeNull()
  })

  it('stores service flags correctly', async () => {
    const res = await request(app).post('/api/instances').send({ ...base, atlas: 1, tailscale: 1, hardware_acceleration: 1 })
    expect(res.body.atlas).toBe(1)
    expect(res.body.tailscale).toBe(1)
    expect(res.body.hardware_acceleration).toBe(1)
    expect(res.body.argus).toBe(0)
  })

  it('returns 409 for duplicate vmid', async () => {
    await request(app).post('/api/instances').send(base)
    const res = await request(app).post('/api/instances').send({ ...base, name: 'other' })
    expect(res.status).toBe(409)
    expect(res.body.error).toMatch(/vmid/)
  })

  it('returns 400 when name is missing', async () => {
    const res = await request(app).post('/api/instances').send({ ...base, name: '' })
    expect(res.status).toBe(400)
    expect(res.body.errors).toBeInstanceOf(Array)
  })

  it('returns 400 for vmid less than 1', async () => {
    const res = await request(app).post('/api/instances').send({ ...base, vmid: 0 })
    expect(res.status).toBe(400)
    expect(res.body.errors).toBeInstanceOf(Array)
  })

  it('returns 400 for invalid state', async () => {
    const res = await request(app).post('/api/instances').send({ ...base, state: 'invalid' })
    expect(res.status).toBe(400)
  })

  it('returns 400 for invalid stack', async () => {
    const res = await request(app).post('/api/instances').send({ ...base, stack: 'invalid' })
    expect(res.status).toBe(400)
  })

  it('trims whitespace from name', async () => {
    const res = await request(app).post('/api/instances').send({ ...base, name: ' plex ' })
    expect(res.status).toBe(201)
    expect(res.body.name).toBe('plex')
  })
})
|
||||
|
||||
// ── PUT /api/instances/:vmid ──────────────────────────────────────────────────
|
||||
|
||||
// Updates: field changes, vmid migration, 404, validation, and conflicts.
describe('PUT /api/instances/:vmid', () => {
  it('updates fields and returns the updated record', async () => {
    await request(app).post('/api/instances').send(base)
    const res = await request(app).put('/api/instances/100').send({ ...base, name: 'updated', state: 'degraded' })
    expect(res.status).toBe(200)
    expect(res.body.name).toBe('updated')
    expect(res.body.state).toBe('degraded')
  })

  it('can change the vmid', async () => {
    await request(app).post('/api/instances').send(base)
    await request(app).put('/api/instances/100').send({ ...base, vmid: 200 })
    expect((await request(app).get('/api/instances/100')).status).toBe(404)
    expect((await request(app).get('/api/instances/200')).status).toBe(200)
  })

  it('returns 404 for unknown vmid', async () => {
    const res = await request(app).put('/api/instances/999').send(base)
    expect(res.status).toBe(404)
  })

  it('returns 400 for validation errors', async () => {
    await request(app).post('/api/instances').send(base)
    const res = await request(app).put('/api/instances/100').send({ ...base, name: '' })
    expect(res.status).toBe(400)
    expect(res.body.errors).toBeInstanceOf(Array)
  })

  it('returns 409 when new vmid conflicts with an existing instance', async () => {
    await request(app).post('/api/instances').send({ ...base, vmid: 100, name: 'a' })
    await request(app).post('/api/instances').send({ ...base, vmid: 200, name: 'b' })
    const res = await request(app).put('/api/instances/100').send({ ...base, vmid: 200 })
    expect(res.status).toBe(409)
  })
})
|
||||
|
||||
// ── DELETE /api/instances/:vmid ───────────────────────────────────────────────
|
||||
|
||||
// Deletion: dev-only guard rail (422 for production), 404, and bad input.
describe('DELETE /api/instances/:vmid', () => {
  it('deletes a development instance and returns 204', async () => {
    await request(app).post('/api/instances').send({ ...base, stack: 'development', state: 'testing' })
    const res = await request(app).delete('/api/instances/100')
    expect(res.status).toBe(204)
    expect((await request(app).get('/api/instances/100')).status).toBe(404)
  })

  it('returns 422 when attempting to delete a production instance', async () => {
    await request(app).post('/api/instances').send({ ...base, stack: 'production' })
    const res = await request(app).delete('/api/instances/100')
    expect(res.status).toBe(422)
    expect(res.body.error).toMatch(/development/)
  })

  it('returns 404 for unknown vmid', async () => {
    const res = await request(app).delete('/api/instances/999')
    expect(res.status).toBe(404)
  })

  it('returns 400 for non-numeric vmid', async () => {
    const res = await request(app).delete('/api/instances/abc')
    expect(res.status).toBe(400)
  })
})
|
||||
|
||||
// ── GET /api/instances/:vmid/history ─────────────────────────────────────────
|
||||
|
||||
// History endpoint: 'created' event presence, 404, and bad input.
describe('GET /api/instances/:vmid/history', () => {
  it('returns history events for a known vmid', async () => {
    await request(app).post('/api/instances').send(base)
    const res = await request(app).get('/api/instances/100/history')
    expect(res.status).toBe(200)
    expect(res.body).toBeInstanceOf(Array)
    expect(res.body[0].field).toBe('created')
  })

  it('returns 404 for unknown vmid', async () => {
    expect((await request(app).get('/api/instances/999/history')).status).toBe(404)
  })

  it('returns 400 for non-numeric vmid', async () => {
    expect((await request(app).get('/api/instances/abc/history')).status).toBe(400)
  })
})
|
||||
|
||||
// ── GET /api/export ───────────────────────────────────────────────────────────
|
||||
|
||||
// Export: attachment header, payload shape, version tag, and history array.
describe('GET /api/export', () => {
  it('returns 200 with instances array and attachment header', async () => {
    await request(app).post('/api/instances').send(base)
    const res = await request(app).get('/api/export')
    expect(res.status).toBe(200)
    expect(res.headers['content-disposition']).toMatch(/attachment/)
    expect(res.body.instances).toHaveLength(1)
    expect(res.body.instances[0].name).toBe('traefik')
  })

  it('returns empty instances array when no data', async () => {
    const res = await request(app).get('/api/export')
    expect(res.body.instances).toEqual([])
  })

  it('returns version 2', async () => {
    const res = await request(app).get('/api/export')
    expect(res.body.version).toBe(2)
  })

  it('includes a history array', async () => {
    await request(app).post('/api/instances').send(base)
    const res = await request(app).get('/api/export')
    expect(res.body.history).toBeInstanceOf(Array)
    expect(res.body.history.some(e => e.field === 'created')).toBe(true)
  })
})
|
||||
|
||||
// ── POST /api/import ──────────────────────────────────────────────────────────
|
||||
|
||||
// Import: full replacement, validation failures, history restore, and
// backwards compatibility with v1 backups lacking a history key.
describe('POST /api/import', () => {
  it('replaces all instances and returns imported count', async () => {
    await request(app).post('/api/instances').send(base)
    const res = await request(app).post('/api/import')
      .send({ instances: [{ ...base, vmid: 999, name: 'imported' }] })
    expect(res.status).toBe(200)
    expect(res.body.imported).toBe(1)
    expect((await request(app).get('/api/instances')).body[0].name).toBe('imported')
  })

  it('returns 400 if instances is not an array', async () => {
    expect((await request(app).post('/api/import').send({ instances: 'bad' })).status).toBe(400)
  })

  it('returns 400 with per-row errors for invalid rows', async () => {
    const res = await request(app).post('/api/import')
      .send({ instances: [{ ...base, name: '', vmid: 1 }] })
    expect(res.status).toBe(400)
    expect(res.body.errors[0].index).toBe(0)
  })

  it('returns 400 if body has no instances key', async () => {
    expect((await request(app).post('/api/import').send({})).status).toBe(400)
  })

  it('returns 400 (not 500) when a row is missing name', async () => {
    const res = await request(app).post('/api/import')
      .send({ instances: [{ ...base, name: undefined, vmid: 1 }] })
    expect(res.status).toBe(400)
  })

  it('restores history when history array is provided', async () => {
    await request(app).post('/api/instances').send(base)
    const exp = await request(app).get('/api/export')
    await request(app).post('/api/instances').send({ ...base, vmid: 999, name: 'other' })
    const res = await request(app).post('/api/import').send({
      instances: exp.body.instances,
      history: exp.body.history,
    })
    expect(res.status).toBe(200)
    const hist = await request(app).get('/api/instances/100/history')
    expect(hist.body.some(e => e.field === 'created')).toBe(true)
  })

  it('succeeds with a v1 backup that has no history key', async () => {
    const res = await request(app).post('/api/import')
      .send({ instances: [{ ...base, vmid: 1, name: 'legacy' }] })
    expect(res.status).toBe(200)
    expect(res.body.imported).toBe(1)
  })
})
|
||||
|
||||
// ── Static assets & SPA routing ───────────────────────────────────────────────
|
||||
|
||||
describe('static assets and SPA routing', () => {
|
||||
it('serves index.html at root', async () => {
|
||||
const res = await request(app).get('/')
|
||||
expect(res.status).toBe(200)
|
||||
expect(res.headers['content-type']).toMatch(/html/)
|
||||
})
|
||||
|
||||
it('serves index.html for deep SPA routes (e.g. /instance/117)', async () => {
|
||||
const res = await request(app).get('/instance/117')
|
||||
expect(res.status).toBe(200)
|
||||
expect(res.headers['content-type']).toMatch(/html/)
|
||||
})
|
||||
|
||||
it('serves CSS with correct content-type (not sniffed as HTML)', async () => {
|
||||
const res = await request(app).get('/css/app.css')
|
||||
expect(res.status).toBe(200)
|
||||
expect(res.headers['content-type']).toMatch(/text\/css/)
|
||||
})
|
||||
|
||||
it('does not set upgrade-insecure-requests in CSP (HTTP deployments must work)', async () => {
|
||||
const res = await request(app).get('/')
|
||||
const csp = res.headers['content-security-policy'] ?? ''
|
||||
expect(csp).not.toContain('upgrade-insecure-requests')
|
||||
})
|
||||
|
||||
it('allows inline event handlers in CSP (onclick attributes)', async () => {
|
||||
const res = await request(app).get('/')
|
||||
const csp = res.headers['content-security-policy'] ?? ''
|
||||
// script-src-attr must not be 'none' — that blocks onclick handlers
|
||||
expect(csp).not.toContain("script-src-attr 'none'")
|
||||
})
|
||||
|
||||
it('index.html contains base href / for correct asset resolution on deep routes', async () => {
|
||||
const res = await request(app).get('/')
|
||||
expect(res.text).toContain('<base href="/">')
|
||||
})
|
||||
})
|
||||
|
||||
// ── Error handling — unexpected DB failures ───────────────────────────────────
|
||||
|
||||
const dbError = () => Object.assign(
|
||||
new Error('attempt to write a readonly database'),
|
||||
{ code: 'ERR_SQLITE_ERROR', errcode: 8 }
|
||||
)
|
||||
|
||||
describe('error handling — unexpected DB failures', () => {
|
||||
let consoleSpy
|
||||
|
||||
beforeEach(() => {
|
||||
consoleSpy = vi.spyOn(console, 'error').mockImplementation(() => {})
|
||||
})
|
||||
|
||||
afterEach(() => {
|
||||
vi.restoreAllMocks()
|
||||
})
|
||||
|
||||
it('POST returns 500 with friendly message when DB throws unexpectedly', async () => {
|
||||
vi.spyOn(dbModule, 'createInstance').mockImplementationOnce(() => { throw dbError() })
|
||||
const res = await request(app).post('/api/instances').send(base)
|
||||
expect(res.status).toBe(500)
|
||||
expect(res.body).toEqual({ error: 'internal server error' })
|
||||
})
|
||||
|
||||
it('POST logs the error with route context when DB throws unexpectedly', async () => {
|
||||
vi.spyOn(dbModule, 'createInstance').mockImplementationOnce(() => { throw dbError() })
|
||||
await request(app).post('/api/instances').send(base)
|
||||
expect(consoleSpy).toHaveBeenCalledWith(
|
||||
expect.stringContaining('POST /api/instances'),
|
||||
expect.any(Error)
|
||||
)
|
||||
})
|
||||
|
||||
it('PUT returns 500 with friendly message when DB throws unexpectedly', async () => {
|
||||
await request(app).post('/api/instances').send(base)
|
||||
vi.spyOn(dbModule, 'updateInstance').mockImplementationOnce(() => { throw dbError() })
|
||||
const res = await request(app).put('/api/instances/100').send(base)
|
||||
expect(res.status).toBe(500)
|
||||
expect(res.body).toEqual({ error: 'internal server error' })
|
||||
})
|
||||
|
||||
it('PUT logs the error with route context when DB throws unexpectedly', async () => {
|
||||
await request(app).post('/api/instances').send(base)
|
||||
vi.spyOn(dbModule, 'updateInstance').mockImplementationOnce(() => { throw dbError() })
|
||||
await request(app).put('/api/instances/100').send(base)
|
||||
expect(consoleSpy).toHaveBeenCalledWith(
|
||||
expect.stringContaining('PUT /api/instances/:vmid'),
|
||||
expect.any(Error)
|
||||
)
|
||||
})
|
||||
|
||||
it('DELETE returns 500 with friendly message when DB throws unexpectedly', async () => {
|
||||
await request(app).post('/api/instances').send({ ...base, stack: 'development', state: 'testing' })
|
||||
vi.spyOn(dbModule, 'deleteInstance').mockImplementationOnce(() => { throw dbError() })
|
||||
const res = await request(app).delete('/api/instances/100')
|
||||
expect(res.status).toBe(500)
|
||||
expect(res.body).toEqual({ error: 'internal server error' })
|
||||
})
|
||||
|
||||
it('DELETE logs the error with route context when DB throws unexpectedly', async () => {
|
||||
await request(app).post('/api/instances').send({ ...base, stack: 'development', state: 'testing' })
|
||||
vi.spyOn(dbModule, 'deleteInstance').mockImplementationOnce(() => { throw dbError() })
|
||||
await request(app).delete('/api/instances/100')
|
||||
expect(consoleSpy).toHaveBeenCalledWith(
|
||||
expect.stringContaining('DELETE /api/instances/:vmid'),
|
||||
expect.any(Error)
|
||||
)
|
||||
})
|
||||
})
|
||||
439
tests/db.test.js
439
tests/db.test.js
@@ -1,250 +1,271 @@
|
||||
import { describe, it, expect, beforeEach } from 'vitest'
|
||||
import initSqlJs from 'sql.js'
|
||||
import {
|
||||
_resetForTest,
|
||||
getInstances, getInstance, getDistinctStacks,
|
||||
createInstance, updateInstance, deleteInstance, importInstances, getInstanceHistory,
|
||||
} from '../server/db.js'
|
||||
|
||||
// ── Schema (mirrors db.js) ────────────────────────────────────────────────────
|
||||
beforeEach(() => _resetForTest());
|
||||
|
||||
const SCHEMA = `
|
||||
CREATE TABLE instances (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
name TEXT NOT NULL,
|
||||
state TEXT DEFAULT 'deployed',
|
||||
stack TEXT DEFAULT '',
|
||||
vmid INTEGER UNIQUE NOT NULL,
|
||||
atlas INTEGER DEFAULT 0,
|
||||
argus INTEGER DEFAULT 0,
|
||||
semaphore INTEGER DEFAULT 0,
|
||||
patchmon INTEGER DEFAULT 0,
|
||||
tailscale INTEGER DEFAULT 0,
|
||||
andromeda INTEGER DEFAULT 0,
|
||||
tailscale_ip TEXT DEFAULT '',
|
||||
hardware_acceleration INTEGER DEFAULT 0,
|
||||
createdAt TEXT DEFAULT (datetime('now')),
|
||||
updatedAt TEXT DEFAULT (datetime('now'))
|
||||
)
|
||||
`
|
||||
|
||||
// ── Helpers ───────────────────────────────────────────────────────────────────
|
||||
|
||||
let db
|
||||
|
||||
beforeEach(async () => {
|
||||
const SQL = await initSqlJs()
|
||||
db = new SQL.Database()
|
||||
db.run(SCHEMA)
|
||||
})
|
||||
|
||||
function rows(res) {
|
||||
if (!res.length) return []
|
||||
const cols = res[0].columns
|
||||
return res[0].values.map(row => Object.fromEntries(cols.map((c, i) => [c, row[i]])))
|
||||
}
|
||||
|
||||
function insert(overrides = {}) {
|
||||
const defaults = {
|
||||
name: 'test-instance', state: 'deployed', stack: 'production', vmid: 100,
|
||||
atlas: 0, argus: 0, semaphore: 0, patchmon: 0,
|
||||
tailscale: 0, andromeda: 0, tailscale_ip: '', hardware_acceleration: 0,
|
||||
}
|
||||
const d = { ...defaults, ...overrides }
|
||||
db.run(
|
||||
`INSERT INTO instances
|
||||
(name, state, stack, vmid, atlas, argus, semaphore, patchmon, tailscale, andromeda, tailscale_ip, hardware_acceleration)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
|
||||
[d.name, d.state, d.stack, d.vmid, d.atlas, d.argus, d.semaphore,
|
||||
d.patchmon, d.tailscale, d.andromeda, d.tailscale_ip, d.hardware_acceleration]
|
||||
)
|
||||
return d
|
||||
}
|
||||
|
||||
function getInstances(filters = {}) {
|
||||
let sql = 'SELECT * FROM instances WHERE 1=1'
|
||||
const params = []
|
||||
if (filters.search) {
|
||||
sql += ' AND (name LIKE ? OR CAST(vmid AS TEXT) LIKE ? OR stack LIKE ?)'
|
||||
const s = `%${filters.search}%`
|
||||
params.push(s, s, s)
|
||||
}
|
||||
if (filters.state) { sql += ' AND state = ?'; params.push(filters.state) }
|
||||
if (filters.stack) { sql += ' AND stack = ?'; params.push(filters.stack) }
|
||||
sql += ' ORDER BY name ASC'
|
||||
return rows(db.exec(sql, params))
|
||||
}
|
||||
|
||||
function getInstance(vmid) {
|
||||
const res = rows(db.exec('SELECT * FROM instances WHERE vmid = ?', [vmid]))
|
||||
return res[0] ?? null
|
||||
}
|
||||
|
||||
function getDistinctStacks() {
|
||||
const res = db.exec(`SELECT DISTINCT stack FROM instances WHERE stack != '' ORDER BY stack`)
|
||||
if (!res.length) return []
|
||||
return res[0].values.map(r => r[0])
|
||||
}
|
||||
|
||||
// ── Tests ─────────────────────────────────────────────────────────────────────
|
||||
// ── getInstances ──────────────────────────────────────────────────────────────
|
||||
|
||||
describe('getInstances', () => {
|
||||
it('returns empty array when no instances exist', () => {
|
||||
expect(getInstances()).toEqual([])
|
||||
})
|
||||
it('returns empty array when table is empty', () => {
|
||||
expect(getInstances()).toEqual([]);
|
||||
});
|
||||
|
||||
it('returns all instances sorted by name', () => {
|
||||
insert({ name: 'zebra', vmid: 1 })
|
||||
insert({ name: 'alpha', vmid: 2 })
|
||||
const result = getInstances()
|
||||
expect(result).toHaveLength(2)
|
||||
expect(result[0].name).toBe('alpha')
|
||||
expect(result[1].name).toBe('zebra')
|
||||
})
|
||||
createInstance({ name: 'zebra', state: 'deployed', stack: 'production', vmid: 1, atlas: 0, argus: 0, semaphore: 0, patchmon: 0, tailscale: 0, andromeda: 0, tailscale_ip: '', hardware_acceleration: 0 });
|
||||
createInstance({ name: 'alpha', state: 'deployed', stack: 'production', vmid: 2, atlas: 0, argus: 0, semaphore: 0, patchmon: 0, tailscale: 0, andromeda: 0, tailscale_ip: '', hardware_acceleration: 0 });
|
||||
const result = getInstances();
|
||||
expect(result[0].name).toBe('alpha');
|
||||
expect(result[1].name).toBe('zebra');
|
||||
});
|
||||
|
||||
it('filters by state', () => {
|
||||
insert({ name: 'a', vmid: 1, state: 'deployed' })
|
||||
insert({ name: 'b', vmid: 2, state: 'degraded' })
|
||||
insert({ name: 'c', vmid: 3, state: 'testing' })
|
||||
expect(getInstances({ state: 'deployed' })).toHaveLength(1)
|
||||
expect(getInstances({ state: 'degraded' })).toHaveLength(1)
|
||||
expect(getInstances({ state: 'testing' })).toHaveLength(1)
|
||||
})
|
||||
createInstance({ name: 'a', state: 'deployed', stack: 'production', vmid: 1, atlas: 0, argus: 0, semaphore: 0, patchmon: 0, tailscale: 0, andromeda: 0, tailscale_ip: '', hardware_acceleration: 0 });
|
||||
createInstance({ name: 'b', state: 'degraded', stack: 'production', vmid: 2, atlas: 0, argus: 0, semaphore: 0, patchmon: 0, tailscale: 0, andromeda: 0, tailscale_ip: '', hardware_acceleration: 0 });
|
||||
createInstance({ name: 'c', state: 'testing', stack: 'development', vmid: 3, atlas: 0, argus: 0, semaphore: 0, patchmon: 0, tailscale: 0, andromeda: 0, tailscale_ip: '', hardware_acceleration: 0 });
|
||||
expect(getInstances({ state: 'deployed' })).toHaveLength(1);
|
||||
expect(getInstances({ state: 'degraded' })).toHaveLength(1);
|
||||
expect(getInstances({ state: 'testing' })).toHaveLength(1);
|
||||
});
|
||||
|
||||
it('filters by stack', () => {
|
||||
insert({ name: 'a', vmid: 1, stack: 'production' })
|
||||
insert({ name: 'b', vmid: 2, stack: 'development' })
|
||||
expect(getInstances({ stack: 'production' })).toHaveLength(1)
|
||||
expect(getInstances({ stack: 'development' })).toHaveLength(1)
|
||||
})
|
||||
createInstance({ name: 'a', state: 'deployed', stack: 'production', vmid: 1, atlas: 0, argus: 0, semaphore: 0, patchmon: 0, tailscale: 0, andromeda: 0, tailscale_ip: '', hardware_acceleration: 0 });
|
||||
createInstance({ name: 'b', state: 'testing', stack: 'development', vmid: 2, atlas: 0, argus: 0, semaphore: 0, patchmon: 0, tailscale: 0, andromeda: 0, tailscale_ip: '', hardware_acceleration: 0 });
|
||||
expect(getInstances({ stack: 'production' })).toHaveLength(1);
|
||||
expect(getInstances({ stack: 'development' })).toHaveLength(1);
|
||||
});
|
||||
|
||||
it('searches by name', () => {
|
||||
insert({ name: 'plex', vmid: 1 })
|
||||
insert({ name: 'gitea', vmid: 2 })
|
||||
expect(getInstances({ search: 'ple' })).toHaveLength(1)
|
||||
expect(getInstances({ search: 'ple' })[0].name).toBe('plex')
|
||||
})
|
||||
createInstance({ name: 'plex', state: 'deployed', stack: 'production', vmid: 1, atlas: 0, argus: 0, semaphore: 0, patchmon: 0, tailscale: 0, andromeda: 0, tailscale_ip: '', hardware_acceleration: 0 });
|
||||
createInstance({ name: 'gitea', state: 'deployed', stack: 'production', vmid: 2, atlas: 0, argus: 0, semaphore: 0, patchmon: 0, tailscale: 0, andromeda: 0, tailscale_ip: '', hardware_acceleration: 0 });
|
||||
expect(getInstances({ search: 'ple' })).toHaveLength(1);
|
||||
expect(getInstances({ search: 'ple' })[0].name).toBe('plex');
|
||||
});
|
||||
|
||||
it('searches by vmid', () => {
|
||||
insert({ name: 'a', vmid: 137 })
|
||||
insert({ name: 'b', vmid: 200 })
|
||||
expect(getInstances({ search: '137' })).toHaveLength(1)
|
||||
})
|
||||
createInstance({ name: 'a', state: 'deployed', stack: 'production', vmid: 137, atlas: 0, argus: 0, semaphore: 0, patchmon: 0, tailscale: 0, andromeda: 0, tailscale_ip: '', hardware_acceleration: 0 });
|
||||
createInstance({ name: 'b', state: 'deployed', stack: 'production', vmid: 200, atlas: 0, argus: 0, semaphore: 0, patchmon: 0, tailscale: 0, andromeda: 0, tailscale_ip: '', hardware_acceleration: 0 });
|
||||
expect(getInstances({ search: '137' })).toHaveLength(1);
|
||||
});
|
||||
|
||||
it('searches by stack', () => {
|
||||
insert({ name: 'a', vmid: 1, stack: 'production' })
|
||||
insert({ name: 'b', vmid: 2, stack: 'development' })
|
||||
expect(getInstances({ search: 'prod' })).toHaveLength(1)
|
||||
})
|
||||
it('combines filters', () => {
|
||||
createInstance({ name: 'plex', state: 'deployed', stack: 'production', vmid: 1, atlas: 0, argus: 0, semaphore: 0, patchmon: 0, tailscale: 0, andromeda: 0, tailscale_ip: '', hardware_acceleration: 0 });
|
||||
createInstance({ name: 'plex2', state: 'degraded', stack: 'production', vmid: 2, atlas: 0, argus: 0, semaphore: 0, patchmon: 0, tailscale: 0, andromeda: 0, tailscale_ip: '', hardware_acceleration: 0 });
|
||||
expect(getInstances({ search: 'plex', state: 'deployed' })).toHaveLength(1);
|
||||
});
|
||||
});
|
||||
|
||||
it('combines search and state filters', () => {
|
||||
insert({ name: 'plex', vmid: 1, state: 'deployed' })
|
||||
insert({ name: 'plex2', vmid: 2, state: 'degraded' })
|
||||
expect(getInstances({ search: 'plex', state: 'deployed' })).toHaveLength(1)
|
||||
})
|
||||
|
||||
it('returns empty array when no results match', () => {
|
||||
insert({ name: 'plex', vmid: 1 })
|
||||
expect(getInstances({ search: 'zzz' })).toEqual([])
|
||||
})
|
||||
})
|
||||
// ── getInstance ───────────────────────────────────────────────────────────────
|
||||
|
||||
describe('getInstance', () => {
|
||||
it('returns the instance with the given vmid', () => {
|
||||
insert({ name: 'plex', vmid: 117 })
|
||||
const inst = getInstance(117)
|
||||
expect(inst).not.toBeNull()
|
||||
expect(inst.name).toBe('plex')
|
||||
expect(inst.vmid).toBe(117)
|
||||
})
|
||||
createInstance({ name: 'plex', state: 'deployed', stack: 'production', vmid: 117, atlas: 0, argus: 0, semaphore: 0, patchmon: 0, tailscale: 0, andromeda: 0, tailscale_ip: '', hardware_acceleration: 0 });
|
||||
const inst = getInstance(117);
|
||||
expect(inst).not.toBeNull();
|
||||
expect(inst.name).toBe('plex');
|
||||
expect(inst.vmid).toBe(117);
|
||||
});
|
||||
|
||||
it('returns null for an unknown vmid', () => {
|
||||
expect(getInstance(999)).toBeNull()
|
||||
})
|
||||
})
|
||||
it('returns null for unknown vmid', () => {
|
||||
expect(getInstance(999)).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
// ── getDistinctStacks ─────────────────────────────────────────────────────────
|
||||
|
||||
describe('getDistinctStacks', () => {
|
||||
it('returns empty array when no instances exist', () => {
|
||||
expect(getDistinctStacks()).toEqual([])
|
||||
})
|
||||
it('returns empty array when table is empty', () => {
|
||||
expect(getDistinctStacks()).toEqual([]);
|
||||
});
|
||||
|
||||
it('returns unique stacks sorted alphabetically', () => {
|
||||
insert({ vmid: 1, stack: 'production' })
|
||||
insert({ vmid: 2, stack: 'development' })
|
||||
insert({ vmid: 3, stack: 'production' })
|
||||
expect(getDistinctStacks()).toEqual(['development', 'production'])
|
||||
})
|
||||
createInstance({ name: 'a', state: 'deployed', stack: 'production', vmid: 1, atlas: 0, argus: 0, semaphore: 0, patchmon: 0, tailscale: 0, andromeda: 0, tailscale_ip: '', hardware_acceleration: 0 });
|
||||
createInstance({ name: 'b', state: 'testing', stack: 'development', vmid: 2, atlas: 0, argus: 0, semaphore: 0, patchmon: 0, tailscale: 0, andromeda: 0, tailscale_ip: '', hardware_acceleration: 0 });
|
||||
createInstance({ name: 'c', state: 'deployed', stack: 'production', vmid: 3, atlas: 0, argus: 0, semaphore: 0, patchmon: 0, tailscale: 0, andromeda: 0, tailscale_ip: '', hardware_acceleration: 0 });
|
||||
expect(getDistinctStacks()).toEqual(['development', 'production']);
|
||||
});
|
||||
});
|
||||
|
||||
it('excludes blank stack values', () => {
|
||||
insert({ vmid: 1, stack: '' })
|
||||
insert({ vmid: 2, stack: 'production' })
|
||||
expect(getDistinctStacks()).toEqual(['production'])
|
||||
})
|
||||
})
|
||||
// ── createInstance ────────────────────────────────────────────────────────────
|
||||
|
||||
describe('createInstance', () => {
|
||||
it('inserts a new instance', () => {
|
||||
insert({ name: 'traefik', vmid: 100, stack: 'production', state: 'deployed' })
|
||||
const inst = getInstance(100)
|
||||
expect(inst.name).toBe('traefik')
|
||||
expect(inst.stack).toBe('production')
|
||||
expect(inst.state).toBe('deployed')
|
||||
})
|
||||
const base = { state: 'deployed', stack: 'production', atlas: 0, argus: 0, semaphore: 0, patchmon: 0, tailscale: 0, andromeda: 0, tailscale_ip: '', hardware_acceleration: 0 };
|
||||
|
||||
it('inserts a new instance and sets timestamps', () => {
|
||||
createInstance({ ...base, name: 'traefik', vmid: 100 });
|
||||
const inst = getInstance(100);
|
||||
expect(inst.name).toBe('traefik');
|
||||
expect(inst.created_at).not.toBeNull();
|
||||
expect(inst.updated_at).not.toBeNull();
|
||||
});
|
||||
|
||||
it('stores service flags correctly', () => {
|
||||
insert({ vmid: 1, atlas: 1, argus: 0, tailscale: 1, hardware_acceleration: 1 })
|
||||
const inst = getInstance(1)
|
||||
expect(inst.atlas).toBe(1)
|
||||
expect(inst.argus).toBe(0)
|
||||
expect(inst.tailscale).toBe(1)
|
||||
expect(inst.hardware_acceleration).toBe(1)
|
||||
})
|
||||
createInstance({ ...base, name: 'plex', vmid: 1, atlas: 1, tailscale: 1, hardware_acceleration: 1 });
|
||||
const inst = getInstance(1);
|
||||
expect(inst.atlas).toBe(1);
|
||||
expect(inst.argus).toBe(0);
|
||||
expect(inst.tailscale).toBe(1);
|
||||
expect(inst.hardware_acceleration).toBe(1);
|
||||
});
|
||||
|
||||
it('rejects duplicate vmid', () => {
|
||||
insert({ vmid: 100 })
|
||||
expect(() => insert({ name: 'other', vmid: 100 })).toThrow()
|
||||
})
|
||||
createInstance({ ...base, name: 'a', vmid: 100 });
|
||||
expect(() => createInstance({ ...base, name: 'b', vmid: 100 })).toThrow();
|
||||
});
|
||||
|
||||
it('sets createdAt and updatedAt on insert', () => {
|
||||
insert({ vmid: 1 })
|
||||
const inst = getInstance(1)
|
||||
expect(inst.createdAt).not.toBeNull()
|
||||
expect(inst.updatedAt).not.toBeNull()
|
||||
})
|
||||
})
|
||||
it('rejects invalid state', () => {
|
||||
expect(() => createInstance({ ...base, name: 'a', vmid: 1, state: 'invalid' })).toThrow();
|
||||
});
|
||||
|
||||
it('rejects invalid stack', () => {
|
||||
expect(() => createInstance({ ...base, name: 'a', vmid: 1, stack: 'invalid' })).toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
// ── updateInstance ────────────────────────────────────────────────────────────
|
||||
|
||||
describe('updateInstance', () => {
|
||||
it('updates fields on an existing instance', () => {
|
||||
insert({ name: 'old-name', vmid: 100, state: 'testing', stack: 'development' })
|
||||
const before = getInstance(100)
|
||||
db.run(
|
||||
`UPDATE instances SET name=?, state=?, stack=?, updatedAt=datetime('now') WHERE id=?`,
|
||||
['new-name', 'deployed', 'production', before.id]
|
||||
)
|
||||
const after = getInstance(100)
|
||||
expect(after.name).toBe('new-name')
|
||||
expect(after.state).toBe('deployed')
|
||||
expect(after.stack).toBe('production')
|
||||
})
|
||||
const base = { state: 'deployed', stack: 'production', atlas: 0, argus: 0, semaphore: 0, patchmon: 0, tailscale: 0, andromeda: 0, tailscale_ip: '', hardware_acceleration: 0 };
|
||||
|
||||
it('updates updatedAt on write', () => {
|
||||
insert({ vmid: 1 })
|
||||
const before = getInstance(1)
|
||||
db.run(`UPDATE instances SET name=?, updatedAt=datetime('now') WHERE id=?`, ['updated', before.id])
|
||||
const after = getInstance(1)
|
||||
expect(after.updatedAt).not.toBeNull()
|
||||
})
|
||||
})
|
||||
it('updates fields and refreshes updated_at', () => {
|
||||
createInstance({ ...base, name: 'old', vmid: 100 });
|
||||
updateInstance(100, { ...base, name: 'new', vmid: 100, state: 'degraded' });
|
||||
const inst = getInstance(100);
|
||||
expect(inst.name).toBe('new');
|
||||
expect(inst.state).toBe('degraded');
|
||||
});
|
||||
|
||||
it('can change vmid', () => {
|
||||
createInstance({ ...base, name: 'a', vmid: 100 });
|
||||
updateInstance(100, { ...base, name: 'a', vmid: 200 });
|
||||
expect(getInstance(100)).toBeNull();
|
||||
expect(getInstance(200)).not.toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
// ── deleteInstance ────────────────────────────────────────────────────────────
|
||||
|
||||
describe('deleteInstance', () => {
|
||||
const base = { state: 'deployed', stack: 'production', atlas: 0, argus: 0, semaphore: 0, patchmon: 0, tailscale: 0, andromeda: 0, tailscale_ip: '', hardware_acceleration: 0 };
|
||||
|
||||
it('removes the instance', () => {
|
||||
insert({ vmid: 1 })
|
||||
const inst = getInstance(1)
|
||||
db.run('DELETE FROM instances WHERE id = ?', [inst.id])
|
||||
expect(getInstance(1)).toBeNull()
|
||||
})
|
||||
createInstance({ ...base, name: 'a', vmid: 1 });
|
||||
deleteInstance(1);
|
||||
expect(getInstance(1)).toBeNull();
|
||||
});
|
||||
|
||||
it('only removes the targeted instance', () => {
|
||||
insert({ name: 'a', vmid: 1 })
|
||||
insert({ name: 'b', vmid: 2 })
|
||||
const inst = getInstance(1)
|
||||
db.run('DELETE FROM instances WHERE id = ?', [inst.id])
|
||||
expect(getInstance(1)).toBeNull()
|
||||
expect(getInstance(2)).not.toBeNull()
|
||||
})
|
||||
})
|
||||
createInstance({ ...base, name: 'a', vmid: 1 });
|
||||
createInstance({ ...base, name: 'b', vmid: 2 });
|
||||
deleteInstance(1);
|
||||
expect(getInstance(1)).toBeNull();
|
||||
expect(getInstance(2)).not.toBeNull();
|
||||
});
|
||||
|
||||
it('clears history for the deleted instance', () => {
|
||||
createInstance({ ...base, name: 'a', vmid: 1 });
|
||||
deleteInstance(1);
|
||||
expect(getInstanceHistory(1)).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('does not clear history for other instances', () => {
|
||||
createInstance({ ...base, name: 'a', vmid: 1 });
|
||||
createInstance({ ...base, name: 'b', vmid: 2 });
|
||||
deleteInstance(1);
|
||||
expect(getInstanceHistory(2).length).toBeGreaterThan(0);
|
||||
});
|
||||
});
|
||||
|
||||
// ── importInstances ───────────────────────────────────────────────────────────
|
||||
|
||||
describe('importInstances', () => {
|
||||
const base = { state: 'deployed', stack: 'production', atlas: 0, argus: 0, semaphore: 0, patchmon: 0, tailscale: 0, andromeda: 0, tailscale_ip: '', hardware_acceleration: 0 };
|
||||
|
||||
it('replaces all existing instances with the imported set', () => {
|
||||
createInstance({ ...base, name: 'old', vmid: 1 });
|
||||
importInstances([{ ...base, name: 'new', vmid: 2 }]);
|
||||
expect(getInstance(1)).toBeNull();
|
||||
expect(getInstance(2)).not.toBeNull();
|
||||
});
|
||||
|
||||
it('clears all instances when passed an empty array', () => {
|
||||
createInstance({ ...base, name: 'a', vmid: 1 });
|
||||
importInstances([]);
|
||||
expect(getInstances()).toEqual([]);
|
||||
});
|
||||
|
||||
it('clears history for all replaced instances', () => {
|
||||
createInstance({ ...base, name: 'old', vmid: 1 });
|
||||
importInstances([{ ...base, name: 'new', vmid: 2 }]);
|
||||
expect(getInstanceHistory(1)).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('restores history rows when provided', () => {
|
||||
importInstances(
|
||||
[{ ...base, name: 'a', vmid: 1 }],
|
||||
[{ vmid: 1, field: 'created', old_value: null, new_value: null, changed_at: '2026-01-01 00:00:00' }]
|
||||
);
|
||||
const h = getInstanceHistory(1);
|
||||
expect(h.some(e => e.field === 'created')).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
// ── instance history ─────────────────────────────────────────────────────────
|
||||
|
||||
describe('instance history', () => {
|
||||
const base = { state: 'deployed', stack: 'production', atlas: 0, argus: 0, semaphore: 0, patchmon: 0, tailscale: 0, andromeda: 0, tailscale_ip: '', hardware_acceleration: 0 };
|
||||
|
||||
it('logs a created event when an instance is created', () => {
|
||||
createInstance({ ...base, name: 'a', vmid: 1 });
|
||||
const h = getInstanceHistory(1);
|
||||
expect(h).toHaveLength(1);
|
||||
expect(h[0].field).toBe('created');
|
||||
});
|
||||
|
||||
it('logs changed fields when an instance is updated', () => {
|
||||
createInstance({ ...base, name: 'a', vmid: 1 });
|
||||
updateInstance(1, { ...base, name: 'a', vmid: 1, state: 'degraded' });
|
||||
const h = getInstanceHistory(1);
|
||||
const stateEvt = h.find(e => e.field === 'state');
|
||||
expect(stateEvt).toBeDefined();
|
||||
expect(stateEvt.old_value).toBe('deployed');
|
||||
expect(stateEvt.new_value).toBe('degraded');
|
||||
});
|
||||
|
||||
it('logs no events when nothing changes on update', () => {
|
||||
createInstance({ ...base, name: 'a', vmid: 1 });
|
||||
updateInstance(1, { ...base, name: 'a', vmid: 1 });
|
||||
const h = getInstanceHistory(1).filter(e => e.field !== 'created');
|
||||
expect(h).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('records history under the new vmid when vmid changes', () => {
|
||||
createInstance({ ...base, name: 'a', vmid: 1 });
|
||||
updateInstance(1, { ...base, name: 'a', vmid: 2 });
|
||||
expect(getInstanceHistory(2).some(e => e.field === 'vmid')).toBe(true);
|
||||
expect(getInstanceHistory(1).filter(e => e.field !== 'created')).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
|
||||
// ── Test environment boot isolation ───────────────────────────────────────────
|
||||
|
||||
describe('test environment boot isolation', () => {
|
||||
it('vitest runs with NODE_ENV=test', () => {
|
||||
// Vitest sets NODE_ENV=test automatically. This is the guard condition
|
||||
// that prevents the boot init() from opening the real database file.
|
||||
expect(process.env.NODE_ENV).toBe('test');
|
||||
});
|
||||
|
||||
it('db module loads cleanly in parallel workers without locking the real db file', () => {
|
||||
// Regression: the module-level init(DEFAULT_PATH) used to run unconditionally,
|
||||
// causing "database is locked" when multiple test workers imported db.js at
|
||||
// the same time. process.exit(1) then killed the worker mid-suite.
|
||||
// Fix: boot init is skipped when NODE_ENV=test. _resetForTest() handles setup.
|
||||
// Reaching this line proves the module loaded without calling process.exit.
|
||||
expect(() => _resetForTest()).not.toThrow();
|
||||
expect(getInstances()).toEqual([]);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,4 +1,7 @@
|
||||
// @vitest-environment jsdom
|
||||
import { describe, it, expect } from 'vitest'
|
||||
import { readFileSync } from 'fs'
|
||||
import { join } from 'path'
|
||||
|
||||
// ── esc() ─────────────────────────────────────────────────────────────────────
|
||||
// Mirrors the implementation in ui.js exactly (DOM-based).
|
||||
@@ -55,16 +58,22 @@ describe('esc', () => {
|
||||
|
||||
// ── fmtDate() ─────────────────────────────────────────────────────────────────
|
||||
|
||||
function fmtDate(d) {
|
||||
function parseUtc(d) {
|
||||
if (typeof d !== 'string') return new Date(d)
|
||||
const hasZone = d.endsWith('Z') || /[+-]\d{2}:\d{2}$/.test(d)
|
||||
return new Date(hasZone ? d : d.replace(' ', 'T') + 'Z')
|
||||
}
|
||||
|
||||
function fmtDate(d, tz = 'UTC') {
|
||||
if (!d) return '—'
|
||||
try {
|
||||
return new Date(d).toLocaleDateString('en-US', { year: 'numeric', month: 'short', day: 'numeric' })
|
||||
return parseUtc(d).toLocaleDateString('en-US', { year: 'numeric', month: 'short', day: 'numeric', timeZone: tz })
|
||||
} catch (e) { return d }
|
||||
}
|
||||
|
||||
describe('fmtDate', () => {
|
||||
it('formats a valid ISO date string', () => {
|
||||
const result = fmtDate('2024-03-15T00:00:00')
|
||||
const result = fmtDate('2024-03-15T12:00:00Z')
|
||||
expect(result).toMatch(/Mar/)
|
||||
expect(result).toMatch(/15/)
|
||||
expect(result).toMatch(/2024/)
|
||||
@@ -85,24 +94,42 @@ describe('fmtDate', () => {
|
||||
|
||||
// ── fmtDateFull() ─────────────────────────────────────────────────────────────
|
||||
|
||||
function fmtDateFull(d) {
|
||||
function fmtDateFull(d, tz = 'UTC') {
|
||||
if (!d) return '—'
|
||||
try {
|
||||
return new Date(d).toLocaleString('en-US', {
|
||||
return parseUtc(d).toLocaleString('en-US', {
|
||||
year: 'numeric', month: 'short', day: 'numeric',
|
||||
hour: '2-digit', minute: '2-digit',
|
||||
timeZone: tz, timeZoneName: 'short',
|
||||
})
|
||||
} catch (e) { return d }
|
||||
}
|
||||
|
||||
describe('fmtDateFull', () => {
|
||||
it('includes date and time components', () => {
|
||||
const result = fmtDateFull('2024-03-15T14:30:00')
|
||||
const result = fmtDateFull('2024-03-15T14:30:00Z')
|
||||
expect(result).toMatch(/Mar/)
|
||||
expect(result).toMatch(/2024/)
|
||||
expect(result).toMatch(/\d{1,2}:\d{2}/)
|
||||
})
|
||||
|
||||
it('includes the timezone abbreviation', () => {
|
||||
expect(fmtDateFull('2024-03-15T14:30:00Z', 'UTC')).toMatch(/UTC/)
|
||||
})
|
||||
|
||||
it('converts to the given timezone', () => {
|
||||
// 2024-03-15 18:30 UTC = 2024-03-15 14:30 EDT (UTC-4 in March)
|
||||
const result = fmtDateFull('2024-03-15T18:30:00Z', 'America/New_York')
|
||||
expect(result).toMatch(/2:30/)
|
||||
expect(result).toMatch(/EDT/)
|
||||
})
|
||||
|
||||
it('treats SQLite-format timestamps (space, no Z) as UTC', () => {
|
||||
// SQLite datetime('now') → 'YYYY-MM-DD HH:MM:SS', no timezone marker.
|
||||
// Must parse identically to the same moment expressed as ISO UTC.
|
||||
expect(fmtDateFull('2024-03-15 18:30:00', 'UTC')).toBe(fmtDateFull('2024-03-15T18:30:00Z', 'UTC'))
|
||||
})
|
||||
|
||||
it('returns — for null', () => {
|
||||
expect(fmtDateFull(null)).toBe('—')
|
||||
})
|
||||
@@ -111,3 +138,111 @@ describe('fmtDateFull', () => {
|
||||
expect(fmtDateFull('')).toBe('—')
|
||||
})
|
||||
})
|
||||
|
||||
// ── versionLabel() ───────────────────────────────────────────────────────────
|
||||
// Mirrors the logic in app.js — semver strings get a v prefix, dev strings don't.
|
||||
|
||||
function versionLabel(v) {
|
||||
return /^\d/.test(v) ? `v${v}` : v
|
||||
}
|
||||
|
||||
describe('version label formatting', () => {
|
||||
it('prepends v for semver strings', () => {
|
||||
expect(versionLabel('1.1.2')).toBe('v1.1.2')
|
||||
expect(versionLabel('2.0.0')).toBe('v2.0.0')
|
||||
})
|
||||
|
||||
it('does not prepend v for dev build strings', () => {
|
||||
expect(versionLabel('dev-abc1234')).toBe('dev-abc1234')
|
||||
})
|
||||
})
|
||||
|
||||
// ── fmtHistVal() ─────────────────────────────────────────────────────────────
|
||||
// Mirrors the logic in ui.js — formats history field values for display.
|
||||
|
||||
const BOOL_FIELDS = ['atlas','argus','semaphore','patchmon','tailscale','andromeda','hardware_acceleration']
|
||||
|
||||
function fmtHistVal(field, val) {
|
||||
if (val == null || val === '') return '—'
|
||||
if (BOOL_FIELDS.includes(field)) return val === '1' ? 'on' : 'off'
|
||||
return val
|
||||
}
|
||||
|
||||
describe('fmtHistVal', () => {
|
||||
it('returns — for null', () => {
|
||||
expect(fmtHistVal('state', null)).toBe('—')
|
||||
})
|
||||
|
||||
it('returns — for empty string', () => {
|
||||
expect(fmtHistVal('state', '')).toBe('—')
|
||||
})
|
||||
|
||||
it('returns on/off for boolean service fields', () => {
|
||||
expect(fmtHistVal('atlas', '1')).toBe('on')
|
||||
expect(fmtHistVal('atlas', '0')).toBe('off')
|
||||
expect(fmtHistVal('hardware_acceleration', '1')).toBe('on')
|
||||
})
|
||||
|
||||
it('returns the value as-is for non-boolean fields', () => {
|
||||
expect(fmtHistVal('state', 'deployed')).toBe('deployed')
|
||||
expect(fmtHistVal('name', 'plex')).toBe('plex')
|
||||
expect(fmtHistVal('tailscale_ip', '100.64.0.1')).toBe('100.64.0.1')
|
||||
})
|
||||
})
|
||||
|
||||
// ── stateClass() ─────────────────────────────────────────────────────────────
|
||||
// Mirrors the logic in ui.js — maps state values to timeline CSS classes.
|
||||
|
||||
function stateClass(field, val) {
|
||||
if (field !== 'state') return ''
|
||||
return { deployed: 'tl-deployed', testing: 'tl-testing', degraded: 'tl-degraded' }[val] ?? ''
|
||||
}
|
||||
|
||||
describe('stateClass', () => {
|
||||
it('returns empty string for non-state fields', () => {
|
||||
expect(stateClass('name', 'plex')).toBe('')
|
||||
expect(stateClass('stack', 'production')).toBe('')
|
||||
})
|
||||
|
||||
it('returns the correct colour class for each state value', () => {
|
||||
expect(stateClass('state', 'deployed')).toBe('tl-deployed')
|
||||
expect(stateClass('state', 'testing')).toBe('tl-testing')
|
||||
expect(stateClass('state', 'degraded')).toBe('tl-degraded')
|
||||
})
|
||||
|
||||
it('returns empty string for unknown state values', () => {
|
||||
expect(stateClass('state', 'unknown')).toBe('')
|
||||
})
|
||||
})
|
||||
|
||||
// ── CSS regressions ───────────────────────────────────────────────────────────
|
||||
|
||||
const css = readFileSync(join(__dirname, '../css/app.css'), 'utf8')
|
||||
|
||||
describe('CSS regressions', () => {
|
||||
it('.badge has text-align: center so state labels are not left-skewed on cards', () => {
|
||||
// Regression: badges rendered left-aligned inside the card's flex-end column.
|
||||
// Without text-align: center, short labels (e.g. "deployed") appear
|
||||
// left-justified inside their pill rather than centred.
|
||||
expect(css).toMatch(/\.badge\s*\{[^}]*text-align\s*:\s*center/s)
|
||||
})
|
||||
})
|
||||
|
||||
// ── CI workflow regressions ───────────────────────────────────────────────────
|
||||
|
||||
const ciYml = readFileSync(join(__dirname, '../.gitea/workflows/ci.yml'), 'utf8')
|
||||
|
||||
describe('CI workflow regressions', () => {
|
||||
it('build-dev job passes BUILD_VERSION build arg', () => {
|
||||
// Regression: dev image showed semver instead of dev-<sha> because
|
||||
// BUILD_VERSION was never passed to docker build.
|
||||
expect(ciYml).toContain('BUILD_VERSION')
|
||||
})
|
||||
|
||||
it('short SHA is computed with git rev-parse, not $GITEA_SHA (which is empty)', () => {
|
||||
// Regression: ${GITEA_SHA::7} expands to "" on Gitea runners — nav showed "dev-".
|
||||
// git rev-parse --short HEAD works regardless of which env vars the runner sets.
|
||||
expect(ciYml).toContain('git rev-parse --short HEAD')
|
||||
expect(ciYml).not.toContain('GITEA_SHA')
|
||||
})
|
||||
})
|
||||
|
||||
@@ -2,6 +2,6 @@ import { defineConfig } from 'vitest/config'
|
||||
|
||||
export default defineConfig({
|
||||
test: {
|
||||
environment: 'jsdom',
|
||||
environment: 'node',
|
||||
},
|
||||
})
|
||||
|
||||
Reference in New Issue
Block a user