feat: initial project scaffold with CI/CD and Docker deployment
Next.js 15 + Tailwind CSS v4 week calendar showing Six Flags park hours. Scrapes the internal CloudFront API and stores results in SQLite. Includes a Dockerfile (Debian, Playwright-compatible), docker-compose, and a Gitea Actions pipeline that builds and pushes to the container registry.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
218
lib/db.ts
Normal file
218
lib/db.ts
Normal file
@@ -0,0 +1,218 @@
|
||||
import Database from "better-sqlite3";
|
||||
import path from "path";
|
||||
import fs from "fs";
|
||||
|
||||
// SQLite storage location, resolved relative to the process working directory.
// NOTE(review): assumes the process is started from the project root — confirm
// this matches the Docker entrypoint / compose working_dir.
const DATA_DIR = path.join(process.cwd(), "data");
const DB_PATH = path.join(DATA_DIR, "parks.db");

// Convenience alias so callers can type a handle without importing
// better-sqlite3's types directly.
export type DbInstance = Database.Database;
|
||||
|
||||
export function openDb(): Database.Database {
|
||||
fs.mkdirSync(DATA_DIR, { recursive: true });
|
||||
const db = new Database(DB_PATH);
|
||||
db.pragma("journal_mode = WAL");
|
||||
db.exec(`
|
||||
CREATE TABLE IF NOT EXISTS park_days (
|
||||
park_id TEXT NOT NULL,
|
||||
date TEXT NOT NULL, -- YYYY-MM-DD
|
||||
is_open INTEGER NOT NULL DEFAULT 0,
|
||||
hours_label TEXT,
|
||||
scraped_at TEXT NOT NULL,
|
||||
PRIMARY KEY (park_id, date)
|
||||
);
|
||||
CREATE TABLE IF NOT EXISTS park_api_ids (
|
||||
park_id TEXT PRIMARY KEY,
|
||||
api_id INTEGER NOT NULL,
|
||||
api_abbreviation TEXT,
|
||||
api_name TEXT,
|
||||
discovered_at TEXT NOT NULL
|
||||
)
|
||||
`);
|
||||
return db;
|
||||
}
|
||||
|
||||
export function upsertDay(
|
||||
db: Database.Database,
|
||||
parkId: string,
|
||||
date: string,
|
||||
isOpen: boolean,
|
||||
hoursLabel?: string
|
||||
) {
|
||||
db.prepare(`
|
||||
INSERT INTO park_days (park_id, date, is_open, hours_label, scraped_at)
|
||||
VALUES (?, ?, ?, ?, ?)
|
||||
ON CONFLICT (park_id, date) DO UPDATE SET
|
||||
is_open = excluded.is_open,
|
||||
hours_label = excluded.hours_label,
|
||||
scraped_at = excluded.scraped_at
|
||||
`).run(parkId, date, isOpen ? 1 : 0, hoursLabel ?? null, new Date().toISOString());
|
||||
}
|
||||
|
||||
/** Open/closed status for a single park on a single date. */
export interface DayData {
  // True when the stored is_open column is 1.
  isOpen: boolean;
  // Hours text as recorded by the scraper; null when none was stored.
  hoursLabel: string | null;
}
|
||||
|
||||
/**
|
||||
* Returns scraped data for all parks across a date range.
|
||||
* Shape: { parkId: { 'YYYY-MM-DD': DayData } }
|
||||
* Missing dates mean that date hasn't been scraped yet (not necessarily closed).
|
||||
*/
|
||||
export function getDateRange(
|
||||
db: Database.Database,
|
||||
startDate: string,
|
||||
endDate: string
|
||||
): Record<string, Record<string, DayData>> {
|
||||
const rows = db
|
||||
.prepare(
|
||||
`SELECT park_id, date, is_open, hours_label
|
||||
FROM park_days
|
||||
WHERE date >= ? AND date <= ?`
|
||||
)
|
||||
.all(startDate, endDate) as {
|
||||
park_id: string;
|
||||
date: string;
|
||||
is_open: number;
|
||||
hours_label: string | null;
|
||||
}[];
|
||||
|
||||
const result: Record<string, Record<string, DayData>> = {};
|
||||
for (const row of rows) {
|
||||
if (!result[row.park_id]) result[row.park_id] = {};
|
||||
result[row.park_id][row.date] = {
|
||||
isOpen: row.is_open === 1,
|
||||
hoursLabel: row.hours_label,
|
||||
};
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/** Returns a map of parkId → boolean[] (index 0 = day 1) for a given month. */
|
||||
export function getMonthCalendar(
|
||||
db: Database.Database,
|
||||
year: number,
|
||||
month: number
|
||||
): Record<string, boolean[]> {
|
||||
const prefix = `${year}-${String(month).padStart(2, "0")}`;
|
||||
const rows = db
|
||||
.prepare(
|
||||
`SELECT park_id, date, is_open
|
||||
FROM park_days
|
||||
WHERE date LIKE ? || '-%'
|
||||
ORDER BY date`
|
||||
)
|
||||
.all(prefix) as { park_id: string; date: string; is_open: number }[];
|
||||
|
||||
const result: Record<string, boolean[]> = {};
|
||||
for (const row of rows) {
|
||||
if (!result[row.park_id]) result[row.park_id] = [];
|
||||
const day = parseInt(row.date.slice(8), 10);
|
||||
result[row.park_id][day - 1] = row.is_open === 1;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/** True if the DB already has at least one row for this park+month. */
|
||||
const STALE_AFTER_MS = 7 * 24 * 60 * 60 * 1000; // 1 week
|
||||
|
||||
/** True if the DB has data for this park+month scraped within the last week. */
|
||||
export function isMonthScraped(
|
||||
db: Database.Database,
|
||||
parkId: string,
|
||||
year: number,
|
||||
month: number
|
||||
): boolean {
|
||||
const prefix = `${year}-${String(month).padStart(2, "0")}`;
|
||||
const row = db
|
||||
.prepare(
|
||||
`SELECT MAX(scraped_at) AS last_scraped
|
||||
FROM park_days
|
||||
WHERE park_id = ? AND date LIKE ? || '-%'`
|
||||
)
|
||||
.get(parkId, prefix) as { last_scraped: string | null };
|
||||
|
||||
if (!row.last_scraped) return false;
|
||||
const ageMs = Date.now() - new Date(row.last_scraped).getTime();
|
||||
return ageMs < STALE_AFTER_MS;
|
||||
}
|
||||
|
||||
export function getApiId(db: Database.Database, parkId: string): number | null {
|
||||
const row = db
|
||||
.prepare("SELECT api_id FROM park_api_ids WHERE park_id = ?")
|
||||
.get(parkId) as { api_id: number } | undefined;
|
||||
return row?.api_id ?? null;
|
||||
}
|
||||
|
||||
export function setApiId(
|
||||
db: Database.Database,
|
||||
parkId: string,
|
||||
apiId: number,
|
||||
apiAbbreviation?: string,
|
||||
apiName?: string
|
||||
) {
|
||||
db.prepare(`
|
||||
INSERT INTO park_api_ids (park_id, api_id, api_abbreviation, api_name, discovered_at)
|
||||
VALUES (?, ?, ?, ?, ?)
|
||||
ON CONFLICT (park_id) DO UPDATE SET
|
||||
api_id = excluded.api_id,
|
||||
api_abbreviation = excluded.api_abbreviation,
|
||||
api_name = excluded.api_name,
|
||||
discovered_at = excluded.discovered_at
|
||||
`).run(
|
||||
parkId,
|
||||
apiId,
|
||||
apiAbbreviation ?? null,
|
||||
apiName ?? null,
|
||||
new Date().toISOString()
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Find the next park+month to scrape.
|
||||
* Priority: never-scraped first, then oldest scraped_at.
|
||||
* Considers current month through monthsAhead months into the future.
|
||||
*/
|
||||
export function getNextScrapeTarget(
|
||||
db: Database.Database,
|
||||
parkIds: string[],
|
||||
monthsAhead = 12
|
||||
): { parkId: string; year: number; month: number } | null {
|
||||
const now = new Date();
|
||||
|
||||
const candidates: {
|
||||
parkId: string;
|
||||
year: number;
|
||||
month: number;
|
||||
lastScraped: string | null;
|
||||
}[] = [];
|
||||
|
||||
for (const parkId of parkIds) {
|
||||
for (let i = 0; i < monthsAhead; i++) {
|
||||
const d = new Date(now.getFullYear(), now.getMonth() + i, 1);
|
||||
const year = d.getFullYear();
|
||||
const month = d.getMonth() + 1;
|
||||
const prefix = `${year}-${String(month).padStart(2, "0")}`;
|
||||
|
||||
const row = db
|
||||
.prepare(
|
||||
`SELECT MAX(scraped_at) AS last_scraped
|
||||
FROM park_days
|
||||
WHERE park_id = ? AND date LIKE ? || '-%'`
|
||||
)
|
||||
.get(parkId, prefix) as { last_scraped: string | null };
|
||||
|
||||
candidates.push({ parkId, year, month, lastScraped: row.last_scraped });
|
||||
}
|
||||
}
|
||||
|
||||
// Never-scraped (null) first, then oldest scraped_at
|
||||
candidates.sort((a, b) => {
|
||||
if (!a.lastScraped && !b.lastScraped) return 0;
|
||||
if (!a.lastScraped) return -1;
|
||||
if (!b.lastScraped) return 1;
|
||||
return a.lastScraped.localeCompare(b.lastScraped);
|
||||
});
|
||||
|
||||
const top = candidates[0];
|
||||
return top ? { parkId: top.parkId, year: top.year, month: top.month } : null;
|
||||
}
|
||||
Reference in New Issue
Block a user