feat: initial project scaffold with CI/CD and Docker deployment
Next.js 15 + Tailwind CSS v4 week calendar showing Six Flags park hours. Scrapes the internal CloudFront API, stores results in SQLite. Includes Dockerfile (Debian/Playwright-compatible), docker-compose, and Gitea Actions pipeline that builds and pushes to the container registry. Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
11
.dockerignore
Normal file
11
.dockerignore
Normal file
@@ -0,0 +1,11 @@
|
||||
# Paths excluded from the Docker build context: VCS metadata, build
# output, dependencies (reinstalled in-image), local SQLite databases,
# and env files so they never leak into the image.
.git
.gitea
.next
node_modules
data/
*.db
*.db-shm
*.db-wal
.env*
npm-debug.log*
.DS_Store
|
||||
39
.gitea/workflows/deploy.yml
Normal file
39
.gitea/workflows/deploy.yml
Normal file
@@ -0,0 +1,39 @@
|
||||
name: Build and Deploy

# Build the Docker image on every push to main and push it to the Gitea
# container registry, tagged both :latest and :<short-sha>.
on:
  push:
    branches:
      - main

jobs:
  build-and-push:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      # Compute the image name and both tags once, exposed as step outputs.
      - name: Set image name
        id: image
        run: |
          IMAGE="${{ vars.REGISTRY_URL }}/josh/sixflagssupercalendar"
          SHA="${{ github.sha }}"
          echo "full=${IMAGE}" >> "$GITHUB_OUTPUT"
          echo "tag_latest=${IMAGE}:latest" >> "$GITHUB_OUTPUT"
          echo "tag_sha=${IMAGE}:${SHA::8}" >> "$GITHUB_OUTPUT"

      # REGISTRY_TOKEN is a repo secret; piped via stdin so it never
      # appears in the process list.
      - name: Log in to registry
        run: |
          echo "${{ secrets.REGISTRY_TOKEN }}" | \
            docker login "${{ vars.REGISTRY_URL }}" -u josh --password-stdin

      - name: Build image
        run: |
          docker build \
            -t "${{ steps.image.outputs.tag_latest }}" \
            -t "${{ steps.image.outputs.tag_sha }}" \
            .

      - name: Push image
        run: |
          docker push "${{ steps.image.outputs.tag_latest }}"
          docker push "${{ steps.image.outputs.tag_sha }}"
|
||||
38
.gitignore
vendored
Normal file
38
.gitignore
vendored
Normal file
@@ -0,0 +1,38 @@
|
||||
# dependencies
|
||||
/node_modules
|
||||
/.pnp
|
||||
.pnp.*
|
||||
.yarn/*
|
||||
!.yarn/patches
|
||||
!.yarn/releases
|
||||
!.yarn/versions
|
||||
|
||||
# testing
|
||||
/coverage
|
||||
|
||||
# next.js
|
||||
/.next/
|
||||
/out/
|
||||
|
||||
# production
|
||||
/build
|
||||
|
||||
# misc
|
||||
.DS_Store
|
||||
*.pem
|
||||
|
||||
# debug
|
||||
npm-debug.log*
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
|
||||
# scraped data — local only, not committed
|
||||
/data/
|
||||
|
||||
# env files
|
||||
.env*
|
||||
!.env.example
|
||||
|
||||
# typescript
|
||||
*.tsbuildinfo
|
||||
next-env.d.ts
|
||||
57
Dockerfile
Normal file
57
Dockerfile
Normal file
@@ -0,0 +1,57 @@
|
||||
# Stage 1: Install all dependencies (dev included — scripts need tsx + playwright)
FROM node:22-bookworm-slim AS deps
# python3/make/g++: node-gyp toolchain for natively-compiled deps
# (the app uses better-sqlite3 — see README).
RUN apt-get update && apt-get install -y --no-install-recommends python3 make g++ && \
    rm -rf /var/lib/apt/lists/*
WORKDIR /app
COPY package.json package-lock.json* ./
RUN npm ci

# Stage 2: Build the Next.js app
FROM deps AS builder
COPY . .
RUN npm run build

# Stage 3: Production runner
FROM node:22-bookworm-slim AS runner
WORKDIR /app

ENV NODE_ENV=production
ENV NEXT_TELEMETRY_DISABLED=1
# Store Playwright browser in a predictable path inside the image
ENV PLAYWRIGHT_BROWSERS_PATH=/app/.playwright

# Create non-root user before copying files so --chown works
RUN addgroup --system --gid 1001 nodejs && \
    adduser --system --uid 1001 nextjs

# Copy Next.js standalone output
COPY --from=builder --chown=nextjs:nodejs /app/.next/standalone ./
COPY --from=builder --chown=nextjs:nodejs /app/.next/static ./.next/static
COPY --from=builder --chown=nextjs:nodejs /app/public ./public

# Copy scripts + library source (needed for npm run discover/scrape via tsx)
COPY --from=builder --chown=nextjs:nodejs /app/scripts ./scripts
COPY --from=builder --chown=nextjs:nodejs /app/lib ./lib
COPY --from=builder --chown=nextjs:nodejs /app/package.json ./package.json
COPY --from=builder --chown=nextjs:nodejs /app/tsconfig.json ./tsconfig.json

# Replace standalone's minimal node_modules with full deps
# (includes tsx, playwright, and all devDependencies)
COPY --from=deps --chown=nextjs:nodejs /app/node_modules ./node_modules

# Install Playwright Chromium browser + all required system libraries.
# Runs as root so apt-get works; browser lands in PLAYWRIGHT_BROWSERS_PATH.
RUN npx playwright install --with-deps chromium && \
    chown -R nextjs:nodejs /app/.playwright

# SQLite data directory — mount a named volume here for persistence
RUN mkdir -p /app/data && chown nextjs:nodejs /app/data
VOLUME ["/app/data"]

USER nextjs

EXPOSE 3000
ENV PORT=3000
ENV HOSTNAME="0.0.0.0"

# server.js is produced by the standalone output copied above.
CMD ["node", "server.js"]
|
||||
143
README.md
Normal file
143
README.md
Normal file
@@ -0,0 +1,143 @@
|
||||
# Six Flags Super Calendar
|
||||
|
||||
A week-by-week calendar showing operating hours for all Six Flags Entertainment Group theme parks — including the former Cedar Fair parks. Data is scraped from the Six Flags internal API and stored locally in SQLite.
|
||||
|
||||
## Parks
|
||||
|
||||
24 theme parks across the US, Canada, and Mexico:
|
||||
|
||||
**Six Flags branded** — Great Adventure (NJ), Magic Mountain (CA), Great America (IL), Over Georgia, Over Texas, St. Louis, Fiesta Texas (TX), New England (MA), Discovery Kingdom (CA), Mexico, Great Escape (NY), Darien Lake (NY), Frontier City (OK)
|
||||
|
||||
**Former Cedar Fair** — Cedar Point (OH), Knott's Berry Farm (CA), Canada's Wonderland (ON), Carowinds (NC), Kings Dominion (VA), Kings Island (OH), Valleyfair (MN), Worlds of Fun (MO), Michigan's Adventure (MI), Dorney Park (PA), California's Great America (CA)
|
||||
|
||||
## Tech Stack
|
||||
|
||||
- **Next.js 15** (App Router, Server Components, standalone output)
|
||||
- **Tailwind CSS v4** (`@theme {}` CSS variables, no config file)
|
||||
- **SQLite** via `better-sqlite3` — persisted in `/app/data/parks.db`
|
||||
- **Playwright** — one-time headless browser run to discover each park's internal API ID
|
||||
- **Six Flags CloudFront API** — `https://d18car1k0ff81h.cloudfront.net/operating-hours/park/{id}?date=YYYYMM`
|
||||
|
||||
## Local Development
|
||||
|
||||
### Prerequisites
|
||||
|
||||
- Node.js 22+
|
||||
- npm
|
||||
|
||||
### Setup
|
||||
|
||||
```bash
|
||||
npm install
|
||||
npx playwright install chromium
|
||||
```
|
||||
|
||||
### Seed the database
|
||||
|
||||
Run once to discover each park's internal API ID (opens a headless browser per park):
|
||||
|
||||
```bash
|
||||
npm run discover
|
||||
```
|
||||
|
||||
Then scrape operating hours for the full year:
|
||||
|
||||
```bash
|
||||
npm run scrape
|
||||
```
|
||||
|
||||
To force a full re-scrape (ignores the 7-day staleness window):
|
||||
|
||||
```bash
|
||||
npm run scrape:force
|
||||
```
|
||||
|
||||
### Run the dev server
|
||||
|
||||
```bash
|
||||
npm run dev
|
||||
```
|
||||
|
||||
Open [http://localhost:3000](http://localhost:3000). Navigate weeks with the `←` / `→` buttons or pass `?week=YYYY-MM-DD` directly.
|
||||
|
||||
---
|
||||
|
||||
## Deployment
|
||||
|
||||
### Docker (standalone)
|
||||
|
||||
The app uses Next.js standalone output. The SQLite database is stored in a Docker volume at `/app/data`.
|
||||
|
||||
#### Build and run locally
|
||||
|
||||
```bash
|
||||
docker compose up --build
|
||||
```
|
||||
|
||||
Or pull from the registry:
|
||||
|
||||
```bash
|
||||
REGISTRY_URL=your.registry.host docker compose up
|
||||
```
|
||||
|
||||
#### Seed the database inside the container
|
||||
|
||||
The production image includes Playwright and Chromium, so discovery and scraping can be run directly against the running container's volume.
|
||||
|
||||
```bash
|
||||
# Discover API IDs for all parks (one-time, opens headless browser per park)
|
||||
docker compose exec web npm run discover
|
||||
|
||||
# Scrape operating hours for the full year
|
||||
docker compose exec web npm run scrape
|
||||
```
|
||||
|
||||
Or as one-off containers against the named volume:
|
||||
|
||||
```bash
|
||||
docker run --rm -v sixflagssupercalendar_park_data:/app/data \
|
||||
your.registry.host/josh/sixflagssupercalendar:latest \
|
||||
npm run discover
|
||||
|
||||
docker run --rm -v sixflagssupercalendar_park_data:/app/data \
|
||||
your.registry.host/josh/sixflagssupercalendar:latest \
|
||||
npm run scrape
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### CI/CD (Gitea Actions)
|
||||
|
||||
The pipeline is defined at [`.gitea/workflows/deploy.yml`](.gitea/workflows/deploy.yml).
|
||||
|
||||
**Trigger:** Push to `main`
|
||||
|
||||
**Steps:**
|
||||
1. Checkout code
|
||||
2. Log in to the Gitea container registry
|
||||
3. Build and tag the image as `:latest` and `:<short-sha>`
|
||||
4. Push both tags
|
||||
|
||||
#### Required configuration in Gitea
|
||||
|
||||
| Type | Name | Value |
|
||||
|------|------|-------|
|
||||
| Variable | `REGISTRY_URL` | Your registry host, e.g. `gitea.example.com` |
|
||||
| Secret | `REGISTRY_TOKEN` | A Gitea access token with `package:write` scope |
|
||||
|
||||
Set these under **Repository → Settings → Actions → Variables / Secrets**.
|
||||
|
||||
#### Upstream remote
|
||||
|
||||
```bash
|
||||
git remote add origin https://gitea.thewrightserver.net/josh/SixFlagsSuperCalendar.git
|
||||
git push -u origin main
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Data Refresh
|
||||
|
||||
The scrape job skips any park+month combination scraped within the last 7 days. To keep data current, run `npm run scrape` (or `scrape:force`) on a schedule — weekly is sufficient for a season calendar.
|
||||
|
||||
Parks and months not yet in the database show a `—` placeholder in the UI. Parks with no hours data on a given day show "Closed".
|
||||
59
app/api/parks/route.ts
Normal file
59
app/api/parks/route.ts
Normal file
@@ -0,0 +1,59 @@
|
||||
import { NextRequest, NextResponse } from "next/server";
|
||||
import { PARKS } from "@/lib/parks";
|
||||
import { openDb, getMonthCalendar } from "@/lib/db";
|
||||
import type { Park } from "@/lib/scrapers/types";
|
||||
|
||||
/** Shape of the JSON payload returned by GET /api/parks. */
export interface ParksApiResponse {
  // Static park metadata from lib/parks.
  parks: Park[];
  // parkId → one boolean per day of the month (true = open).
  calendar: Record<string, boolean[]>;
  // Echo of the requested month, normalized to "YYYY-MM".
  month: string;
  daysInMonth: number;
}
|
||||
|
||||
function getDaysInMonth(year: number, month: number): number {
|
||||
return new Date(year, month, 0).getDate();
|
||||
}
|
||||
|
||||
function parseMonthParam(
|
||||
monthParam: string | null
|
||||
): { year: number; month: number } | null {
|
||||
if (!monthParam) return null;
|
||||
const match = monthParam.match(/^(\d{4})-(\d{2})$/);
|
||||
if (!match) return null;
|
||||
const year = parseInt(match[1], 10);
|
||||
const month = parseInt(match[2], 10);
|
||||
if (month < 1 || month > 12) return null;
|
||||
return { year, month };
|
||||
}
|
||||
|
||||
export async function GET(request: NextRequest): Promise<NextResponse> {
|
||||
const monthParam = request.nextUrl.searchParams.get("month");
|
||||
const parsed = parseMonthParam(monthParam);
|
||||
|
||||
if (!parsed) {
|
||||
return NextResponse.json(
|
||||
{ error: "Invalid or missing ?month=YYYY-MM parameter" },
|
||||
{ status: 400 }
|
||||
);
|
||||
}
|
||||
|
||||
const { year, month } = parsed;
|
||||
const daysInMonth = getDaysInMonth(year, month);
|
||||
|
||||
const db = openDb();
|
||||
const calendar = getMonthCalendar(db, year, month);
|
||||
db.close();
|
||||
|
||||
const response: ParksApiResponse = {
|
||||
parks: PARKS,
|
||||
calendar,
|
||||
month: `${year}-${String(month).padStart(2, "0")}`,
|
||||
daysInMonth,
|
||||
};
|
||||
|
||||
return NextResponse.json(response, {
|
||||
headers: {
|
||||
"Cache-Control": "public, s-maxage=3600, stale-while-revalidate=86400",
|
||||
},
|
||||
});
|
||||
}
|
||||
46
app/globals.css
Normal file
46
app/globals.css
Normal file
@@ -0,0 +1,46 @@
|
||||
@import "tailwindcss";

/* Design tokens (Tailwind v4 @theme) — referenced across components as
   var(--color-*). */
@theme {
  /* Base surfaces and text */
  --color-bg: #0a0f1e;
  --color-surface: #111827;
  --color-surface-2: #1a2235;
  --color-border: #1f2d45;
  --color-text: #f1f5f9;
  --color-text-muted: #64748b;
  --color-text-dim: #334155;

  /* "Open" calendar cell */
  --color-open-bg: #052e16;
  --color-open-border: #166534;
  --color-open-text: #4ade80;
  --color-open-hours: #bbf7d0;

  /* Today highlight */
  --color-today-bg: #0c1a3d;
  --color-today-border: #2563eb;
  --color-today-text: #93c5fd;

  --color-weekend-header: #141f35;
}

/* Dark page defaults */
:root {
  background-color: var(--color-bg);
  color: var(--color-text);
  font-family: ui-sans-serif, system-ui, -apple-system, sans-serif;
}

/* Global reset */
* {
  box-sizing: border-box;
  margin: 0;
  padding: 0;
}

/* Slim dark scrollbars (WebKit only) */
::-webkit-scrollbar {
  width: 6px;
  height: 6px;
}
::-webkit-scrollbar-track {
  background: var(--color-bg);
}
::-webkit-scrollbar-thumb {
  background: var(--color-border);
  border-radius: 3px;
}
|
||||
15
app/layout.tsx
Normal file
15
app/layout.tsx
Normal file
@@ -0,0 +1,15 @@
|
||||
import type { Metadata } from "next";
|
||||
import "./globals.css";
|
||||
|
||||
export const metadata: Metadata = {
|
||||
title: "Six Flags Calendar",
|
||||
description: "Theme park operating calendars at a glance",
|
||||
};
|
||||
|
||||
export default function RootLayout({ children }: { children: React.ReactNode }) {
|
||||
return (
|
||||
<html lang="en">
|
||||
<body>{children}</body>
|
||||
</html>
|
||||
);
|
||||
}
|
||||
137
app/page.tsx
Normal file
137
app/page.tsx
Normal file
@@ -0,0 +1,137 @@
|
||||
import { WeekCalendar } from "@/components/WeekCalendar";
|
||||
import { WeekNav } from "@/components/WeekNav";
|
||||
import { PARKS } from "@/lib/parks";
|
||||
import { openDb, getDateRange } from "@/lib/db";
|
||||
|
||||
interface PageProps {
  // Next.js 15: searchParams is a Promise in Server Components.
  // `week` is expected as YYYY-MM-DD (any day of the target week).
  searchParams: Promise<{ week?: string }>;
}
|
||||
|
||||
function getWeekStart(param: string | undefined): string {
|
||||
if (param && /^\d{4}-\d{2}-\d{2}$/.test(param)) {
|
||||
const d = new Date(param + "T00:00:00");
|
||||
if (!isNaN(d.getTime())) {
|
||||
// Snap to Sunday
|
||||
d.setDate(d.getDate() - d.getDay());
|
||||
return d.toISOString().slice(0, 10);
|
||||
}
|
||||
}
|
||||
const today = new Date();
|
||||
today.setDate(today.getDate() - today.getDay());
|
||||
return today.toISOString().slice(0, 10);
|
||||
}
|
||||
|
||||
function getWeekDates(sundayIso: string): string[] {
|
||||
return Array.from({ length: 7 }, (_, i) => {
|
||||
const d = new Date(sundayIso + "T00:00:00");
|
||||
d.setDate(d.getDate() + i);
|
||||
return d.toISOString().slice(0, 10);
|
||||
});
|
||||
}
|
||||
|
||||
export default async function HomePage({ searchParams }: PageProps) {
|
||||
const params = await searchParams;
|
||||
const weekStart = getWeekStart(params.week);
|
||||
const weekDates = getWeekDates(weekStart);
|
||||
const endDate = weekDates[6];
|
||||
|
||||
const db = openDb();
|
||||
const data = getDateRange(db, weekStart, endDate);
|
||||
db.close();
|
||||
|
||||
// Count how many days have any scraped data (to show empty state)
|
||||
const scrapedCount = Object.values(data).reduce(
|
||||
(sum, parkData) => sum + Object.keys(parkData).length,
|
||||
0
|
||||
);
|
||||
|
||||
return (
|
||||
<div style={{ minHeight: "100vh", background: "var(--color-bg)" }}>
|
||||
{/* Header */}
|
||||
<header style={{
|
||||
borderBottom: "1px solid var(--color-border)",
|
||||
padding: "16px 24px",
|
||||
display: "flex",
|
||||
alignItems: "center",
|
||||
justifyContent: "space-between",
|
||||
gap: 16,
|
||||
position: "sticky",
|
||||
top: 0,
|
||||
zIndex: 20,
|
||||
background: "var(--color-bg)",
|
||||
}}>
|
||||
<div style={{ display: "flex", alignItems: "baseline", gap: 10 }}>
|
||||
<span style={{ fontSize: "1rem", fontWeight: 700, color: "var(--color-text)", letterSpacing: "-0.02em" }}>
|
||||
Six Flags Calendar
|
||||
</span>
|
||||
<span style={{ fontSize: "0.75rem", color: "var(--color-text-muted)" }}>
|
||||
{PARKS.length} parks
|
||||
</span>
|
||||
</div>
|
||||
|
||||
<WeekNav weekStart={weekStart} weekDates={weekDates} />
|
||||
|
||||
<Legend />
|
||||
</header>
|
||||
|
||||
{/* Calendar */}
|
||||
<main style={{ padding: "0 24px 40px" }}>
|
||||
{scrapedCount === 0 ? (
|
||||
<EmptyState />
|
||||
) : (
|
||||
<WeekCalendar parks={PARKS} weekDates={weekDates} data={data} />
|
||||
)}
|
||||
</main>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
function Legend() {
|
||||
return (
|
||||
<div style={{ display: "flex", alignItems: "center", gap: 16, fontSize: "0.72rem", color: "var(--color-text-muted)" }}>
|
||||
<span style={{ display: "flex", alignItems: "center", gap: 5 }}>
|
||||
<span style={{
|
||||
display: "inline-block", width: 28, height: 14, borderRadius: 3,
|
||||
background: "var(--color-open-bg)", border: "1px solid var(--color-open-border)",
|
||||
}} />
|
||||
Open
|
||||
</span>
|
||||
<span style={{ display: "flex", alignItems: "center", gap: 5 }}>
|
||||
<span style={{ color: "var(--color-text-dim)" }}>Closed</span>
|
||||
<span style={{ color: "var(--color-border)" }}>·</span>
|
||||
<span style={{ color: "var(--color-text-dim)" }}>— no data</span>
|
||||
</span>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
function EmptyState() {
|
||||
return (
|
||||
<div style={{
|
||||
display: "flex",
|
||||
flexDirection: "column",
|
||||
alignItems: "center",
|
||||
justifyContent: "center",
|
||||
padding: "80px 24px",
|
||||
gap: 12,
|
||||
color: "var(--color-text-muted)",
|
||||
}}>
|
||||
<div style={{ fontSize: "2rem" }}>📅</div>
|
||||
<div style={{ fontSize: "1rem", fontWeight: 600, color: "var(--color-text)" }}>No data scraped yet</div>
|
||||
<div style={{ fontSize: "0.85rem", textAlign: "center", lineHeight: 1.6 }}>
|
||||
Run the following to populate the calendar:
|
||||
</div>
|
||||
<pre style={{
|
||||
background: "var(--color-surface)",
|
||||
border: "1px solid var(--color-border)",
|
||||
borderRadius: 8,
|
||||
padding: "12px 20px",
|
||||
fontSize: "0.8rem",
|
||||
color: "var(--color-text)",
|
||||
lineHeight: 1.8,
|
||||
}}>
|
||||
npm run discover{"\n"}npm run scrape
|
||||
</pre>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
123
components/CalendarGrid.tsx
Normal file
123
components/CalendarGrid.tsx
Normal file
@@ -0,0 +1,123 @@
|
||||
import type { Park } from "@/lib/scrapers/types";
|
||||
|
||||
/** Props for the month-view grid (one row per park, one column per day). */
interface CalendarGridProps {
  parks: Park[];
  // parkId → one boolean per day of the month (true = open).
  calendar: Record<string, boolean[]>;
  daysInMonth: number;
  year: number;
  // 1-indexed month.
  month: number;
}

const DOW_LABELS = ["Su", "Mo", "Tu", "We", "Th", "Fr", "Sa"];

/**
 * Month-view open/closed grid. Sticky first column keeps park names
 * visible during horizontal scroll; today's column is highlighted when
 * the rendered month is the current one.
 *
 * NOTE(review): the cells reference var(--color-open) and
 * var(--color-closed), which are not defined in the visible globals.css
 * @theme block (it defines --color-open-bg / --color-open-text etc.) —
 * confirm these tokens exist, otherwise the swatches render with no
 * background. Also, the visible page.tsx renders WeekCalendar, not this
 * component — verify it is still reachable.
 */
export function CalendarGrid({
  parks,
  calendar,
  daysInMonth,
  year,
  month,
}: CalendarGridProps) {
  // 1..daysInMonth
  const days = Array.from({ length: daysInMonth }, (_, i) => i + 1);
  const today = new Date();
  // Day-of-month to highlight, or null when viewing another month.
  const todayDay =
    today.getFullYear() === year && today.getMonth() + 1 === month
      ? today.getDate()
      : null;

  return (
    <div className="overflow-x-auto">
      <table className="border-collapse text-sm w-full min-w-max">
        <thead>
          <tr>
            <th
              className="sticky left-0 z-10 px-3 py-2 text-left font-medium border-b min-w-40"
              style={{
                backgroundColor: "var(--color-bg)",
                color: "var(--color-text-muted)",
                borderColor: "var(--color-border)",
              }}
            >
              Park
            </th>
            {days.map((day) => {
              const dow = new Date(year, month - 1, day).getDay();
              const isWeekend = dow === 0 || dow === 6;
              const isToday = day === todayDay;
              return (
                <th
                  key={day}
                  className="px-1 py-2 text-center font-normal w-8 border-b"
                  style={{
                    color: isWeekend
                      ? "var(--color-text)"
                      : "var(--color-text-muted)",
                    borderColor: "var(--color-border)",
                    backgroundColor: isToday
                      ? "var(--color-surface)"
                      : undefined,
                  }}
                >
                  <div className="text-xs">{DOW_LABELS[dow]}</div>
                  <div
                    style={
                      isToday
                        ? { fontWeight: 700, color: "var(--color-open)" }
                        : undefined
                    }
                  >
                    {day}
                  </div>
                </th>
              );
            })}
          </tr>
        </thead>
        <tbody>
          {parks.map((park) => {
            // Missing park → empty array → every day falls back to closed.
            const parkDays = calendar[park.id] ?? [];
            return (
              <tr key={park.id}>
                <td
                  className="sticky left-0 z-10 px-3 py-1 text-xs border-b whitespace-nowrap"
                  style={{
                    backgroundColor: "var(--color-bg)",
                    color: "var(--color-text-muted)",
                    borderColor: "var(--color-border)",
                  }}
                >
                  {park.shortName}
                </td>
                {days.map((day) => {
                  const isOpen = parkDays[day - 1] ?? false;
                  const isToday = day === todayDay;
                  return (
                    <td
                      key={day}
                      className="px-1 py-1 text-center border-b"
                      style={{
                        borderColor: "var(--color-border)",
                        backgroundColor: isToday
                          ? "rgba(30,41,59,0.3)"
                          : undefined,
                      }}
                      title={`${park.shortName} — ${month}/${day}: ${isOpen ? "Open" : "Closed"}`}
                    >
                      <span
                        className="inline-block w-5 h-5 rounded-sm"
                        style={{
                          backgroundColor: isOpen
                            ? "var(--color-open)"
                            : "var(--color-closed)",
                        }}
                      />
                    </td>
                  );
                })}
              </tr>
            );
          })}
        </tbody>
      </table>
    </div>
  );
}
|
||||
55
components/MonthNav.tsx
Normal file
55
components/MonthNav.tsx
Normal file
@@ -0,0 +1,55 @@
|
||||
"use client";
|
||||
|
||||
import { useRouter } from "next/navigation";
|
||||
|
||||
interface MonthNavProps {
|
||||
currentYear: number;
|
||||
currentMonth: number;
|
||||
}
|
||||
|
||||
const MONTH_NAMES = [
|
||||
"January", "February", "March", "April", "May", "June",
|
||||
"July", "August", "September", "October", "November", "December",
|
||||
];
|
||||
|
||||
function addMonths(year: number, month: number, delta: number) {
|
||||
const d = new Date(year, month - 1 + delta, 1);
|
||||
return { year: d.getFullYear(), month: d.getMonth() + 1 };
|
||||
}
|
||||
|
||||
function formatParam(year: number, month: number) {
|
||||
return `${year}-${String(month).padStart(2, "0")}`;
|
||||
}
|
||||
|
||||
export function MonthNav({ currentYear, currentMonth }: MonthNavProps) {
|
||||
const router = useRouter();
|
||||
|
||||
function navigate(delta: -1 | 1) {
|
||||
const { year, month } = addMonths(currentYear, currentMonth, delta);
|
||||
router.push(`/?month=${formatParam(year, month)}`);
|
||||
}
|
||||
|
||||
const btnStyle = {
|
||||
backgroundColor: "var(--color-surface)",
|
||||
border: "1px solid var(--color-border)",
|
||||
color: "var(--color-text-muted)",
|
||||
padding: "4px 12px",
|
||||
borderRadius: "6px",
|
||||
cursor: "pointer",
|
||||
fontSize: "1rem",
|
||||
};
|
||||
|
||||
return (
|
||||
<div style={{ display: "flex", alignItems: "center", gap: "16px" }}>
|
||||
<button onClick={() => navigate(-1)} style={btnStyle} aria-label="Previous month">
|
||||
←
|
||||
</button>
|
||||
<h1 style={{ fontSize: "1.25rem", fontWeight: 600, color: "var(--color-text)", margin: 0 }}>
|
||||
{MONTH_NAMES[currentMonth - 1]} {currentYear}
|
||||
</h1>
|
||||
<button onClick={() => navigate(1)} style={btnStyle} aria-label="Next month">
|
||||
→
|
||||
</button>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
261
components/WeekCalendar.tsx
Normal file
261
components/WeekCalendar.tsx
Normal file
@@ -0,0 +1,261 @@
|
||||
import type { Park } from "@/lib/scrapers/types";
|
||||
import type { DayData } from "@/lib/db";
|
||||
|
||||
/** Props for the week grid: one row per park, one column per day. */
interface WeekCalendarProps {
  parks: Park[];
  weekDates: string[]; // 7 dates, YYYY-MM-DD, Sun–Sat
  data: Record<string, Record<string, DayData>>; // parkId → date → DayData
}

// Column header labels, indexed by Date#getDay() (0 = Sunday).
const DOW = ["Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"];
// Short month names, indexed by Date#getMonth() (0 = January).
const MONTHS = [
  "Jan","Feb","Mar","Apr","May","Jun",
  "Jul","Aug","Sep","Oct","Nov","Dec",
];
|
||||
|
||||
|
||||
function parseDate(iso: string) {
|
||||
const d = new Date(iso + "T00:00:00");
|
||||
return {
|
||||
month: d.getMonth(),
|
||||
day: d.getDate(),
|
||||
dow: d.getDay(),
|
||||
isWeekend: d.getDay() === 0 || d.getDay() === 6,
|
||||
};
|
||||
}
|
||||
|
||||
export function WeekCalendar({ parks, weekDates, data }: WeekCalendarProps) {
|
||||
const today = new Date().toISOString().slice(0, 10);
|
||||
const parsedDates = weekDates.map(parseDate);
|
||||
|
||||
// Detect month boundaries for column headers
|
||||
const firstMonth = parsedDates[0].month;
|
||||
const firstYear = new Date(weekDates[0] + "T00:00:00").getFullYear();
|
||||
|
||||
return (
|
||||
<div style={{ overflowX: "auto", overflowY: "visible" }}>
|
||||
<table style={{
|
||||
borderCollapse: "collapse",
|
||||
width: "100%",
|
||||
minWidth: 700,
|
||||
tableLayout: "fixed",
|
||||
}}>
|
||||
<colgroup>
|
||||
{/* Park name column */}
|
||||
<col style={{ width: 172 }} />
|
||||
{weekDates.map((d) => (
|
||||
<col key={d} style={{ width: 120 }} />
|
||||
))}
|
||||
</colgroup>
|
||||
|
||||
<thead>
|
||||
{/* Month header — only show if we cross a month boundary */}
|
||||
{parsedDates.some((d) => d.month !== firstMonth) && (
|
||||
<tr>
|
||||
<th style={thParkStyle} />
|
||||
{weekDates.map((date, i) => {
|
||||
const pd = parsedDates[i];
|
||||
// Only render the month label on the first day of each new month (or the first column)
|
||||
const showMonth = i === 0 || pd.month !== parsedDates[i - 1].month;
|
||||
return (
|
||||
<th key={date} style={{
|
||||
...thDayStyle,
|
||||
background: pd.isWeekend ? "var(--color-weekend-header)" : "var(--color-bg)",
|
||||
paddingBottom: 0,
|
||||
paddingTop: 8,
|
||||
fontSize: "0.7rem",
|
||||
color: "var(--color-text-muted)",
|
||||
letterSpacing: "0.06em",
|
||||
textTransform: "uppercase",
|
||||
borderBottom: "none",
|
||||
}}>
|
||||
{showMonth ? `${MONTHS[pd.month]} ${new Date(date + "T00:00:00").getFullYear() !== firstYear ? new Date(date + "T00:00:00").getFullYear() : ""}` : ""}
|
||||
</th>
|
||||
);
|
||||
})}
|
||||
</tr>
|
||||
)}
|
||||
|
||||
{/* Day header */}
|
||||
<tr>
|
||||
<th style={thParkStyle}>
|
||||
<span style={{ color: "var(--color-text-muted)", fontSize: "0.7rem", letterSpacing: "0.08em", textTransform: "uppercase" }}>
|
||||
Park
|
||||
</span>
|
||||
</th>
|
||||
{weekDates.map((date, i) => {
|
||||
const pd = parsedDates[i];
|
||||
const isToday = date === today;
|
||||
return (
|
||||
<th key={date} style={{
|
||||
...thDayStyle,
|
||||
background: isToday
|
||||
? "var(--color-today-bg)"
|
||||
: pd.isWeekend
|
||||
? "var(--color-weekend-header)"
|
||||
: "var(--color-bg)",
|
||||
borderBottom: isToday
|
||||
? "2px solid var(--color-today-border)"
|
||||
: `1px solid var(--color-border)`,
|
||||
}}>
|
||||
<div style={{
|
||||
fontSize: "0.7rem",
|
||||
textTransform: "uppercase",
|
||||
letterSpacing: "0.06em",
|
||||
color: isToday ? "var(--color-today-text)" : pd.isWeekend ? "var(--color-text)" : "var(--color-text-muted)",
|
||||
marginBottom: 2,
|
||||
}}>
|
||||
{DOW[pd.dow]}
|
||||
</div>
|
||||
<div style={{
|
||||
fontSize: "1.05rem",
|
||||
fontWeight: isToday ? 700 : pd.isWeekend ? 600 : 400,
|
||||
color: isToday ? "var(--color-today-text)" : pd.isWeekend ? "var(--color-text)" : "var(--color-text-muted)",
|
||||
}}>
|
||||
{pd.day}
|
||||
</div>
|
||||
</th>
|
||||
);
|
||||
})}
|
||||
</tr>
|
||||
</thead>
|
||||
|
||||
<tbody>
|
||||
{parks.map((park, parkIdx) => {
|
||||
const parkData = data[park.id] ?? {};
|
||||
return (
|
||||
<tr
|
||||
key={park.id}
|
||||
style={{
|
||||
background: parkIdx % 2 === 0 ? "var(--color-bg)" : "var(--color-surface)",
|
||||
}}
|
||||
>
|
||||
{/* Park name */}
|
||||
<td style={{
|
||||
...tdParkStyle,
|
||||
background: parkIdx % 2 === 0 ? "var(--color-bg)" : "var(--color-surface)",
|
||||
}}>
|
||||
<div style={{ fontWeight: 500, color: "var(--color-text)", fontSize: "0.8rem", lineHeight: 1.2 }}>
|
||||
{park.shortName}
|
||||
</div>
|
||||
<div style={{ fontSize: "0.65rem", color: "var(--color-text-muted)", marginTop: 2 }}>
|
||||
{park.location.city}, {park.location.state}
|
||||
</div>
|
||||
</td>
|
||||
|
||||
{/* Day cells */}
|
||||
{weekDates.map((date, i) => {
|
||||
const pd = parsedDates[i];
|
||||
const isToday = date === today;
|
||||
const dayData = parkData[date];
|
||||
|
||||
if (!dayData) {
|
||||
// No data scraped yet
|
||||
return (
|
||||
<td key={date} style={{
|
||||
...tdBase,
|
||||
background: isToday ? "var(--color-today-bg)" : pd.isWeekend ? "var(--color-weekend-header)" : "transparent",
|
||||
borderLeft: isToday ? "1px solid var(--color-today-border)" : undefined,
|
||||
borderRight: isToday ? "1px solid var(--color-today-border)" : undefined,
|
||||
}}>
|
||||
<span style={{ color: "var(--color-text-dim)", fontSize: "0.65rem" }}>—</span>
|
||||
</td>
|
||||
);
|
||||
}
|
||||
|
||||
// Treat open-but-no-hours the same as closed (stale data or missing hours)
|
||||
if (!dayData.isOpen || !dayData.hoursLabel) {
|
||||
return (
|
||||
<td key={date} style={{
|
||||
...tdBase,
|
||||
background: isToday ? "var(--color-today-bg)" : pd.isWeekend ? "var(--color-weekend-header)" : "transparent",
|
||||
borderLeft: isToday ? "1px solid var(--color-today-border)" : undefined,
|
||||
borderRight: isToday ? "1px solid var(--color-today-border)" : undefined,
|
||||
}}>
|
||||
<span style={{ color: "var(--color-text-dim)", fontSize: "0.65rem" }}>Closed</span>
|
||||
</td>
|
||||
);
|
||||
}
|
||||
|
||||
// Open with confirmed hours
|
||||
return (
|
||||
<td key={date} style={{
|
||||
...tdBase,
|
||||
padding: 4,
|
||||
borderLeft: isToday ? "1px solid var(--color-today-border)" : undefined,
|
||||
borderRight: isToday ? "1px solid var(--color-today-border)" : undefined,
|
||||
}}>
|
||||
<div style={{
|
||||
background: "var(--color-open-bg)",
|
||||
border: "1px solid var(--color-open-border)",
|
||||
borderRadius: 6,
|
||||
padding: "6px 4px",
|
||||
textAlign: "center",
|
||||
height: "100%",
|
||||
display: "flex",
|
||||
alignItems: "center",
|
||||
justifyContent: "center",
|
||||
}}>
|
||||
<span style={{
|
||||
color: "var(--color-open-hours)",
|
||||
fontSize: "0.72rem",
|
||||
fontWeight: 500,
|
||||
letterSpacing: "-0.01em",
|
||||
whiteSpace: "nowrap",
|
||||
}}>
|
||||
{dayData.hoursLabel}
|
||||
</span>
|
||||
</div>
|
||||
</td>
|
||||
);
|
||||
})}
|
||||
</tr>
|
||||
);
|
||||
})}
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
// ── Shared styles ──────────────────────────────────────────────────────────

// Sticky "Park" header cell: pinned left during horizontal scroll, with
// an opaque background so day columns slide underneath it.
const thParkStyle: React.CSSProperties = {
  position: "sticky",
  left: 0,
  zIndex: 10,
  padding: "10px 12px",
  textAlign: "left",
  borderBottom: "1px solid var(--color-border)",
  background: "var(--color-bg)",
  verticalAlign: "bottom",
};

// Day-of-week / date header cells.
const thDayStyle: React.CSSProperties = {
  padding: "10px 8px 8px",
  textAlign: "center",
  fontWeight: 400,
  borderBottom: "1px solid var(--color-border)",
  borderLeft: "1px solid var(--color-border)",
  verticalAlign: "bottom",
};

// Sticky park-name body cell; lower zIndex than the header cell so the
// header stacks above it when both are pinned.
const tdParkStyle: React.CSSProperties = {
  position: "sticky",
  left: 0,
  zIndex: 5,
  padding: "10px 12px",
  borderBottom: "1px solid var(--color-border)",
  borderRight: "1px solid var(--color-border)",
  whiteSpace: "nowrap",
  verticalAlign: "middle",
};

// Base style for day cells; the no-data / closed / open variants spread
// this and override background and borders.
const tdBase: React.CSSProperties = {
  padding: 0,
  textAlign: "center",
  verticalAlign: "middle",
  borderBottom: "1px solid var(--color-border)",
  borderLeft: "1px solid var(--color-border)",
  height: 52,
};
|
||||
77
components/WeekNav.tsx
Normal file
77
components/WeekNav.tsx
Normal file
@@ -0,0 +1,77 @@
|
||||
"use client";
|
||||
|
||||
import { useRouter } from "next/navigation";
|
||||
|
||||
/** Props for the week navigation header. */
interface WeekNavProps {
  weekStart: string; // YYYY-MM-DD (Sunday)
  weekDates: string[]; // 7 dates YYYY-MM-DD, Sunday through Saturday
}
|
||||
|
||||
const MONTHS = [
|
||||
"Jan","Feb","Mar","Apr","May","Jun",
|
||||
"Jul","Aug","Sep","Oct","Nov","Dec",
|
||||
];
|
||||
|
||||
function formatLabel(dates: string[]): string {
|
||||
const s = new Date(dates[0] + "T00:00:00");
|
||||
const e = new Date(dates[6] + "T00:00:00");
|
||||
if (s.getFullYear() === e.getFullYear() && s.getMonth() === e.getMonth()) {
|
||||
return `${MONTHS[s.getMonth()]} ${s.getDate()}–${e.getDate()}, ${s.getFullYear()}`;
|
||||
}
|
||||
const startStr = `${MONTHS[s.getMonth()]} ${s.getDate()}`;
|
||||
const endStr = `${MONTHS[e.getMonth()]} ${e.getDate()}, ${e.getFullYear()}`;
|
||||
return `${startStr} – ${endStr}`;
|
||||
}
|
||||
|
||||
function shiftWeek(weekStart: string, delta: number): string {
|
||||
const d = new Date(weekStart + "T00:00:00");
|
||||
d.setDate(d.getDate() + delta * 7);
|
||||
return d.toISOString().slice(0, 10);
|
||||
}
|
||||
|
||||
export function WeekNav({ weekStart, weekDates }: WeekNavProps) {
|
||||
const router = useRouter();
|
||||
const nav = (delta: number) =>
|
||||
router.push(`/?week=${shiftWeek(weekStart, delta)}`);
|
||||
|
||||
return (
|
||||
<div style={{ display: "flex", alignItems: "center", gap: 16 }}>
|
||||
<button
|
||||
onClick={() => nav(-1)}
|
||||
aria-label="Previous week"
|
||||
style={btnStyle}
|
||||
>
|
||||
←
|
||||
</button>
|
||||
|
||||
<span style={{
|
||||
fontSize: "1.1rem",
|
||||
fontWeight: 600,
|
||||
color: "var(--color-text)",
|
||||
minWidth: 220,
|
||||
textAlign: "center",
|
||||
}}>
|
||||
{formatLabel(weekDates)}
|
||||
</span>
|
||||
|
||||
<button
|
||||
onClick={() => nav(1)}
|
||||
aria-label="Next week"
|
||||
style={btnStyle}
|
||||
>
|
||||
→
|
||||
</button>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
// Shared look for the prev/next arrow buttons.
const btnStyle: React.CSSProperties = {
  padding: "6px 14px",
  borderRadius: 6,
  border: "1px solid var(--color-border)",
  background: "var(--color-surface)",
  color: "var(--color-text-muted)",
  cursor: "pointer",
  fontSize: "1rem",
  lineHeight: 1,
};
|
||||
16
docker-compose.yml
Normal file
16
docker-compose.yml
Normal file
@@ -0,0 +1,16 @@
|
||||
services:
  web:
    # Pulls the prebuilt image from the registry; `docker compose build`
    # rebuilds locally from the Dockerfile instead.
    image: ${REGISTRY_URL}/josh/sixflagssupercalendar:latest
    build:
      context: .
      dockerfile: Dockerfile
    ports:
      - "3000:3000"
    volumes:
      # Persist the SQLite database (data/parks.db) across container restarts.
      - park_data:/app/data
    environment:
      - NODE_ENV=production
    restart: unless-stopped

volumes:
  park_data:
||||
16
eslint.config.mjs
Normal file
16
eslint.config.mjs
Normal file
@@ -0,0 +1,16 @@
|
||||
import { dirname } from "path";
import { fileURLToPath } from "url";
import { FlatCompat } from "@eslint/eslintrc";

// ESM has no __dirname; reconstruct it from import.meta.url.
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);

// FlatCompat bridges Next.js's legacy (eslintrc-style) shareable configs
// into ESLint's flat-config format.
const compat = new FlatCompat({
  baseDirectory: __dirname,
});

const eslintConfig = [
  ...compat.extends("next/core-web-vitals", "next/typescript"),
];

export default eslintConfig;
|
||||
218
lib/db.ts
Normal file
218
lib/db.ts
Normal file
@@ -0,0 +1,218 @@
|
||||
import Database from "better-sqlite3";
|
||||
import path from "path";
|
||||
import fs from "fs";
|
||||
|
||||
// SQLite file lives under ./data so it can be volume-mounted in Docker.
const DATA_DIR = path.join(process.cwd(), "data");
const DB_PATH = path.join(DATA_DIR, "parks.db");

export type DbInstance = Database.Database;

/**
 * Open (and lazily create) the SQLite database at data/parks.db.
 *
 * Ensures the data directory exists, enables WAL journaling for better
 * concurrent reader behavior, and creates the two tables on first use:
 *  - park_days:    one row per park per calendar day (open flag + hours label)
 *  - park_api_ids: mapping from our park IDs to the CloudFront API's numeric IDs
 *
 * Callers are responsible for closing the returned handle.
 */
export function openDb(): Database.Database {
  fs.mkdirSync(DATA_DIR, { recursive: true });
  const db = new Database(DB_PATH);
  db.pragma("journal_mode = WAL");
  db.exec(`
    CREATE TABLE IF NOT EXISTS park_days (
      park_id TEXT NOT NULL,
      date TEXT NOT NULL, -- YYYY-MM-DD
      is_open INTEGER NOT NULL DEFAULT 0,
      hours_label TEXT,
      scraped_at TEXT NOT NULL,
      PRIMARY KEY (park_id, date)
    );
    CREATE TABLE IF NOT EXISTS park_api_ids (
      park_id TEXT PRIMARY KEY,
      api_id INTEGER NOT NULL,
      api_abbreviation TEXT,
      api_name TEXT,
      discovered_at TEXT NOT NULL
    )
  `);
  return db;
}
|
||||
|
||||
export function upsertDay(
|
||||
db: Database.Database,
|
||||
parkId: string,
|
||||
date: string,
|
||||
isOpen: boolean,
|
||||
hoursLabel?: string
|
||||
) {
|
||||
db.prepare(`
|
||||
INSERT INTO park_days (park_id, date, is_open, hours_label, scraped_at)
|
||||
VALUES (?, ?, ?, ?, ?)
|
||||
ON CONFLICT (park_id, date) DO UPDATE SET
|
||||
is_open = excluded.is_open,
|
||||
hours_label = excluded.hours_label,
|
||||
scraped_at = excluded.scraped_at
|
||||
`).run(parkId, date, isOpen ? 1 : 0, hoursLabel ?? null, new Date().toISOString());
|
||||
}
|
||||
|
||||
export interface DayData {
|
||||
isOpen: boolean;
|
||||
hoursLabel: string | null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns scraped data for all parks across a date range.
|
||||
* Shape: { parkId: { 'YYYY-MM-DD': DayData } }
|
||||
* Missing dates mean that date hasn't been scraped yet (not necessarily closed).
|
||||
*/
|
||||
export function getDateRange(
|
||||
db: Database.Database,
|
||||
startDate: string,
|
||||
endDate: string
|
||||
): Record<string, Record<string, DayData>> {
|
||||
const rows = db
|
||||
.prepare(
|
||||
`SELECT park_id, date, is_open, hours_label
|
||||
FROM park_days
|
||||
WHERE date >= ? AND date <= ?`
|
||||
)
|
||||
.all(startDate, endDate) as {
|
||||
park_id: string;
|
||||
date: string;
|
||||
is_open: number;
|
||||
hours_label: string | null;
|
||||
}[];
|
||||
|
||||
const result: Record<string, Record<string, DayData>> = {};
|
||||
for (const row of rows) {
|
||||
if (!result[row.park_id]) result[row.park_id] = {};
|
||||
result[row.park_id][row.date] = {
|
||||
isOpen: row.is_open === 1,
|
||||
hoursLabel: row.hours_label,
|
||||
};
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/** Returns a map of parkId → boolean[] (index 0 = day 1) for a given month. */
|
||||
export function getMonthCalendar(
|
||||
db: Database.Database,
|
||||
year: number,
|
||||
month: number
|
||||
): Record<string, boolean[]> {
|
||||
const prefix = `${year}-${String(month).padStart(2, "0")}`;
|
||||
const rows = db
|
||||
.prepare(
|
||||
`SELECT park_id, date, is_open
|
||||
FROM park_days
|
||||
WHERE date LIKE ? || '-%'
|
||||
ORDER BY date`
|
||||
)
|
||||
.all(prefix) as { park_id: string; date: string; is_open: number }[];
|
||||
|
||||
const result: Record<string, boolean[]> = {};
|
||||
for (const row of rows) {
|
||||
if (!result[row.park_id]) result[row.park_id] = [];
|
||||
const day = parseInt(row.date.slice(8), 10);
|
||||
result[row.park_id][day - 1] = row.is_open === 1;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
// Scraped data for a park+month is considered fresh for one week.
const STALE_AFTER_MS = 7 * 24 * 60 * 60 * 1000; // 1 week

/**
 * True if the DB has data for this park+month scraped within the last week.
 * Returns false when the month has never been scraped at all, so callers can
 * use this as a single "skip or scrape?" check.
 */
export function isMonthScraped(
  db: Database.Database,
  parkId: string,
  year: number,
  month: number
): boolean {
  const prefix = `${year}-${String(month).padStart(2, "0")}`;
  const row = db
    .prepare(
      `SELECT MAX(scraped_at) AS last_scraped
       FROM park_days
       WHERE park_id = ? AND date LIKE ? || '-%'`
    )
    .get(parkId, prefix) as { last_scraped: string | null };

  // MAX() over zero rows yields NULL — i.e. never scraped.
  if (!row.last_scraped) return false;
  const ageMs = Date.now() - new Date(row.last_scraped).getTime();
  return ageMs < STALE_AFTER_MS;
}
|
||||
|
||||
export function getApiId(db: Database.Database, parkId: string): number | null {
|
||||
const row = db
|
||||
.prepare("SELECT api_id FROM park_api_ids WHERE park_id = ?")
|
||||
.get(parkId) as { api_id: number } | undefined;
|
||||
return row?.api_id ?? null;
|
||||
}
|
||||
|
||||
export function setApiId(
|
||||
db: Database.Database,
|
||||
parkId: string,
|
||||
apiId: number,
|
||||
apiAbbreviation?: string,
|
||||
apiName?: string
|
||||
) {
|
||||
db.prepare(`
|
||||
INSERT INTO park_api_ids (park_id, api_id, api_abbreviation, api_name, discovered_at)
|
||||
VALUES (?, ?, ?, ?, ?)
|
||||
ON CONFLICT (park_id) DO UPDATE SET
|
||||
api_id = excluded.api_id,
|
||||
api_abbreviation = excluded.api_abbreviation,
|
||||
api_name = excluded.api_name,
|
||||
discovered_at = excluded.discovered_at
|
||||
`).run(
|
||||
parkId,
|
||||
apiId,
|
||||
apiAbbreviation ?? null,
|
||||
apiName ?? null,
|
||||
new Date().toISOString()
|
||||
);
|
||||
}
|
||||
|
||||
/**
 * Find the next park+month to scrape.
 * Priority: never-scraped first, then oldest scraped_at.
 * Considers current month through monthsAhead months into the future.
 *
 * NOTE(review): issues one MAX(scraped_at) query per park+month combination
 * (|parkIds| × monthsAhead queries) — fine at this scale, revisit if the
 * registry grows significantly.
 */
export function getNextScrapeTarget(
  db: Database.Database,
  parkIds: string[],
  monthsAhead = 12
): { parkId: string; year: number; month: number } | null {
  const now = new Date();

  const candidates: {
    parkId: string;
    year: number;
    month: number;
    lastScraped: string | null;
  }[] = [];

  for (const parkId of parkIds) {
    for (let i = 0; i < monthsAhead; i++) {
      // Date(year, month + i, 1) rolls over year boundaries automatically.
      const d = new Date(now.getFullYear(), now.getMonth() + i, 1);
      const year = d.getFullYear();
      const month = d.getMonth() + 1;
      const prefix = `${year}-${String(month).padStart(2, "0")}`;

      // MAX() over zero rows is NULL → "never scraped".
      const row = db
        .prepare(
          `SELECT MAX(scraped_at) AS last_scraped
           FROM park_days
           WHERE park_id = ? AND date LIKE ? || '-%'`
        )
        .get(parkId, prefix) as { last_scraped: string | null };

      candidates.push({ parkId, year, month, lastScraped: row.last_scraped });
    }
  }

  // Never-scraped (null) first, then oldest scraped_at.
  // Ties keep enumeration order (registry order, then nearest month first),
  // since Array.prototype.sort is stable.
  candidates.sort((a, b) => {
    if (!a.lastScraped && !b.lastScraped) return 0;
    if (!a.lastScraped) return -1;
    if (!b.lastScraped) return 1;
    return a.lastScraped.localeCompare(b.lastScraped);
  });

  const top = candidates[0];
  return top ? { parkId: top.parkId, year: top.year, month: top.month } : null;
}
|
||||
255
lib/parks.ts
Normal file
255
lib/parks.ts
Normal file
@@ -0,0 +1,255 @@
|
||||
import type { Park } from "./scrapers/types";
|
||||
|
||||
/**
 * All parks listed on https://www.sixflags.com/select-park
 * Slugs verified from that page — used in:
 *   https://www.sixflags.com/{slug}/park-hours?date=YYYY-MM-DD
 *
 * Includes former Cedar Fair parks now under the Six Flags Entertainment Group umbrella.
 *
 * Each entry's `id` is this app's stable key (used in the DB); `slug` is the
 * URL path segment on sixflags.com. Coordinates/timezones are static data —
 * update here if a park relocates or the registry changes.
 */
export const PARKS: Park[] = [
  // ── Six Flags branded parks ──────────────────────────────────────────────
  {
    id: "greatadventure",
    name: "Six Flags Great Adventure",
    shortName: "Great Adventure",
    chain: "sixflags",
    slug: "greatadventure",
    location: { lat: 40.1376, lng: -74.4388, city: "Jackson", state: "NJ" },
    timezone: "America/New_York",
    website: "https://www.sixflags.com",
  },
  {
    id: "magicmountain",
    name: "Six Flags Magic Mountain",
    shortName: "Magic Mountain",
    chain: "sixflags",
    slug: "magicmountain",
    location: { lat: 34.4252, lng: -118.5973, city: "Valencia", state: "CA" },
    timezone: "America/Los_Angeles",
    website: "https://www.sixflags.com",
  },
  {
    id: "greatamerica",
    name: "Six Flags Great America",
    shortName: "Great America",
    chain: "sixflags",
    slug: "greatamerica",
    location: { lat: 42.3702, lng: -87.9358, city: "Gurnee", state: "IL" },
    timezone: "America/Chicago",
    website: "https://www.sixflags.com",
  },
  {
    id: "overgeorgia",
    name: "Six Flags Over Georgia",
    shortName: "Over Georgia",
    chain: "sixflags",
    slug: "overgeorgia",
    location: { lat: 33.7718, lng: -84.5494, city: "Austell", state: "GA" },
    timezone: "America/New_York",
    website: "https://www.sixflags.com",
  },
  {
    id: "overtexas",
    name: "Six Flags Over Texas",
    shortName: "Over Texas",
    chain: "sixflags",
    slug: "overtexas",
    location: { lat: 32.7554, lng: -97.0639, city: "Arlington", state: "TX" },
    timezone: "America/Chicago",
    website: "https://www.sixflags.com",
  },
  {
    id: "stlouis",
    name: "Six Flags St. Louis",
    shortName: "St. Louis",
    chain: "sixflags",
    slug: "stlouis",
    location: { lat: 38.5153, lng: -90.6751, city: "Eureka", state: "MO" },
    timezone: "America/Chicago",
    website: "https://www.sixflags.com",
  },
  {
    id: "fiestatexas",
    name: "Six Flags Fiesta Texas",
    shortName: "Fiesta Texas",
    chain: "sixflags",
    slug: "fiestatexas",
    location: { lat: 29.6054, lng: -98.622, city: "San Antonio", state: "TX" },
    timezone: "America/Chicago",
    website: "https://www.sixflags.com",
  },
  {
    id: "newengland",
    name: "Six Flags New England",
    shortName: "New England",
    chain: "sixflags",
    slug: "newengland",
    location: { lat: 42.037, lng: -72.6151, city: "Agawam", state: "MA" },
    timezone: "America/New_York",
    website: "https://www.sixflags.com",
  },
  {
    id: "discoverykingdom",
    name: "Six Flags Discovery Kingdom",
    shortName: "Discovery Kingdom",
    chain: "sixflags",
    slug: "discoverykingdom",
    location: { lat: 38.136, lng: -122.2314, city: "Vallejo", state: "CA" },
    timezone: "America/Los_Angeles",
    website: "https://www.sixflags.com",
  },
  {
    id: "mexico",
    name: "Six Flags Mexico",
    shortName: "Mexico",
    chain: "sixflags",
    slug: "mexico",
    location: { lat: 19.2982, lng: -99.2146, city: "Mexico City", state: "Mexico" },
    timezone: "America/Mexico_City",
    website: "https://www.sixflags.com",
  },
  {
    id: "greatescape",
    name: "Six Flags Great Escape",
    shortName: "Great Escape",
    chain: "sixflags",
    slug: "greatescape",
    location: { lat: 43.3537, lng: -73.6776, city: "Queensbury", state: "NY" },
    timezone: "America/New_York",
    website: "https://www.sixflags.com",
  },
  {
    id: "darienlake",
    name: "Six Flags Darien Lake",
    shortName: "Darien Lake",
    chain: "sixflags",
    slug: "darienlake",
    location: { lat: 42.9915, lng: -78.3895, city: "Darien Center", state: "NY" },
    timezone: "America/New_York",
    website: "https://www.sixflags.com",
  },
  // ── Former Cedar Fair theme parks ─────────────────────────────────────────
  {
    id: "cedarpoint",
    name: "Cedar Point",
    shortName: "Cedar Point",
    chain: "sixflags",
    slug: "cedarpoint",
    location: { lat: 41.4784, lng: -82.6832, city: "Sandusky", state: "OH" },
    timezone: "America/New_York",
    website: "https://www.sixflags.com",
  },
  {
    id: "knotts",
    name: "Knott's Berry Farm",
    shortName: "Knott's",
    chain: "sixflags",
    slug: "knotts",
    location: { lat: 33.8442, lng: -117.9989, city: "Buena Park", state: "CA" },
    timezone: "America/Los_Angeles",
    website: "https://www.sixflags.com",
  },
  {
    id: "canadaswonderland",
    name: "Canada's Wonderland",
    shortName: "Canada's Wonderland",
    chain: "sixflags",
    slug: "canadaswonderland",
    location: { lat: 43.8426, lng: -79.5396, city: "Vaughan", state: "ON" },
    timezone: "America/Toronto",
    website: "https://www.sixflags.com",
  },
  {
    id: "carowinds",
    name: "Carowinds",
    shortName: "Carowinds",
    chain: "sixflags",
    slug: "carowinds",
    location: { lat: 35.1043, lng: -80.9394, city: "Charlotte", state: "NC" },
    timezone: "America/New_York",
    website: "https://www.sixflags.com",
  },
  {
    id: "kingsdominion",
    name: "Kings Dominion",
    shortName: "Kings Dominion",
    chain: "sixflags",
    slug: "kingsdominion",
    location: { lat: 37.8357, lng: -77.4463, city: "Doswell", state: "VA" },
    timezone: "America/New_York",
    website: "https://www.sixflags.com",
  },
  {
    id: "kingsisland",
    name: "Kings Island",
    shortName: "Kings Island",
    chain: "sixflags",
    slug: "kingsisland",
    location: { lat: 39.3442, lng: -84.2696, city: "Mason", state: "OH" },
    timezone: "America/New_York",
    website: "https://www.sixflags.com",
  },
  {
    id: "valleyfair",
    name: "Valleyfair",
    shortName: "Valleyfair",
    chain: "sixflags",
    slug: "valleyfair",
    location: { lat: 44.7227, lng: -93.4691, city: "Shakopee", state: "MN" },
    timezone: "America/Chicago",
    website: "https://www.sixflags.com",
  },
  {
    id: "worldsoffun",
    name: "Worlds of Fun",
    shortName: "Worlds of Fun",
    chain: "sixflags",
    slug: "worldsoffun",
    location: { lat: 39.1947, lng: -94.5194, city: "Kansas City", state: "MO" },
    timezone: "America/Chicago",
    website: "https://www.sixflags.com",
  },
  {
    id: "miadventure",
    name: "Michigan's Adventure",
    shortName: "Michigan's Adventure",
    chain: "sixflags",
    slug: "miadventure",
    location: { lat: 43.3281, lng: -86.2694, city: "Muskegon", state: "MI" },
    timezone: "America/Detroit",
    website: "https://www.sixflags.com",
  },
  {
    id: "dorneypark",
    name: "Dorney Park",
    shortName: "Dorney Park",
    chain: "sixflags",
    slug: "dorneypark",
    location: { lat: 40.5649, lng: -75.6063, city: "Allentown", state: "PA" },
    timezone: "America/New_York",
    website: "https://www.sixflags.com",
  },
  {
    id: "cagreatamerica",
    name: "California's Great America",
    shortName: "CA Great America",
    chain: "sixflags",
    slug: "cagreatamerica",
    location: { lat: 37.3979, lng: -121.9751, city: "Santa Clara", state: "CA" },
    timezone: "America/Los_Angeles",
    website: "https://www.sixflags.com",
  },
  {
    id: "frontiercity",
    name: "Frontier City",
    shortName: "Frontier City",
    chain: "sixflags",
    slug: "frontiercity",
    location: { lat: 35.5739, lng: -97.4731, city: "Oklahoma City", state: "OK" },
    timezone: "America/Chicago",
    website: "https://www.sixflags.com",
  },
];

// Fast lookup by park id.
export const PARK_MAP = new Map<string, Park>(PARKS.map((p) => [p.id, p]));
|
||||
167
lib/scrapers/sixflags.ts
Normal file
167
lib/scrapers/sixflags.ts
Normal file
@@ -0,0 +1,167 @@
|
||||
/**
|
||||
* Six Flags scraper — calls the internal CloudFront operating-hours API directly.
|
||||
*
|
||||
* API: https://d18car1k0ff81h.cloudfront.net/operating-hours/park/{apiId}?date=YYYYMM
|
||||
* Returns full month data in one request — no browser needed.
|
||||
*
|
||||
* Each park has a numeric API ID that must be discovered first (see scripts/discover.ts).
|
||||
* Once stored in the DB, this scraper never touches a browser again.
|
||||
*
|
||||
* Rate limiting: on 429/503, exponential backoff (30s → 60s → 120s), MAX_RETRIES attempts.
|
||||
*/
|
||||
|
||||
// Internal CloudFront endpoint serving a whole month of hours per request.
const API_BASE = "https://d18car1k0ff81h.cloudfront.net/operating-hours/park";
// Retry budget for 429/503 responses; backoff doubles each attempt.
const MAX_RETRIES = 3;
const BASE_BACKOFF_MS = 30_000;

/** Thrown when the API kept rate-limiting us after exhausting all retries. */
export class RateLimitError extends Error {
  constructor(public readonly waitedMs: number) {
    super(`Rate limited — exhausted ${MAX_RETRIES} retries after ${waitedMs / 1000}s total wait`);
    this.name = "RateLimitError";
  }
}

// Browser-like headers — presumably to avoid bot filtering; TODO confirm
// the API actually requires them.
const HEADERS = {
  "User-Agent":
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 " +
    "(KHTML, like Gecko) Chrome/124.0.0.0 Safari/537.36",
  Accept: "application/json",
  "Accept-Language": "en-US,en;q=0.9",
  Referer: "https://www.sixflags.com/",
};
|
||||
|
||||
/** One day's normalized scrape result. */
export interface DayResult {
  date: string; // YYYY-MM-DD
  isOpen: boolean;
  hoursLabel?: string; // e.g. "10:30am – 8pm"; absent when closed or hours unknown
}
|
||||
|
||||
function sleep(ms: number) {
|
||||
return new Promise<void>((r) => setTimeout(r, ms));
|
||||
}
|
||||
|
||||
/** "04/05/2026" → "2026-04-05" */
|
||||
function parseApiDate(d: string): string {
|
||||
const [m, day, y] = d.split("/");
|
||||
return `${y}-${m}-${day}`;
|
||||
}
|
||||
|
||||
/** One open/close span within an operating entry. */
interface ApiOperatingItem {
  timeFrom: string; // "10:30" 24h
  timeTo: string; // "20:00" 24h
}

/** A category of hours for the day ("Park", "Special Event", etc.). */
interface ApiOperating {
  operatingTypeName: string; // "Park", "Special Event", etc.
  items: ApiOperatingItem[];
}

/** One calendar day as returned by the operating-hours API. */
interface ApiDay {
  date: string; // "MM/DD/YYYY" — see parseApiDate
  isParkClosed: boolean;
  operatings?: ApiOperating[]; // may be absent on closed days
}
|
||||
|
||||
/** "10:30" → "10:30am", "20:00" → "8pm", "12:00" → "12pm" */
|
||||
function fmt24(time: string): string {
|
||||
const [h, m] = time.split(":").map(Number);
|
||||
const period = h >= 12 ? "pm" : "am";
|
||||
const h12 = h % 12 || 12;
|
||||
return m === 0 ? `${h12}${period}` : `${h12}:${String(m).padStart(2, "0")}${period}`;
|
||||
}
|
||||
|
||||
/** Top-level month response from the operating-hours API. */
interface ApiResponse {
  parkId: number; // the API's numeric park ID
  parkAbbreviation: string;
  parkName: string;
  dates: ApiDay[];
}
|
||||
|
||||
async function fetchApi(url: string, attempt = 0, totalWaitedMs = 0): Promise<ApiResponse> {
|
||||
const res = await fetch(url, { headers: HEADERS });
|
||||
|
||||
if (res.status === 429 || res.status === 503) {
|
||||
const retryAfter = res.headers.get("Retry-After");
|
||||
const waitMs = retryAfter
|
||||
? parseInt(retryAfter) * 1000
|
||||
: BASE_BACKOFF_MS * Math.pow(2, attempt);
|
||||
console.log(
|
||||
` [rate-limited] HTTP ${res.status} — waiting ${waitMs / 1000}s (attempt ${attempt + 1}/${MAX_RETRIES})`
|
||||
);
|
||||
await sleep(waitMs);
|
||||
if (attempt < MAX_RETRIES) return fetchApi(url, attempt + 1, totalWaitedMs + waitMs);
|
||||
throw new RateLimitError(totalWaitedMs + waitMs);
|
||||
}
|
||||
|
||||
if (!res.ok) throw new Error(`HTTP ${res.status} for ${url}`);
|
||||
return res.json() as Promise<ApiResponse>;
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch operating hours for an entire month in a single API call.
|
||||
* apiId must be pre-discovered via scripts/discover.ts.
|
||||
*/
|
||||
export async function scrapeMonth(
|
||||
apiId: number,
|
||||
year: number,
|
||||
month: number
|
||||
): Promise<DayResult[]> {
|
||||
const dateParam = `${year}${String(month).padStart(2, "0")}`;
|
||||
const url = `${API_BASE}/${apiId}?date=${dateParam}`;
|
||||
|
||||
const data = await fetchApi(url);
|
||||
|
||||
return data.dates.map((d): DayResult => {
|
||||
const date = parseApiDate(d.date);
|
||||
// Prefer the "Park" operating entry; fall back to first entry
|
||||
const operating =
|
||||
d.operatings?.find((o) => o.operatingTypeName === "Park") ??
|
||||
d.operatings?.[0];
|
||||
const item = operating?.items?.[0];
|
||||
const hoursLabel =
|
||||
item?.timeFrom && item?.timeTo
|
||||
? `${fmt24(item.timeFrom)} – ${fmt24(item.timeTo)}`
|
||||
: undefined;
|
||||
// If the API says open but no hours are available, treat as closed
|
||||
const isOpen = !d.isParkClosed && hoursLabel !== undefined;
|
||||
return { date, isOpen, hoursLabel };
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch park info for a given API ID (used during discovery to identify park type).
|
||||
* Uses the current month so there's always some data.
|
||||
*/
|
||||
export async function fetchParkInfo(
|
||||
apiId: number
|
||||
): Promise<Pick<ApiResponse, "parkId" | "parkAbbreviation" | "parkName"> | null> {
|
||||
const now = new Date();
|
||||
const dateParam = `${now.getFullYear()}${String(now.getMonth() + 1).padStart(2, "0")}`;
|
||||
const url = `${API_BASE}/${apiId}?date=${dateParam}`;
|
||||
try {
|
||||
const data = await fetchApi(url);
|
||||
return {
|
||||
parkId: data.parkId,
|
||||
parkAbbreviation: data.parkAbbreviation,
|
||||
parkName: data.parkName,
|
||||
};
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/** Returns true if the API park name looks like a main theme park (not a water park or safari). */
|
||||
export function isMainThemePark(parkName: string): boolean {
|
||||
const lower = parkName.toLowerCase();
|
||||
const waterParkKeywords = [
|
||||
"hurricane harbor",
|
||||
"safari",
|
||||
"water park",
|
||||
"waterpark",
|
||||
"schlitterbahn",
|
||||
"wave pool",
|
||||
"splash",
|
||||
"aquatic",
|
||||
];
|
||||
return !waterParkKeywords.some((kw) => lower.includes(kw));
|
||||
}
|
||||
37
lib/scrapers/types.ts
Normal file
37
lib/scrapers/types.ts
Normal file
@@ -0,0 +1,37 @@
|
||||
/** Static registry entry for one amusement park. */
export interface Park {
  id: string; // stable internal key, used as park_id in the DB
  name: string;
  shortName: string; // compact display name for narrow columns
  chain: "sixflags" | string;
  slug: string; // URL path segment on the chain's website
  location: {
    lat: number;
    lng: number;
    city: string;
    state: string;
  };
  timezone: string; // IANA zone name, e.g. "America/Chicago"
  website: string;
}

/** Open/closed status for a single day of a month. */
export interface DayStatus {
  day: number; // 1-based day of month
  isOpen: boolean;
  hoursLabel?: string;
}

/** One park's full month of day statuses. */
export interface MonthCalendar {
  parkId: string;
  year: number;
  month: number; // 1-based
  days: DayStatus[];
}

/** Interface a chain-specific scraper must implement. */
export interface ScraperAdapter {
  readonly chain: string;
  getMonthCalendar(
    park: Park,
    year: number,
    month: number
  ): Promise<MonthCalendar>;
}
|
||||
9
next.config.ts
Normal file
9
next.config.ts
Normal file
@@ -0,0 +1,9 @@
|
||||
import type { NextConfig } from "next";

const nextConfig: NextConfig = {
  // better-sqlite3 is a native module — must not be bundled by webpack
  serverExternalPackages: ["better-sqlite3"],
  // Emit a self-contained server bundle for the Docker image.
  output: "standalone",
};

export default nextConfig;
|
||||
7490
package-lock.json
generated
Normal file
7490
package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
33
package.json
Normal file
33
package.json
Normal file
@@ -0,0 +1,33 @@
|
||||
{
|
||||
"name": "sixflags-super-calendar",
|
||||
"version": "0.1.0",
|
||||
"private": true,
|
||||
"scripts": {
|
||||
"dev": "next dev",
|
||||
"build": "next build",
|
||||
"start": "next start",
|
||||
"lint": "next lint",
|
||||
"scrape": "tsx scripts/scrape.ts",
|
||||
"scrape:force": "tsx scripts/scrape.ts --rescrape",
|
||||
"discover": "tsx scripts/discover.ts"
|
||||
},
|
||||
"dependencies": {
|
||||
"better-sqlite3": "^12.8.0",
|
||||
"next": "^15.3.0",
|
||||
"react": "^19.0.0",
|
||||
"react-dom": "^19.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@tailwindcss/postcss": "^4",
|
||||
"@types/better-sqlite3": "^7.6.13",
|
||||
"@types/node": "^22",
|
||||
"@types/react": "^19",
|
||||
"@types/react-dom": "^19",
|
||||
"eslint": "^9",
|
||||
"eslint-config-next": "^15.3.0",
|
||||
"playwright": "^1.59.1",
|
||||
"tailwindcss": "^4",
|
||||
"tsx": "^4.21.0",
|
||||
"typescript": "^5"
|
||||
}
|
||||
}
|
||||
7
postcss.config.mjs
Normal file
7
postcss.config.mjs
Normal file
@@ -0,0 +1,7 @@
|
||||
// Tailwind CSS v4 is wired in purely through its PostCSS plugin.
const config = {
  plugins: {
    "@tailwindcss/postcss": {},
  },
};

export default config;
|
||||
140
scripts/discover.ts
Normal file
140
scripts/discover.ts
Normal file
@@ -0,0 +1,140 @@
|
||||
/**
|
||||
* One-time discovery script — finds the CloudFront API ID for each park.
|
||||
*
|
||||
* Run this once before using scrape.ts:
|
||||
* npx tsx scripts/discover.ts
|
||||
*
|
||||
* For each park in the registry it:
|
||||
* 1. Opens the park's hours page in a headless browser
|
||||
* 2. Intercepts all calls to the operating-hours CloudFront API
|
||||
* 3. Identifies the main theme park ID (filters out water parks, safari, etc.)
|
||||
* 4. Stores the ID in the database
|
||||
*
|
||||
* Re-running is safe — already-discovered parks are skipped.
|
||||
*/
|
||||
|
||||
import { chromium } from "playwright";
|
||||
import { openDb, getApiId, setApiId, type DbInstance } from "../lib/db";
|
||||
import { PARKS } from "../lib/parks";
|
||||
import { fetchParkInfo, isMainThemePark } from "../lib/scrapers/sixflags";
|
||||
|
||||
const CLOUDFRONT_PATTERN = /operating-hours\/park\/(\d+)/;
|
||||
|
||||
async function discoverParkId(slug: string): Promise<number | null> {
|
||||
const browser = await chromium.launch({ headless: true });
|
||||
try {
|
||||
const context = await browser.newContext({
|
||||
userAgent:
|
||||
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 " +
|
||||
"(KHTML, like Gecko) Chrome/124.0.0.0 Safari/537.36",
|
||||
locale: "en-US",
|
||||
});
|
||||
const page = await context.newPage();
|
||||
|
||||
const capturedIds = new Set<number>();
|
||||
page.on("request", (req) => {
|
||||
const match = req.url().match(CLOUDFRONT_PATTERN);
|
||||
if (match) capturedIds.add(parseInt(match[1]));
|
||||
});
|
||||
|
||||
await page
|
||||
.goto(`https://www.sixflags.com/${slug}/park-hours?date=2026-05-01`, {
|
||||
waitUntil: "networkidle",
|
||||
timeout: 30_000,
|
||||
})
|
||||
.catch(() => null);
|
||||
|
||||
await context.close();
|
||||
|
||||
if (capturedIds.size === 0) return null;
|
||||
|
||||
// Check each captured ID — pick the main theme park (not water park / safari)
|
||||
for (const id of capturedIds) {
|
||||
const info = await fetchParkInfo(id);
|
||||
if (info && isMainThemePark(info.parkName)) {
|
||||
console.log(
|
||||
` → ID ${id} | ${info.parkAbbreviation} | ${info.parkName}`
|
||||
);
|
||||
return id;
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback: return the lowest ID (usually the main park)
|
||||
const fallback = Math.min(...capturedIds);
|
||||
console.log(` → fallback to lowest ID: ${fallback}`);
|
||||
return fallback;
|
||||
} finally {
|
||||
await browser.close();
|
||||
}
|
||||
}
|
||||
|
||||
function purgeRemovedParks(db: DbInstance) {
|
||||
const knownIds = new Set(PARKS.map((p) => p.id));
|
||||
|
||||
const staleParkIds = (
|
||||
db.prepare("SELECT DISTINCT park_id FROM park_api_ids").all() as { park_id: string }[]
|
||||
)
|
||||
.map((r) => r.park_id)
|
||||
.filter((id) => !knownIds.has(id));
|
||||
|
||||
if (staleParkIds.length === 0) return;
|
||||
|
||||
console.log(`\nRemoving ${staleParkIds.length} park(s) no longer in registry:`);
|
||||
for (const parkId of staleParkIds) {
|
||||
const days = (
|
||||
db.prepare("SELECT COUNT(*) AS n FROM park_days WHERE park_id = ?").get(parkId) as { n: number }
|
||||
).n;
|
||||
db.prepare("DELETE FROM park_days WHERE park_id = ?").run(parkId);
|
||||
db.prepare("DELETE FROM park_api_ids WHERE park_id = ?").run(parkId);
|
||||
console.log(` removed ${parkId} (${days} day rows deleted)`);
|
||||
}
|
||||
console.log();
|
||||
}
|
||||
|
||||
/**
 * Discovery entry point: for every registered park without a stored API ID,
 * sniff the ID from the website, record it, then print a summary table.
 * Safe to re-run — already-discovered parks are skipped, and failures for
 * one park don't stop the rest.
 */
async function main() {
  const db = openDb();

  purgeRemovedParks(db);

  for (const park of PARKS) {
    const existing = getApiId(db, park.id);
    if (existing !== null) {
      console.log(`${park.name}: already known (API ID ${existing}) — skip`);
      continue;
    }

    process.stdout.write(`${park.name} (${park.slug})... `);

    try {
      const apiId = await discoverParkId(park.slug);
      if (apiId === null) {
        console.log("FAILED — no API IDs captured");
        continue;
      }

      // Fetch full info to store name/abbreviation
      const info = await fetchParkInfo(apiId);
      setApiId(db, park.id, apiId, info?.parkAbbreviation, info?.parkName);
      console.log(`done (ID ${apiId})`);
    } catch (err) {
      // Log and move on — one broken park shouldn't abort the whole run.
      console.log(`ERROR: ${err}`);
    }

    // Small delay between parks to be polite
    await new Promise((r) => setTimeout(r, 2000));
  }

  // Print summary
  console.log("\n── Discovered IDs ──");
  for (const park of PARKS) {
    const id = getApiId(db, park.id);
    console.log(` ${park.id.padEnd(30)} ${id ?? "NOT FOUND"}`);
  }

  db.close();
}

// Top-level runner: surface any unhandled failure with a non-zero exit code.
main().catch((err) => {
  console.error("Fatal:", err);
  process.exit(1);
});
|
||||
126
scripts/scrape.ts
Normal file
126
scripts/scrape.ts
Normal file
@@ -0,0 +1,126 @@
|
||||
/**
|
||||
* Scrape job — fetches 2026 operating hours for all parks from the Six Flags API.
|
||||
*
|
||||
* Prerequisite: run `npm run discover` first to populate API IDs.
|
||||
*
|
||||
* Run once and leave it:
|
||||
* npm run scrape
|
||||
*
|
||||
* Skips park+month combos scraped within the last week. Re-run to resume after interruption.
|
||||
* To force a full re-scrape:
|
||||
* npm run scrape:force
|
||||
*
|
||||
* Rate limiting: backs off automatically (30s → 60s → 120s per retry).
|
||||
* After exhausting retries, skips that park+month and continues.
|
||||
*/
|
||||
|
||||
import { openDb, upsertDay, getApiId, isMonthScraped } from "../lib/db";
|
||||
import { PARKS } from "../lib/parks";
|
||||
import { scrapeMonth, RateLimitError } from "../lib/scrapers/sixflags";
|
||||
|
||||
const YEAR = 2026;
|
||||
const MONTHS = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12];
|
||||
const DELAY_MS = 1000; // between successful API calls
|
||||
const FORCE = process.argv.includes("--rescrape");
|
||||
|
||||
function monthLabel(m: number) {
|
||||
return `${YEAR}-${String(m).padStart(2, "0")}`;
|
||||
}
|
||||
|
||||
function pad(n: number, width: number) {
|
||||
return String(n).padStart(width, " ");
|
||||
}
|
||||
|
||||
async function sleep(ms: number) {
|
||||
return new Promise<void>((r) => setTimeout(r, ms));
|
||||
}
|
||||
|
||||
async function main() {
|
||||
const db = openDb();
|
||||
|
||||
// Separate parks with known API IDs from those needing discovery
|
||||
const ready = PARKS.filter((p) => getApiId(db, p.id) !== null);
|
||||
const needsDiscovery = PARKS.filter((p) => getApiId(db, p.id) === null);
|
||||
|
||||
if (needsDiscovery.length > 0) {
|
||||
console.log(
|
||||
`⚠ ${needsDiscovery.length} parks have no API ID — run \`npm run discover\` first:\n` +
|
||||
needsDiscovery.map((p) => ` ${p.id}`).join("\n") +
|
||||
"\n"
|
||||
);
|
||||
}
|
||||
|
||||
if (ready.length === 0) {
|
||||
console.log("No parks ready to scrape. Run: npm run discover");
|
||||
db.close();
|
||||
return;
|
||||
}
|
||||
|
||||
// Build the full work queue: month × park
|
||||
const queue: { month: number; park: (typeof PARKS)[0]; apiId: number }[] = [];
|
||||
for (const month of MONTHS) {
|
||||
for (const park of ready) {
|
||||
if (!FORCE && isMonthScraped(db, park.id, YEAR, month)) continue;
|
||||
queue.push({ month, park, apiId: getApiId(db, park.id)! });
|
||||
}
|
||||
}
|
||||
|
||||
const total = MONTHS.length * ready.length;
|
||||
const skip = total - queue.length;
|
||||
console.log(
|
||||
`Scraping ${YEAR} — ${ready.length} parks × 12 months = ${total} total\n` +
|
||||
`Skipping ${skip} already-scraped. ${queue.length} to fetch.\n`
|
||||
);
|
||||
|
||||
if (queue.length === 0) {
|
||||
console.log("Nothing to do. To force a full re-scrape: npm run scrape:force");
|
||||
db.close();
|
||||
return;
|
||||
}
|
||||
|
||||
let done = 0;
|
||||
let errors = 0;
|
||||
|
||||
for (const { month, park, apiId } of queue) {
|
||||
const counter = `[${pad(done + 1, queue.length.toString().length)}/${queue.length}]`;
|
||||
process.stdout.write(`${counter} ${park.shortName.padEnd(22)} ${monthLabel(month)} ... `);
|
||||
|
||||
try {
|
||||
const days = await scrapeMonth(apiId, YEAR, month);
|
||||
const insertAll = db.transaction(() => {
|
||||
for (const d of days) upsertDay(db, park.id, d.date, d.isOpen, d.hoursLabel);
|
||||
});
|
||||
insertAll();
|
||||
|
||||
const openCount = days.filter((d) => d.isOpen).length;
|
||||
console.log(`${openCount}/${days.length} open`);
|
||||
done++;
|
||||
|
||||
if (done < queue.length) await sleep(DELAY_MS);
|
||||
} catch (err) {
|
||||
if (err instanceof RateLimitError) {
|
||||
console.log(`RATE LIMITED — skipping (re-run to retry)`);
|
||||
} else {
|
||||
console.log(`ERROR: ${err instanceof Error ? err.message : err}`);
|
||||
}
|
||||
errors++;
|
||||
}
|
||||
}
|
||||
|
||||
const summary = [
|
||||
`\n── Summary ─────────────────────────────`,
|
||||
` Fetched : ${done}`,
|
||||
` Skipped : ${skip}`,
|
||||
` Errors : ${errors}`,
|
||||
` Total : ${total}`,
|
||||
];
|
||||
if (errors > 0) summary.push(`\nRe-run to retry failed months.`);
|
||||
console.log(summary.join("\n"));
|
||||
|
||||
db.close();
|
||||
}
|
||||
|
||||
main().catch((err) => {
|
||||
console.error("Fatal:", err);
|
||||
process.exit(1);
|
||||
});
|
||||
27
tsconfig.json
Normal file
27
tsconfig.json
Normal file
@@ -0,0 +1,27 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"target": "ES2017",
|
||||
"lib": ["dom", "dom.iterable", "esnext"],
|
||||
"allowJs": true,
|
||||
"skipLibCheck": true,
|
||||
"strict": true,
|
||||
"noEmit": true,
|
||||
"esModuleInterop": true,
|
||||
"module": "esnext",
|
||||
"moduleResolution": "bundler",
|
||||
"resolveJsonModule": true,
|
||||
"isolatedModules": true,
|
||||
"jsx": "preserve",
|
||||
"incremental": true,
|
||||
"plugins": [
|
||||
{
|
||||
"name": "next"
|
||||
}
|
||||
],
|
||||
"paths": {
|
||||
"@/*": ["./*"]
|
||||
}
|
||||
},
|
||||
"include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts"],
|
||||
"exclude": ["node_modules"]
|
||||
}
|
||||
Reference in New Issue
Block a user