/**
 * Scrape job — fetches 2026 operating hours for all parks from the Six Flags API.
 *
 * Prerequisite: run `npm run discover` first to populate API IDs.
 *
 * npm run scrape — skips months scraped within the last 7 days
 * npm run scrape:force — re-scrapes everything
 */

import { openDb, upsertDay, getApiId, isMonthScraped } from "../lib/db";
|
|
import { PARKS } from "../lib/parks";
|
|
import { scrapeMonth, RateLimitError } from "../lib/scrapers/sixflags";
|
|
|
|
const YEAR = 2026;
|
|
const MONTHS = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12];
|
|
const DELAY_MS = 1000;
|
|
const FORCE = process.argv.includes("--rescrape");
|
|
|
|
async function sleep(ms: number) {
|
|
return new Promise<void>((r) => setTimeout(r, ms));
|
|
}
|
|
|
|
async function main() {
|
|
const db = openDb();
|
|
|
|
const ready = PARKS.filter((p) => getApiId(db, p.id) !== null);
|
|
const needsDiscovery = PARKS.filter((p) => getApiId(db, p.id) === null);
|
|
|
|
if (needsDiscovery.length > 0) {
|
|
console.log(
|
|
`⚠ ${needsDiscovery.length} park(s) need discovery first: ${needsDiscovery.map((p) => p.id).join(", ")}\n`
|
|
);
|
|
}
|
|
|
|
if (ready.length === 0) {
|
|
console.log("No parks ready — run: npm run discover");
|
|
db.close();
|
|
return;
|
|
}
|
|
|
|
console.log(`Scraping ${YEAR} — ${ready.length} parks\n`);
|
|
|
|
let totalFetched = 0;
|
|
let totalSkipped = 0;
|
|
let totalErrors = 0;
|
|
|
|
for (const park of ready) {
|
|
const apiId = getApiId(db, park.id)!;
|
|
const label = park.shortName.padEnd(22);
|
|
|
|
let openDays = 0;
|
|
let fetched = 0;
|
|
let skipped = 0;
|
|
let errors = 0;
|
|
|
|
process.stdout.write(` ${label} `);
|
|
|
|
for (const month of MONTHS) {
|
|
if (!FORCE && isMonthScraped(db, park.id, YEAR, month)) {
|
|
process.stdout.write("·");
|
|
skipped++;
|
|
continue;
|
|
}
|
|
|
|
try {
|
|
const days = await scrapeMonth(apiId, YEAR, month);
|
|
db.transaction(() => {
|
|
for (const d of days) upsertDay(db, park.id, d.date, d.isOpen, d.hoursLabel, d.specialType);
|
|
})();
|
|
openDays += days.filter((d) => d.isOpen).length;
|
|
fetched++;
|
|
process.stdout.write("█");
|
|
if (fetched + skipped + errors < MONTHS.length) await sleep(DELAY_MS);
|
|
} catch (err) {
|
|
if (err instanceof RateLimitError) {
|
|
process.stdout.write("✗");
|
|
} else {
|
|
process.stdout.write("✗");
|
|
console.error(`\n error: ${err instanceof Error ? err.message : err}`);
|
|
}
|
|
errors++;
|
|
}
|
|
}
|
|
|
|
totalFetched += fetched;
|
|
totalSkipped += skipped;
|
|
totalErrors += errors;
|
|
|
|
if (errors > 0) {
|
|
console.log(` ${errors} error(s)`);
|
|
} else if (skipped === MONTHS.length) {
|
|
console.log(" up to date");
|
|
} else {
|
|
console.log(` ${openDays} open days`);
|
|
}
|
|
}
|
|
|
|
console.log(`\n ${totalFetched} fetched ${totalSkipped} skipped ${totalErrors} errors`);
|
|
if (totalErrors > 0) console.log(" Re-run to retry failed months.");
|
|
|
|
db.close();
|
|
}
|
|
|
|
main().catch((err) => {
|
|
console.error("Fatal:", err);
|
|
process.exit(1);
|
|
});
|