// OTIVM server — OTIVM-IV
// Per-player SQLite integration.
// TESSERA world database (data/otivm.sqlite3) — read-only, unchanged.
// Player databases (data/saves/{token}.sqlite3) — one per player,
// write-safe (single writer), created from data/create_player_db.sql
// on first access.
// JSON save files (data/saves/{token}.json) — never deleted, migrated
// transparently on first SQLite access if present.
// Frontend interface unchanged — GET/POST /api/save/:token same as before.
//
// Change 3 (OTIVM-IV): background_id from the frontend is now a real
// canonical identifier (e.g. 'former_legionary'). On new player creation,
// seed actor_parameters from background_starting_values for that background.
// The previous default of 'unknown' is kept only as a fallback for legacy
// saves that predate Change 3.

import Fastify from 'fastify'
import fastifyStatic from '@fastify/static'
import { readFile, mkdir } from 'fs/promises'
import { join, dirname } from 'path'
import { fileURLToPath } from 'url'
import { existsSync, readFileSync } from 'fs'
import Database from 'better-sqlite3'

const __dirname = dirname(fileURLToPath(import.meta.url))
const ROOT = join(__dirname, '..')
const DIST = join(ROOT, 'dist')
const SAVES_DIR = join(ROOT, 'data', 'saves')
const DB_PATH = join(ROOT, 'data', 'otivm.sqlite3')
const SCHEMA_PATH = join(ROOT, 'data', 'create_player_db.sql')

await mkdir(SAVES_DIR, { recursive: true })

// ── TESSERA world database (read-only) ──────────────────────────────────────

const db = new Database(DB_PATH, { readonly: true })

const stmtEpoch = db.prepare(
  'SELECT sl_offset_cm FROM paleo_epochs WHERE epoch_key = ?'
)

const stmtH7 = db.prepare(`
  SELECT h7,
         AVG(lat) AS lat,
         AVG(lon) AS lon,
         SUM(CASE WHEN elev_cm > ? THEN 1 ELSE 0 END) AS h7_land,
         COUNT(*) AS h9_total
  FROM tessera_cells
  WHERE h5 = ? AND status = 2
  GROUP BY h7
`)

function h3HexToInt(hexStr) {
  return BigInt('0x' + hexStr)
}

// ── Per-player database ──────────────────────────────────────────────────────
//
// One SQLite file per player at data/saves/{token}.sqlite3.
// Created from data/create_player_db.sql on first access.
// Write-safe: only one process writes to a given file at a time.
// The better-sqlite3 instance is opened, used, and closed per request —
// no persistent connection pool needed at 128 concurrent players.

// Read the schema once at startup — better-sqlite3 exec() requires the full SQL string.
const PLAYER_SCHEMA_SQL = readFileSync(SCHEMA_PATH, 'utf8')

// Open (or create) the player database for a given token.
// Returns a better-sqlite3 Database instance — caller must close it.
function openPlayerDb(token) {
  const path = join(SAVES_DIR, `${token}.sqlite3`)
  const isNew = !existsSync(path)
  const pdb = new Database(path)
  pdb.pragma('journal_mode = WAL')
  pdb.pragma('foreign_keys = ON')
  if (isNew) {
    // Run the full schema — idempotent due to IF NOT EXISTS clauses.
    pdb.exec(PLAYER_SCHEMA_SQL)
  }
  return pdb
}

// ── JSON → SQLite migration ──────────────────────────────────────────────────
//
// Called once per token when a .json exists but no .sqlite3 exists.
// Imports the flat JSON game state into the events and actor_parameters
// tables so the behavioral record is preserved.
// The JSON file is left in place — never deleted.
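//
// For reference, a legacy JSON save looks roughly like this. Field names are
// taken from what the migration below actually reads; the values are made up:
//
//   {
//     "background_id": "former_legionary",
//     "actor_name": "Mercator",
//     "den": 40,
//     "aut": 6,
//     "events": [
//       { "type": "dispatch_complete", "route_id": "via_appia",
//         "timestamp_utc": "2025-01-01T10:00:00.000Z" }
//     ]
//   }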
function migrateJsonToSqlite(token, pdb, json) {
  const now = new Date().toISOString()
  const actorId = token
  const sessionId = token

  // actor_profile — one row anchoring the actor
  const background = json.background_id || 'unknown'
  const name = json.actor_name || 'Mercator'
  const epoch = 'roman_14bce'
  const insertProfile = pdb.prepare(`
    INSERT OR IGNORE INTO actor_profile
      (actor_id, session_id, background_id, actor_name, epoch, schema_version, recorded_at)
    VALUES (?, ?, ?, ?, ?, 3, ?)
  `)
  insertProfile.run(actorId, sessionId, background, name, epoch, now)

  // Seed actor_parameters from background_starting_values if not already seeded
  const seedParams = pdb.prepare(`
    INSERT OR IGNORE INTO actor_parameters
      (actor_id, parameter_token, scope, layer, value_true, value_perceived,
       value_social, confidence_tag, observable_level, drift_source, recorded_at)
    SELECT ?, parameter_token, 'actor',
           CASE WHEN parameter_token IN
             ('auctoritas','clientela','liquiditas','fama',
              'disciplina','mercatus_scientia','itineris_scientia','ius_accessus',
              'periculum_tolerantia','negotiatio','litterae','officia_burden')
             THEN 'roman' ELSE 'universal' END,
           value_true, value_perceived, NULL,
           confidence_tag, observable_level, 'migration', ?
    FROM background_starting_values
    WHERE background_id = ?
  `)
  seedParams.run(actorId, now, background)

  // Supersede a seed row so the override below is the only live row.
  const supersedeSeed = pdb.prepare(`
    UPDATE actor_parameters SET superseded_at = ?
    WHERE actor_id = ? AND parameter_token = ? AND superseded_at IS NULL
  `)

  // Override liquiditas with actual value from JSON if available
  if (json.den !== undefined) {
    supersedeSeed.run(now, actorId, 'liquiditas')
    const upsertLiq = pdb.prepare(`
      INSERT INTO actor_parameters
        (actor_id, parameter_token, scope, layer, value_true, value_perceived,
         confidence_tag, observable_level, drift_source, recorded_at)
      VALUES (?, 'liquiditas', 'actor', 'roman', ?, ?, 'measured', 'full', 'migration', ?)
    `)
    upsertLiq.run(actorId, String(json.den), String(json.den), now)
  }

  // Override auctoritas with actual value from JSON if available
  if (json.aut !== undefined) {
    supersedeSeed.run(now, actorId, 'auctoritas')
    const upsertAut = pdb.prepare(`
      INSERT INTO actor_parameters
        (actor_id, parameter_token, scope, layer, value_true, value_perceived,
         value_social, confidence_tag, observable_level, drift_source, recorded_at)
      VALUES (?, 'auctoritas', 'actor', 'roman', ?, ?, ?, 'indicated', 'partial', 'migration', ?)
    `)
    const band = autBand(json.aut)
    upsertAut.run(actorId, band, band, band, now)
  }

  // Import legacy events[] array into the events table
  const insertEvent = pdb.prepare(`
    INSERT INTO events (actor_id, event_type, ref_id, ref_type, payload, recorded_at)
    VALUES (?, ?, ?, ?, ?, ?)
  `)
  const eventsArr = json.events || []
  for (const ev of eventsArr) {
    insertEvent.run(
      actorId,
      ev.type || 'unknown',
      ev.route_id || null,
      ev.route_id ? 'venture' : null,
      JSON.stringify(ev),
      ev.timestamp_utc || now
    )
  }

  // Record the migration itself as an event
  insertEvent.run(
    actorId,
    'session_start',
    null,
    null,
    JSON.stringify({ source: 'json_migration', schema_version: 3 }),
    now
  )
}

// Convert raw auctoritas integer (OTIVM-I/II) to ordinal band.
function autBand(aut) {
  if (aut >= 30) return 'distinguished'
  if (aut >= 15) return 'high'
  if (aut >= 5) return 'medium'
  return 'low'
}

// ── Read player state from SQLite ────────────────────────────────────────────
//
// Returns the current game state as the frontend expects it.
// Derives the flat JSON shape from the relational records so the
// frontend requires zero changes.
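//
// For orientation, the derived shape looks like this (values illustrative,
// keys taken from the return object below):
//
//   {
//     token: 'deadbeef', background_id: 'former_legionary',
//     actor_name: 'Mercator', den: 55, aut: 7, chapter: 2,
//     dispatches: 3, route_dispatches: { via_appia: 3 },
//     journal_seen: [{ routeId: 'via_appia', dispatch: 1 }],
//     active_dispatch: null, events: [...],
//     created_at: '...', schema_version: 3
//   }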
function readPlayerState(token, pdb) {
  // Current parameter values — most recent non-superseded row per token
  const params = pdb.prepare(`
    SELECT parameter_token, value_true, value_perceived, value_social,
           confidence_tag, observable_level
    FROM actor_parameters
    WHERE actor_id = ? AND superseded_at IS NULL
    ORDER BY recorded_at DESC
  `).all(token)

  const paramMap = {}
  for (const p of params) {
    // Rows arrive newest-first; keep only the first (most recent) per token.
    if (!(p.parameter_token in paramMap)) paramMap[p.parameter_token] = p
  }

  const den = parseInt(paramMap['liquiditas']?.value_true || '0', 10)
  const autStr = paramMap['auctoritas']?.value_true || 'low'
  const aut = autToInt(autStr)

  // Events — chronological
  const events = pdb.prepare(`
    SELECT event_type AS type, ref_id AS route_id, payload,
           recorded_at AS timestamp_utc
    FROM events
    WHERE actor_id = ?
    ORDER BY recorded_at ASC
  `).all(token)

  // Derive dispatches and route_dispatches from event log
  let dispatches = 0
  const route_dispatches = {}
  const journal_seen = []
  let active_dispatch = null
  let chapter = 1

  for (const ev of events) {
    if (ev.type === 'dispatch_complete' && ev.route_id) {
      dispatches++
      route_dispatches[ev.route_id] = (route_dispatches[ev.route_id] || 0) + 1
    }
    if (ev.type === 'journal_unlock' && ev.route_id) {
      journal_seen.push({
        routeId: ev.route_id,
        dispatch: route_dispatches[ev.route_id] || 1,
      })
    }
    if (ev.type === 'venture_start' || ev.type === 'dispatch_start') {
      try {
        const p = JSON.parse(ev.payload || '{}')
        active_dispatch = {
          route_id: ev.route_id || p.route_id,
          started_utc: ev.timestamp_utc,
          duration_ms: p.duration_ms || 0,
        }
      } catch { /* ignore malformed payload */ }
    }
    if (ev.type === 'venture_complete' || ev.type === 'dispatch_complete') {
      active_dispatch = null
    }
  }

  // chapter derived from den + aut
  if (aut >= 30 && den >= 800) chapter = 5
  else if (aut >= 15 && den >= 350) chapter = 4
  else if (aut >= 5 && den >= 120) chapter = 3
  else if (den >= 40) chapter = 2

  // actor_profile
  const profile = pdb.prepare(
    'SELECT * FROM actor_profile WHERE actor_id = ? ORDER BY recorded_at DESC LIMIT 1'
  ).get(token)

  return {
    token,
    background_id: profile?.background_id || 'unknown',
    actor_name: profile?.actor_name || 'Mercator',
    den,
    aut,
    chapter,
    dispatches,
    route_dispatches,
    journal_seen,
    active_dispatch,
    events: events.map(e => ({
      type: e.type,
      route_id: e.route_id,
      timestamp_utc: e.timestamp_utc,
    })),
    created_at: profile?.recorded_at || new Date().toISOString(),
    schema_version: 3,
  }
}

// Convert ordinal band back to a representative integer for chapter logic.
function autToInt(band) {
  switch (band) {
    case 'distinguished': return 35
    case 'high': return 20
    case 'medium': return 7
    default: return 1
  }
}

// ── Write player state to SQLite ─────────────────────────────────────────────
//
// Receives the flat JSON game state from the frontend.
// Writes parameter changes as new actor_parameters rows (append-only).
// Appends new events to the events table.
// Marks superseded parameter rows.
//
// On new player creation (no actor_profile row yet):
// - Uses background_id from the request body (sent by frontend after
//   player chooses on the Prologue tab).
// - Seeds actor_parameters from background_starting_values for that background.
// - Sets liquiditas to the background's starting_den value from the body.
// - Falls back to 'unknown' only if no background_id is present (legacy saves).
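//
// A typical request body, assuming the frontend still sends the flat
// OTIVM-I/II shape (illustrative values):
//
//   { "background_id": "former_legionary", "actor_name": "Mercator",
//     "den": 55, "aut": 6,
//     "events": [{ "type": "dispatch_complete", "route_id": "via_appia",
//                  "timestamp_utc": "2025-01-01T12:00:00.000Z" }] }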
function writePlayerState(token, pdb, body) {
  const now = new Date().toISOString()
  const actorId = token

  // Ensure actor_profile exists
  const profile = pdb.prepare(
    'SELECT actor_id FROM actor_profile WHERE actor_id = ? LIMIT 1'
  ).get(actorId)

  if (!profile) {
    // New player — use the background_id sent by the frontend.
    // The Prologue tab ensures this is a canonical identifier before sending.
    const backgroundId = body.background_id || 'unknown'
    const actorName = body.actor_name || 'Mercator'

    pdb.prepare(`
      INSERT OR IGNORE INTO actor_profile
        (actor_id, session_id, background_id, actor_name, epoch, schema_version, recorded_at)
      VALUES (?, ?, ?, ?, 'roman_14bce', 4, ?)
    `).run(actorId, actorId, backgroundId, actorName, now)

    // Seed all parameters from background_starting_values for chosen background.
    // auctoritas gets value_social seeded from the same row's value_true.
    pdb.prepare(`
      INSERT OR IGNORE INTO actor_parameters
        (actor_id, parameter_token, scope, layer, value_true, value_perceived,
         value_social, confidence_tag, observable_level, drift_source, recorded_at)
      SELECT ?, parameter_token, 'actor',
             CASE WHEN parameter_token IN
               ('auctoritas','clientela','liquiditas','fama',
                'disciplina','mercatus_scientia','itineris_scientia','ius_accessus',
                'periculum_tolerantia','negotiatio','litterae','officia_burden')
               THEN 'roman' ELSE 'universal' END,
             value_true, value_perceived,
             CASE WHEN parameter_token = 'auctoritas' THEN value_true ELSE NULL END,
             confidence_tag, observable_level, 'initial', ?
      FROM background_starting_values
      WHERE background_id = ?
    `).run(actorId, now, backgroundId)

    // Override liquiditas with the den value from the body (which was set
    // from BACKGROUNDS[x].starting_den on the frontend). This is the
    // authoritative starting value — the seed row is superseded immediately.
    if (body.den !== undefined) {
      // Supersede the seed row
      pdb.prepare(`
        UPDATE actor_parameters SET superseded_at = ?
        WHERE actor_id = ? AND parameter_token = 'liquiditas' AND superseded_at IS NULL
      `).run(now, actorId)
      // Insert the confirmed starting value
      pdb.prepare(`
        INSERT INTO actor_parameters
          (actor_id, parameter_token, scope, layer, value_true, value_perceived,
           confidence_tag, observable_level, drift_source, recorded_at)
        VALUES (?, 'liquiditas', 'actor', 'roman', ?, ?, 'measured', 'full', 'initial', ?)
      `).run(actorId, String(body.den), String(body.den), now)
    }
  }

  // Update liquiditas if den changed
  if (body.den !== undefined) {
    updateParam(pdb, actorId, 'liquiditas', String(body.den), String(body.den), now)
  }

  // Update auctoritas if aut changed
  if (body.aut !== undefined) {
    const band = autBand(body.aut)
    updateParamWithSocial(pdb, actorId, 'auctoritas', band, band, band, now)
  }

  // Append new events — only those not already recorded
  const lastRecorded = pdb.prepare(`
    SELECT recorded_at FROM events
    WHERE actor_id = ?
    ORDER BY recorded_at DESC LIMIT 1
  `).get(actorId)
  const lastTs = lastRecorded?.recorded_at || '1970-01-01T00:00:00.000Z'

  const insertEvent = pdb.prepare(`
    INSERT INTO events (actor_id, event_type, ref_id, ref_type, payload, recorded_at)
    VALUES (?, ?, ?, ?, ?, ?)
  `)
  for (const ev of (body.events || [])) {
    const ts = ev.timestamp_utc || now
    if (ts > lastTs) {
      insertEvent.run(
        actorId,
        ev.type || 'unknown',
        ev.route_id || null,
        ev.route_id ? 'venture' : null,
        JSON.stringify(ev),
        ts
      )
    }
  }
}
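// Illustration of the append-only pattern the two helpers below implement,
// shown for one parameter (timestamps made up):
//
//   parameter_token  value_true  recorded_at            superseded_at
//   liquiditas       '40'        2025-01-01T10:00:00Z   2025-01-01T11:00:00Z
//   liquiditas       '55'        2025-01-01T11:00:00Z   NULL   <- current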
// Append a new actor_parameters row and supersede the previous one.
function updateParam(pdb, actorId, token, valueTrue, valuePerceived, now) {
  pdb.prepare(`
    UPDATE actor_parameters SET superseded_at = ?
    WHERE actor_id = ? AND parameter_token = ? AND superseded_at IS NULL
  `).run(now, actorId, token)
  pdb.prepare(`
    INSERT INTO actor_parameters
      (actor_id, parameter_token, scope, layer, value_true, value_perceived,
       confidence_tag, observable_level, drift_source, recorded_at)
    VALUES (?, ?, 'actor', 'roman', ?, ?, 'measured', 'full', 'game_state', ?)
  `).run(actorId, token, valueTrue, valuePerceived, now)
}

function updateParamWithSocial(pdb, actorId, token, valueTrue, valuePerceived, valueSocial, now) {
  pdb.prepare(`
    UPDATE actor_parameters SET superseded_at = ?
    WHERE actor_id = ? AND parameter_token = ? AND superseded_at IS NULL
  `).run(now, actorId, token)
  pdb.prepare(`
    INSERT INTO actor_parameters
      (actor_id, parameter_token, scope, layer, value_true, value_perceived,
       value_social, confidence_tag, observable_level, drift_source, recorded_at)
    VALUES (?, ?, 'actor', 'roman', ?, ?, ?, 'indicated', 'partial', 'game_state', ?)
  `).run(actorId, token, valueTrue, valuePerceived, valueSocial, now)
}

// ── Fastify server ───────────────────────────────────────────────────────────

const fastify = Fastify({ logger: false })

await fastify.register(fastifyStatic, {
  root: DIST,
  prefix: '/',
})

// GET /api/map/:h5/:epoch
// Returns H7-aggregated land/sea classification with real centroids.
//   h5param:  H3 res-5 hex string (e.g. '851e805bfffffff')
//   epochKey: named epoch from paleo_epochs (e.g. 'roman_14bce')
// Response: { epoch_key, sl_offset_cm, h5, cells: [{ h7, lat, lon, h7_land, h9_total, is_land }] }
fastify.get('/api/map/:h5/:epoch', async (req, reply) => {
  const { h5: h5param, epoch: epochKey } = req.params
  if (!/^[0-9a-f]{15}$/.test(h5param)) {
    return reply.code(400).send({ error: 'Invalid H5 ID' })
  }
  if (!/^[a-z0-9_]+$/.test(epochKey)) {
    return reply.code(400).send({ error: 'Invalid epoch key' })
  }

  const epoch = stmtEpoch.get(epochKey)
  if (!epoch) {
    return reply.code(404).send({ error: `Unknown epoch: ${epochKey}` })
  }
  const slOffsetCm = epoch.sl_offset_cm

  let h5int
  try {
    h5int = h3HexToInt(h5param)
  } catch {
    return reply.code(400).send({ error: 'Malformed H5 ID' })
  }

  const rows = stmtH7.all(slOffsetCm, h5int)
  if (!rows.length) {
    return reply.code(404).send({ error: `No data for H5: ${h5param}` })
  }

  const cells = rows.map(row => ({
    h7: row.h7.toString(16),
    lat: row.lat,
    lon: row.lon,
    h7_land: row.h7_land,
    h9_total: row.h9_total,
    is_land: row.h7_land * 2 > row.h9_total ? 1 : 0,
  }))

  return reply.send({
    epoch_key: epochKey,
    sl_offset_cm: slOffsetCm,
    h5: h5param,
    cells,
  })
})
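// Quick smoke test against a local instance (assumes the example H5 ID from
// the comment above actually exists in tessera_cells):
//
//   curl http://localhost:3000/api/map/851e805bfffffff/roman_14bce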
// GET /api/save/:token
// Returns the current player state.
// Priority: SQLite > JSON migration > 404.
fastify.get('/api/save/:token', async (req, reply) => {
  const { token } = req.params
  if (!/^[0-9a-f]{8}$/.test(token)) {
    return reply.code(400).send({ error: 'Invalid token' })
  }

  const sqlitePath = join(SAVES_DIR, `${token}.sqlite3`)
  const jsonPath = join(SAVES_DIR, `${token}.json`)

  // Case 1: SQLite exists — read from it
  if (existsSync(sqlitePath)) {
    try {
      const pdb = openPlayerDb(token)
      const state = readPlayerState(token, pdb)
      pdb.close()
      return reply.send(state)
    } catch (err) {
      console.error(`[save GET] SQLite read failed for ${token}:`, err.message)
      return reply.code(500).send({ error: 'Failed to read player database' })
    }
  }

  // Case 2: JSON exists — migrate to SQLite, return migrated state
  if (existsSync(jsonPath)) {
    try {
      const raw = await readFile(jsonPath, 'utf8')
      const json = JSON.parse(raw)
      const pdb = openPlayerDb(token)
      migrateJsonToSqlite(token, pdb, json)
      const state = readPlayerState(token, pdb)
      pdb.close()
      console.log(`[save GET] Migrated ${token}.json → ${token}.sqlite3`)
      return reply.send(state)
    } catch (err) {
      console.error(`[save GET] Migration failed for ${token}:`, err.message)
      return reply.code(500).send({ error: 'Failed to migrate save' })
    }
  }

  // Case 3: Neither exists
  return reply.code(404).send({ error: 'Save not found' })
})

// POST /api/save/:token
// Writes player state to SQLite.
// Creates the player database if it does not exist.
fastify.post('/api/save/:token', async (req, reply) => {
  const { token } = req.params
  if (!/^[0-9a-f]{8}$/.test(token)) {
    return reply.code(400).send({ error: 'Invalid token' })
  }
  if (!req.body || typeof req.body !== 'object') {
    return reply.code(400).send({ error: 'Invalid body' })
  }

  try {
    const pdb = openPlayerDb(token)
    writePlayerState(token, pdb, req.body)
    pdb.close()
    return reply.send({ ok: true })
  } catch (err) {
    console.error(`[save POST] SQLite write failed for ${token}:`, err.message)
    return reply.code(500).send({ error: 'Failed to write player database' })
  }
})

fastify.setNotFoundHandler((req, reply) => {
  reply.sendFile('index.html')
})

try {
  await fastify.listen({ port: 3000, host: '0.0.0.0' })
  console.log('OTIVM server running on port 3000 — OTIVM-IV')
} catch (err) {
  console.error(err)
  process.exit(1)
}
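
// Example save round-trip against a local instance (token is any 8 hex chars;
// 'deadbeef' here is illustrative):
//
//   curl -X POST http://localhost:3000/api/save/deadbeef \
//     -H 'Content-Type: application/json' \
//     -d '{"background_id":"former_legionary","den":40,"aut":0,"events":[]}'
//   curl http://localhost:3000/api/save/deadbeef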