// OTIVM server — OTIVM-IV // Per-player SQLite integration. // TESSERA world database (data/otivm.sqlite3) — read-only, unchanged. // Player databases (data/saves/{token}.sqlite3) — one per player, // write-safe (single writer), created from data/create_player_db.sql // on first access. // JSON save files (data/saves/{token}.json) — never deleted, migrated // transparently on first SQLite access if present. // Frontend interface unchanged — GET/POST /api/save/:token same as before. // // Change 1 (OTIVM-IV): on dispatch_start event, create ventures + // venture_legs rows. On dispatch_complete, close venture with outcome_net. // Change 2 (OTIVM-IV): on den change, write parameter_drift_log row // (trigger_type = 'venture_complete'). On aut change, write // parameter_drift_log row (trigger_type = 'interval_complete'). // // Cost split — placeholder pending a proper cost model: // cost_vectura = 60% of route cost (VECTVRA — freight charge) // cost_portoria = 25% of route cost (PORTORIUM — customs duty) // cost_other = 15% of route cost (horreum, incidentals) // These proportions are fixed constants here, not in the schema. // When a real cost model is designed, this is the only place to change. 
import Fastify from 'fastify'
import fastifyStatic from '@fastify/static'
import { readFile, mkdir } from 'fs/promises'
import { join, dirname } from 'path'
import { fileURLToPath } from 'url'
// Merged: existsSync and readFileSync were previously imported from 'fs'
// in two separate statements.
import { existsSync, readFileSync } from 'fs'
import Database from 'better-sqlite3'

// Resolve project paths relative to this file (server/ → project root).
const __dirname = dirname(fileURLToPath(import.meta.url))
const ROOT = join(__dirname, '..')
const DIST = join(ROOT, 'dist')                              // built frontend assets
const SAVES_DIR = join(ROOT, 'data', 'saves')                // per-player DBs + legacy JSON saves
const DB_PATH = join(ROOT, 'data', 'otivm.sqlite3')          // TESSERA world DB (read-only)
const SCHEMA_PATH = join(ROOT, 'data', 'create_player_db.sql') // schema applied to new player DBs

// Ensure the saves directory exists before any player DB is opened.
await mkdir(SAVES_DIR, { recursive: true })

// ── Cost split constants ─────────────────────────────────────────────────────
// Placeholder proportions — change here only when a real cost model is defined.
const COST_VECTURA_RATIO = 0.60
const COST_PORTORIA_RATIO = 0.25
// cost_other = remainder (1 - 0.60 - 0.25 = 0.15)

// Route mode classification — used for venture_legs.mode
// 'olive' and 'wine' are road; 'grain', 'linen' are sea.
// Extend this map when new routes are added.
const ROUTE_MODE = {
  olive: 'road',
  wine: 'road',
  grain: 'sea',
  linen: 'sea',
}

// Route cargo definitions — used for ventures.cargo_type / cargo_unit
const ROUTE_CARGO = {
  olive: { type: 'Olive oil, Garum', unit: 'amphora' },
  wine: { type: 'Campanian wine, Wool', unit: 'amphora' },
  grain: { type: 'Adriatic grain, Amber', unit: 'modius' },
  linen: { type: 'Berber linen, Frankincense', unit: 'talent' },
}

// Route H3 waypoints — origin and destination res-5 cell IDs
// Sourced from src/constants.js WAYPOINTS and ROUTES
const ROUTE_H3 = {
  olive: { origin: '851e805bfffffff', destination: '851e8333fffffff' },
  wine: { origin: '851e8333fffffff', destination: '851e8ba3fffffff' },
  grain: { origin: '851e8ba3fffffff', destination: '85386e23fffffff' },
  linen: { origin: '85386e23fffffff', destination: '853f5ba7fffffff' },
}

// Route cost and profit — sourced from src/constants.js ROUTES
// Duplicated here so server/index.js has no dependency on frontend constants.
const ROUTE_ECONOMICS = {
  olive: { cost: 8, profit: 12, duration_ms: 6000 },
  wine: { cost: 14, profit: 22, duration_ms: 9000 },
  grain: { cost: 24, profit: 40, duration_ms: 12000 },
  linen: { cost: 38, profit: 70, duration_ms: 18000 },
}

// MS_PER_SIM_DAY — must match src/constants.js
const MS_PER_SIM_DAY = 3_000

// ── TESSERA world database (read-only) ──────────────────────────────────────
const db = new Database(DB_PATH, { readonly: true })

const stmtEpoch = db.prepare(
  'SELECT sl_offset_cm FROM paleo_epochs WHERE epoch_key = ?'
)

// Aggregate res-9 cells into res-7 groups: centroid lat/lon plus a count of
// cells above the epoch's sea-level offset (h7_land) out of the total (h9_total).
const stmtH7 = db.prepare(`
  SELECT h7,
         AVG(lat) AS lat,
         AVG(lon) AS lon,
         SUM(CASE WHEN elev_cm > ? THEN 1 ELSE 0 END) AS h7_land,
         COUNT(*) AS h9_total
  FROM tessera_cells
  WHERE h5 = ?
    AND status = 2
  GROUP BY h7
`)

// Convert an H3 cell ID in hex-string form to the BigInt used as the
// integer key in tessera_cells.h5 (better-sqlite3 binds BigInt natively).
function h3HexToInt(hexStr) {
  return BigInt('0x' + hexStr)
}

// ── Per-player database ─────────────────────────────────────────────────────
const PLAYER_SCHEMA_SQL = readFileSync(SCHEMA_PATH, 'utf8')

// Open (and, on first access, create + initialize) the player DB for `token`.
// WAL journal mode: single-writer-safe for our one-process server.
// Caller owns the handle and must close() it (see the route handlers).
function openPlayerDb(token) {
  const path = join(SAVES_DIR, `${token}.sqlite3`)
  const isNew = !existsSync(path)
  const pdb = new Database(path)
  pdb.pragma('journal_mode = WAL')
  pdb.pragma('foreign_keys = ON')
  if (isNew) {
    pdb.exec(PLAYER_SCHEMA_SQL)
  }
  return pdb
}

// ── UUID generator ──────────────────────────────────────────────────────────
// RFC4122 v4 UUID. Prefers the built-in crypto.randomUUID() (cryptographically
// strong, available globally in modern Node); falls back to the original
// Math.random()-based generator on older runtimes. No external dependency.
function uuid() {
  if (globalThis.crypto?.randomUUID) {
    return globalThis.crypto.randomUUID()
  }
  return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, c => {
    const r = Math.random() * 16 | 0
    return (c === 'x' ? r : (r & 0x3 | 0x8)).toString(16)
  })
}

// ── JSON → SQLite migration ─────────────────────────────────────────────────
// One-shot transparent migration of a legacy {token}.json save into the
// player DB. The JSON file itself is never deleted (see file header).
function migrateJsonToSqlite(token, pdb, json) {
  const now = new Date().toISOString()
  const actorId = token
  const sessionId = token
  const background = json.background_id || 'unknown'
  const name = json.actor_name || 'Mercator'
  const epoch = 'roman_14bce'

  pdb.prepare(`
    INSERT OR IGNORE INTO actor_profile
      (actor_id, session_id, background_id, actor_name, epoch,
       schema_version, recorded_at)
    VALUES (?, ?, ?, ?, ?, 5, ?)
  `).run(actorId, sessionId, background, name, epoch, now)

  // Seed parameters from the background's starting values. Roman-layer
  // tokens are enumerated explicitly; everything else is 'universal'.
  pdb.prepare(`
    INSERT OR IGNORE INTO actor_parameters
      (actor_id, parameter_token, scope, layer, value_true, value_perceived,
       value_social, confidence_tag, observable_level, drift_source, recorded_at)
    SELECT ?, parameter_token, 'actor',
      CASE WHEN parameter_token IN
        ('auctoritas','clientela','liquiditas','fama',
         'disciplina','mercatus_scientia','itineris_scientia','ius_accessus',
         'periculum_tolerantia','negotiatio','litterae','officia_burden')
        THEN 'roman' ELSE 'universal' END,
      value_true, value_perceived, NULL,
      confidence_tag, observable_level, 'migration', ?
    FROM background_starting_values
    WHERE background_id = ?
  `).run(actorId, now, background)

  // Current denarii from the JSON overrides the background default.
  if (json.den !== undefined) {
    pdb.prepare(`
      INSERT INTO actor_parameters
        (actor_id, parameter_token, scope, layer, value_true, value_perceived,
         confidence_tag, observable_level, drift_source, recorded_at)
      VALUES (?, 'liquiditas', 'actor', 'roman', ?, ?, 'measured', 'full',
              'migration', ?)
    `).run(actorId, String(json.den), String(json.den), now)
  }

  // Auctoritas is stored banded, not numeric (see autBand / autToInt).
  if (json.aut !== undefined) {
    const band = autBand(json.aut)
    pdb.prepare(`
      INSERT INTO actor_parameters
        (actor_id, parameter_token, scope, layer, value_true, value_perceived,
         value_social, confidence_tag, observable_level, drift_source, recorded_at)
      VALUES (?, 'auctoritas', 'actor', 'roman', ?, ?, ?, 'indicated', 'partial',
              'migration', ?)
    `).run(actorId, band, band, band, now)
  }

  const insertEvent = pdb.prepare(`
    INSERT INTO events
      (actor_id, event_type, ref_id, ref_type, payload, recorded_at)
    VALUES (?, ?, ?, ?, ?, ?)
  `)
  for (const ev of (json.events || [])) {
    insertEvent.run(
      actorId,
      ev.type || 'unknown',
      ev.route_id || null,
      ev.route_id ? 'venture' : null,
      JSON.stringify(ev),
      ev.timestamp_utc || now
    )
  }
  // Marker event so the migration itself is visible in the event log.
  insertEvent.run(
    actorId, 'session_start', null, null,
    JSON.stringify({ source: 'json_migration', schema_version: 5 }), now
  )
}

// Map a numeric auctoritas score to its stored band.
function autBand(aut) {
  if (aut >= 30) return 'distinguished'
  if (aut >= 15) return 'high'
  if (aut >= 5) return 'medium'
  return 'low'
}

// ── Read player state ───────────────────────────────────────────────────────
// Reconstruct the JSON state the frontend expects from the player DB.
function readPlayerState(token, pdb) {
  const params = pdb.prepare(`
    SELECT parameter_token, value_true, value_perceived, value_social,
           confidence_tag, observable_level
    FROM actor_parameters
    WHERE actor_id = ?
      AND superseded_at IS NULL
    ORDER BY recorded_at DESC
  `).all(token)

  // Rows are ordered newest-first; keep the FIRST row seen per token.
  // BUGFIX: the previous last-write-wins loop picked the OLDEST row whenever
  // duplicate non-superseded rows exist (e.g. liquiditas after JSON migration).
  const paramMap = {}
  for (const p of params) {
    if (!(p.parameter_token in paramMap)) {
      paramMap[p.parameter_token] = p
    }
  }

  const den = parseInt(paramMap['liquiditas']?.value_true || '0', 10)
  const autStr = paramMap['auctoritas']?.value_true || 'low'
  const aut = autToInt(autStr)

  const events = pdb.prepare(`
    SELECT event_type AS type, ref_id AS route_id, payload,
           recorded_at AS timestamp_utc
    FROM events
    WHERE actor_id = ?
    ORDER BY recorded_at ASC
  `).all(token)

  // Replay the event log to derive dispatch counters, journal unlocks,
  // and any still-active dispatch.
  let dispatches = 0
  const route_dispatches = {}
  const journal_seen = []
  let active_dispatch = null
  let chapter = 1
  for (const ev of events) {
    if (ev.type === 'dispatch_complete' && ev.route_id) {
      dispatches++
      route_dispatches[ev.route_id] = (route_dispatches[ev.route_id] || 0) + 1
    }
    if (ev.type === 'journal_unlock' && ev.route_id) {
      journal_seen.push({
        routeId: ev.route_id,
        dispatch: route_dispatches[ev.route_id] || 1,
      })
    }
    if (ev.type === 'venture_start' || ev.type === 'dispatch_start') {
      try {
        const p = JSON.parse(ev.payload || '{}')
        active_dispatch = {
          route_id: ev.route_id || p.route_id,
          started_utc: ev.timestamp_utc,
          duration_ms: p.duration_ms || 0,
        }
      } catch { /* ignore malformed payload */ }
    }
    if (ev.type === 'venture_complete' || ev.type === 'dispatch_complete') {
      active_dispatch = null
    }
  }

  // Chapter thresholds — derived, never stored.
  if (aut >= 30 && den >= 800) chapter = 5
  else if (aut >= 15 && den >= 350) chapter = 4
  else if (aut >= 5 && den >= 120) chapter = 3
  else if (den >= 40) chapter = 2

  const profile = pdb.prepare(
    'SELECT * FROM actor_profile WHERE actor_id = ? ORDER BY recorded_at DESC LIMIT 1'
  ).get(token)

  return {
    token,
    background_id: profile?.background_id || 'unknown',
    actor_name: profile?.actor_name || 'Mercator',
    den,
    aut,
    chapter,
    dispatches,
    route_dispatches,
    journal_seen,
    active_dispatch,
    events: events.map(e => ({
      type: e.type,
      route_id: e.route_id,
      timestamp_utc: e.timestamp_utc,
    })),
    created_at: profile?.recorded_at || new Date().toISOString(),
    schema_version: 5,
  }
}

// Map a stored auctoritas band back to a representative numeric score.
// Deliberately lossy — the inverse of autBand up to banding.
function autToInt(band) {
  switch (band) {
    case 'distinguished': return 35
    case 'high': return 20
    case 'medium': return 7
    default: return 1
  }
}

// ── Venture helpers ─────────────────────────────────────────────────────────
// Find the open (active) venture for this actor, if any.
function findActiveVenture(pdb, actorId) {
  return pdb.prepare(`
    SELECT venture_id, venture_label FROM ventures
    WHERE actor_id = ? AND status = 'active'
    ORDER BY recorded_at DESC LIMIT 1
  `).get(actorId)
}

// Create a venture + venture_leg row on dispatch_start.
// Returns the new venture_id, or null for an unknown route.
function createVenture(pdb, actorId, routeId, now) {
  const eco = ROUTE_ECONOMICS[routeId]
  const cargo = ROUTE_CARGO[routeId]
  const h3 = ROUTE_H3[routeId]
  const mode = ROUTE_MODE[routeId] || 'road'
  if (!eco || !cargo || !h3) return null // unknown route — skip silently

  const ventureId = uuid()
  const legId = uuid()
  // Placeholder cost split (see file header); rounded to 2 decimals, with
  // cost_other taking the remainder so the three parts sum to cost exactly.
  const costVectura = Math.round(eco.cost * COST_VECTURA_RATIO * 100) / 100
  const costPortoria = Math.round(eco.cost * COST_PORTORIA_RATIO * 100) / 100
  const costOther = Math.round((eco.cost - costVectura - costPortoria) * 100) / 100
  const durationDays = Math.round(eco.duration_ms / MS_PER_SIM_DAY) // INTEGER
  const label = `${routeId.charAt(0).toUpperCase() + routeId.slice(1)} route`

  pdb.prepare(`
    INSERT INTO ventures
      (venture_id, actor_id, venture_label, status, cargo_type, cargo_unit,
       cost_total, recorded_at, started_at)
    VALUES (?, ?, ?, 'active', ?, ?, ?, ?, ?)
  `).run(ventureId, actorId, label, cargo.type, cargo.unit, eco.cost, now, now)

  pdb.prepare(`
    INSERT INTO venture_legs
      (leg_id, venture_id, leg_sequence, origin_h3, destination_h3, mode,
       duration_days, cost_vectura, cost_portoria, cost_other, cost_total,
       status, recorded_at, started_at)
    VALUES (?, ?, 1, ?, ?, ?, ?, ?, ?, ?, ?, 'active', ?, ?)
  `).run(legId, ventureId, h3.origin, h3.destination, mode, durationDays,
         costVectura, costPortoria, costOther, eco.cost, now, now)

  return ventureId
}

// Close the active venture on dispatch_complete.
// Writes outcome_net, completed_at, and closes the leg.
function closeVenture(pdb, actorId, routeId, ventureId, now) {
  const eco = ROUTE_ECONOMICS[routeId]
  if (!eco) return
  const outcomeNet = eco.profit - eco.cost
  pdb.prepare(`
    UPDATE ventures
    SET status = 'complete', revenue_total = ?, outcome_net = ?, completed_at = ?
    WHERE venture_id = ?
  `).run(eco.profit, outcomeNet, now, ventureId)
  pdb.prepare(`
    UPDATE venture_legs
    SET status = 'complete', completed_at = ?
    WHERE venture_id = ? AND status = 'active'
  `).run(now, ventureId)
}

// ── Drift log helpers ───────────────────────────────────────────────────────
// Append one parameter_drift_log row (Change 2). Values are stored as text.
function writeDriftLog(pdb, actorId, paramToken, triggerType, triggerRef,
                       valueBefore, valueAfter, deltaNote, now) {
  pdb.prepare(`
    INSERT INTO parameter_drift_log
      (actor_id, parameter_token, trigger_type, trigger_ref,
       value_before, value_after, delta_note, recorded_at)
    VALUES (?, ?, ?, ?, ?, ?, ?, ?)
  `).run(actorId, paramToken, triggerType, triggerRef,
         String(valueBefore), String(valueAfter), deltaNote, now)
}

// ── Write player state ──────────────────────────────────────────────────────
// Apply a POSTed frontend state: initialize profile/parameters on first
// write, ingest new events (creating/closing ventures — Change 1), and
// record den/aut parameter drift (Change 2).
function writePlayerState(token, pdb, body) {
  const now = new Date().toISOString()
  const actorId = token

  // Ensure actor_profile exists
  const profile = pdb.prepare(
    'SELECT actor_id FROM actor_profile WHERE actor_id = ? LIMIT 1'
  ).get(actorId)
  if (!profile) {
    const backgroundId = body.background_id || 'unknown'
    const actorName = body.actor_name || 'Mercator'
    pdb.prepare(`
      INSERT OR IGNORE INTO actor_profile
        (actor_id, session_id, background_id, actor_name, epoch,
         schema_version, recorded_at)
      VALUES (?, ?, ?, ?, 'roman_14bce', 5, ?)
    `).run(actorId, actorId, backgroundId, actorName, now)

    pdb.prepare(`
      INSERT OR IGNORE INTO actor_parameters
        (actor_id, parameter_token, scope, layer, value_true, value_perceived,
         value_social, confidence_tag, observable_level, drift_source, recorded_at)
      SELECT ?, parameter_token, 'actor',
        CASE WHEN parameter_token IN
          ('auctoritas','clientela','liquiditas','fama',
           'disciplina','mercatus_scientia','itineris_scientia','ius_accessus',
           'periculum_tolerantia','negotiatio','litterae','officia_burden')
          THEN 'roman' ELSE 'universal' END,
        value_true, value_perceived,
        CASE WHEN parameter_token = 'auctoritas' THEN value_true ELSE NULL END,
        confidence_tag, observable_level, 'initial', ?
      FROM background_starting_values
      WHERE background_id = ?
    `).run(actorId, now, backgroundId)

    // If the first POST carries a den value, supersede the background
    // default so exactly one liquiditas row stays live.
    if (body.den !== undefined) {
      pdb.prepare(`
        UPDATE actor_parameters SET superseded_at = ?
        WHERE actor_id = ? AND parameter_token = 'liquiditas'
          AND superseded_at IS NULL
      `).run(now, actorId)
      pdb.prepare(`
        INSERT INTO actor_parameters
          (actor_id, parameter_token, scope, layer, value_true, value_perceived,
           confidence_tag, observable_level, drift_source, recorded_at)
        VALUES (?, 'liquiditas', 'actor', 'roman', ?, ?, 'measured', 'full',
                'initial', ?)
      `).run(actorId, String(body.den), String(body.den), now)
    }
  }

  // Read current den/aut before any update — needed for drift log
  const currentLiq = pdb.prepare(`
    SELECT value_true FROM actor_parameters
    WHERE actor_id = ? AND parameter_token = 'liquiditas'
      AND superseded_at IS NULL
    ORDER BY recorded_at DESC LIMIT 1
  `).get(actorId)
  const currentAut = pdb.prepare(`
    SELECT value_true FROM actor_parameters
    WHERE actor_id = ? AND parameter_token = 'auctoritas'
      AND superseded_at IS NULL
    ORDER BY recorded_at DESC LIMIT 1
  `).get(actorId)
  const prevDen = currentLiq?.value_true ?? '0'
  const prevAut = currentAut?.value_true ?? 'low'

  // ── Process new events (Change 1 + Change 2) ──────────────────────────────
  // Only events newer than the last recorded one are ingested; ISO-8601
  // strings compare correctly as plain strings.
  const lastRecorded = pdb.prepare(`
    SELECT recorded_at FROM events
    WHERE actor_id = ?
    ORDER BY recorded_at DESC LIMIT 1
  `).get(actorId)
  const lastTs = lastRecorded?.recorded_at || '1970-01-01T00:00:00.000Z'

  const insertEvent = pdb.prepare(`
    INSERT INTO events
      (actor_id, event_type, ref_id, ref_type, payload, recorded_at)
    VALUES (?, ?, ?, ?, ?, ?)
  `)

  for (const ev of (body.events || [])) {
    const ts = ev.timestamp_utc || now
    if (ts <= lastTs) continue
    const routeId = ev.route_id || null

    // Change 1 — venture creation on dispatch_start
    if (ev.type === 'dispatch_start' && routeId) {
      const ventureId = createVenture(pdb, actorId, routeId, ts)
      insertEvent.run(
        actorId, ev.type, routeId, 'venture',
        JSON.stringify({ ...ev, venture_id: ventureId }), ts
      )
      continue
    }

    // Change 1 — venture closure on dispatch_complete
    if (ev.type === 'dispatch_complete' && routeId) {
      const active = findActiveVenture(pdb, actorId)
      if (active) {
        closeVenture(pdb, actorId, routeId, active.venture_id, ts)
        insertEvent.run(
          actorId, ev.type, routeId, 'venture',
          JSON.stringify({ ...ev, venture_id: active.venture_id }), ts
        )
      } else {
        // No active venture to close (e.g. completion replayed after a
        // migration) — record the event without a venture_id.
        insertEvent.run(
          actorId, ev.type, routeId, 'venture', JSON.stringify(ev), ts
        )
      }
      continue
    }

    // All other events — write as-is
    insertEvent.run(
      actorId, ev.type || 'unknown', routeId,
      routeId ? 'venture' : null, JSON.stringify(ev), ts
    )
  }

  // ── Update parameters + drift log (Change 2) ──────────────────────────────
  if (body.den !== undefined) {
    const newDen = String(body.den)
    if (newDen !== prevDen) {
      // Find the venture that just completed, if any — use as trigger_ref
      const lastVenture = pdb.prepare(`
        SELECT venture_id, venture_label FROM ventures
        WHERE actor_id = ? AND status = 'complete'
        ORDER BY completed_at DESC LIMIT 1
      `).get(actorId)
      updateParam(pdb, actorId, 'liquiditas', newDen, newDen, now)
      writeDriftLog(
        pdb, actorId, 'liquiditas', 'venture_complete',
        lastVenture?.venture_id || null,
        prevDen, newDen,
        lastVenture?.venture_label || null, now
      )
    }
  }
  if (body.aut !== undefined) {
    const newBand = autBand(body.aut)
    if (newBand !== prevAut) {
      updateParamWithSocial(pdb, actorId, 'auctoritas',
                            newBand, newBand, newBand, now)
      writeDriftLog(
        pdb, actorId, 'auctoritas', 'interval_complete', null,
        prevAut, newBand, 'otium rest', now
      )
    }
  }
}

// Supersede the live row for `token` and insert the new value
// (measured/full — used for liquiditas).
function updateParam(pdb, actorId, token, valueTrue, valuePerceived, now) {
  pdb.prepare(`
    UPDATE actor_parameters SET superseded_at = ?
    WHERE actor_id = ? AND parameter_token = ? AND superseded_at IS NULL
  `).run(now, actorId, token)
  pdb.prepare(`
    INSERT INTO actor_parameters
      (actor_id, parameter_token, scope, layer, value_true, value_perceived,
       confidence_tag, observable_level, drift_source, recorded_at)
    VALUES (?, ?, 'actor', 'roman', ?, ?, 'measured', 'full', 'game_state', ?)
  `).run(actorId, token, valueTrue, valuePerceived, now)
}

// Same as updateParam, but also writes value_social
// (indicated/partial — used for auctoritas).
function updateParamWithSocial(pdb, actorId, token, valueTrue, valuePerceived,
                               valueSocial, now) {
  pdb.prepare(`
    UPDATE actor_parameters SET superseded_at = ?
    WHERE actor_id = ? AND parameter_token = ? AND superseded_at IS NULL
  `).run(now, actorId, token)
  pdb.prepare(`
    INSERT INTO actor_parameters
      (actor_id, parameter_token, scope, layer, value_true, value_perceived,
       value_social, confidence_tag, observable_level, drift_source, recorded_at)
    VALUES (?, ?, 'actor', 'roman', ?, ?, ?, 'indicated', 'partial',
            'game_state', ?)
  `).run(actorId, token, valueTrue, valuePerceived, valueSocial, now)
}

// ── Fastify server ──────────────────────────────────────────────────────────
const fastify = Fastify({ logger: false })

await fastify.register(fastifyStatic, {
  root: DIST,
  prefix: '/',
})

// GET /api/map/:h5/:epoch — res-7 land/sea aggregation for one H5 cell
// at the requested paleo epoch (read-only TESSERA DB).
fastify.get('/api/map/:h5/:epoch', async (req, reply) => {
  const { h5: h5param, epoch: epochKey } = req.params
  if (!/^[0-9a-f]{15}$/.test(h5param)) {
    return reply.code(400).send({ error: 'Invalid H5 ID' })
  }
  if (!/^[a-z0-9_]+$/.test(epochKey)) {
    return reply.code(400).send({ error: 'Invalid epoch key' })
  }
  const epoch = stmtEpoch.get(epochKey)
  if (!epoch) {
    return reply.code(404).send({ error: `Unknown epoch: ${epochKey}` })
  }
  let h5int
  try {
    h5int = h3HexToInt(h5param)
  } catch {
    return reply.code(400).send({ error: 'Malformed H5 ID' })
  }
  const rows = stmtH7.all(epoch.sl_offset_cm, h5int)
  if (!rows.length) {
    return reply.code(404).send({ error: `No data for H5: ${h5param}` })
  }
  const cells = rows.map(row => ({
    h7: row.h7.toString(16),
    lat: row.lat,
    lon: row.lon,
    h7_land: row.h7_land,
    h9_total: row.h9_total,
    // Majority vote: land if more than half the res-9 cells are above sea level.
    is_land: row.h7_land * 2 > row.h9_total ? 1 : 0,
  }))
  return reply.send({
    epoch_key: epochKey,
    sl_offset_cm: epoch.sl_offset_cm,
    h5: h5param,
    cells,
  })
})

// GET /api/save/:token — read player state. Prefers the SQLite save;
// transparently migrates a legacy JSON save on first access.
fastify.get('/api/save/:token', async (req, reply) => {
  const { token } = req.params
  if (!/^[0-9a-f]{8}$/.test(token)) {
    return reply.code(400).send({ error: 'Invalid token' })
  }
  const sqlitePath = join(SAVES_DIR, `${token}.sqlite3`)
  const jsonPath = join(SAVES_DIR, `${token}.json`)

  if (existsSync(sqlitePath)) {
    let pdb
    try {
      pdb = openPlayerDb(token)
      const state = readPlayerState(token, pdb)
      return reply.send(state)
    } catch (err) {
      console.error(`[save GET] SQLite read failed for ${token}:`, err.message)
      return reply.code(500).send({ error: 'Failed to read player database' })
    } finally {
      // BUGFIX: close on error paths too — a throw used to leak the
      // WAL-mode database handle.
      pdb?.close()
    }
  }

  if (existsSync(jsonPath)) {
    let pdb
    try {
      const raw = await readFile(jsonPath, 'utf8')
      const json = JSON.parse(raw)
      pdb = openPlayerDb(token)
      migrateJsonToSqlite(token, pdb, json)
      const state = readPlayerState(token, pdb)
      console.log(`[save GET] Migrated ${token}.json → ${token}.sqlite3`)
      return reply.send(state)
    } catch (err) {
      console.error(`[save GET] Migration failed for ${token}:`, err.message)
      return reply.code(500).send({ error: 'Failed to migrate save' })
    } finally {
      pdb?.close()
    }
  }

  return reply.code(404).send({ error: 'Save not found' })
})

// POST /api/save/:token — write player state (Changes 1 + 2 applied inside
// writePlayerState).
fastify.post('/api/save/:token', async (req, reply) => {
  const { token } = req.params
  if (!/^[0-9a-f]{8}$/.test(token)) {
    return reply.code(400).send({ error: 'Invalid token' })
  }
  if (!req.body || typeof req.body !== 'object') {
    return reply.code(400).send({ error: 'Invalid body' })
  }
  let pdb
  try {
    pdb = openPlayerDb(token)
    writePlayerState(token, pdb, req.body)
    return reply.send({ ok: true })
  } catch (err) {
    console.error(`[save POST] SQLite write failed for ${token}:`, err.message)
    return reply.code(500).send({ error: 'Failed to write player database' })
  } finally {
    // BUGFIX: close on error paths too (was only closed on success).
    pdb?.close()
  }
})

// SPA fallback — unknown paths serve the frontend entry point.
fastify.setNotFoundHandler((req, reply) => {
  reply.sendFile('index.html')
})

try {
  await fastify.listen({ port: 3000, host: '0.0.0.0' })
  console.log('OTIVM server running on port 3000 — OTIVM-IV (Changes 1+2)')
} catch (err) {
  console.error(err)
  process.exit(1)
}