feat: wire per-player SQLite for OTIVM-III
This commit is contained in:
431
server/index.js
431
server/index.js
@@ -1,3 +1,13 @@
|
||||
// OTIVM server — OTIVM-III
|
||||
// Per-player SQLite integration.
|
||||
// TESSERA world database (data/otivm.sqlite3) — read-only, unchanged.
|
||||
// Player databases (data/saves/{token}.sqlite3) — one per player,
|
||||
// write-safe (single writer), created from data/create_player_db.sql
|
||||
// on first access.
|
||||
// JSON save files (data/saves/{token}.json) — never deleted, migrated
|
||||
// transparently on first SQLite access if present.
|
||||
// Frontend interface unchanged — GET/POST /api/save/:token same as before.
|
||||
|
||||
import Fastify from 'fastify'
|
||||
import fastifyStatic from '@fastify/static'
|
||||
import { readFile, writeFile, mkdir } from 'fs/promises'
|
||||
@@ -11,9 +21,12 @@ const ROOT = join(__dirname, '..')
|
||||
const DIST = join(ROOT, 'dist')
|
||||
const SAVES_DIR = join(ROOT, 'data', 'saves')
|
||||
const DB_PATH = join(ROOT, 'data', 'otivm.sqlite3')
|
||||
const SCHEMA_PATH = join(ROOT, 'data', 'create_player_db.sql')
|
||||
|
||||
await mkdir(SAVES_DIR, { recursive: true })
|
||||
|
||||
// ── TESSERA world database (read-only) ──────────────────────────────────────
|
||||
|
||||
const db = new Database(DB_PATH, { readonly: true })
|
||||
|
||||
const stmtEpoch = db.prepare(
|
||||
// Parse an H3 cell index given as a bare hex string into a BigInt.
function h3HexToInt(hexStr) {
  return BigInt(`0x${hexStr}`)
}
|
||||
|
||||
// ── Per-player database ──────────────────────────────────────────────────────
|
||||
//
|
||||
// One SQLite file per player at data/saves/{token}.sqlite3.
|
||||
// Created from data/create_player_db.sql on first access.
|
||||
// Write-safe: only one process writes to a given file at a time.
|
||||
// The better-sqlite3 instance is opened, used, and closed per request —
|
||||
// no persistent connection pool needed at 128 concurrent players.
|
||||
|
||||
// Read the schema once at startup — better-sqlite3 exec() requires the full SQL string.
// NOTE(review): this `import` sits mid-file; ES-module imports are hoisted so it
// behaves identically, but convention is to group it with the imports at the top.
import { readFileSync } from 'fs'
const PLAYER_SCHEMA_SQL = readFileSync(SCHEMA_PATH, 'utf8')
|
||||
|
||||
// Open (or create) the player database for a given token.
// Returns a better-sqlite3 Database instance — caller must close it.
//
// The schema is applied on EVERY open rather than only when the file is new:
// it is idempotent (IF NOT EXISTS clauses, per the schema's own contract), and
// this heals a file left half-initialized by a crash between creation and the
// first exec(). It also removes the existsSync-then-open TOCTOU race the
// previous version had.
function openPlayerDb(token) {
  const path = join(SAVES_DIR, `${token}.sqlite3`)
  const pdb = new Database(path)
  pdb.pragma('journal_mode = WAL')
  pdb.pragma('foreign_keys = ON')
  pdb.exec(PLAYER_SCHEMA_SQL)
  return pdb
}
|
||||
|
||||
// ── JSON → SQLite migration ──────────────────────────────────────────────────
//
// Called once per token when a .json exists but no .sqlite3 exists.
// Imports the flat JSON game state into the events and actor_parameters
// tables so the behavioral record is preserved.
// The JSON file is left in place — never deleted.
//
// The entire import runs inside one transaction: a crash mid-migration must
// not leave a partially-populated .sqlite3 that later requests would treat
// as fully migrated.

function migrateJsonToSqlite(token, pdb, json) {
  const now = new Date().toISOString()
  const actorId = token
  const sessionId = token

  // actor_profile — one row anchoring the actor
  const background = json.background_id || 'unknown'
  const name = json.actor_name || 'Mercator'
  const epoch = 'roman_14bce'

  const insertProfile = pdb.prepare(`
    INSERT OR IGNORE INTO actor_profile
      (actor_id, session_id, background_id, actor_name, epoch, schema_version, recorded_at)
    VALUES (?, ?, ?, ?, ?, 3, ?)
  `)

  // Seed actor_parameters from background_starting_values if not already seeded.
  const seedParams = pdb.prepare(`
    INSERT OR IGNORE INTO actor_parameters
      (actor_id, parameter_token, scope, layer,
       value_true, value_perceived, value_social,
       confidence_tag, observable_level, drift_source, recorded_at)
    SELECT
      ?, parameter_token, 'actor',
      CASE WHEN parameter_token IN ('auctoritas','clientela','liquiditas','fama',
        'disciplina','mercatus_scientia','itineris_scientia','ius_accessus',
        'periculum_tolerantia','negotiatio','litterae','officia_burden')
      THEN 'roman' ELSE 'universal' END,
      value_true, value_perceived, NULL,
      confidence_tag, observable_level, 'migration', ?
    FROM background_starting_values
    WHERE background_id = ?
  `)

  // Close any open row for a parameter before overriding it, so the
  // overrides below leave exactly ONE non-superseded row per parameter_token.
  // (Previously the seed row and the override row both stayed open.)
  const supersede = pdb.prepare(`
    UPDATE actor_parameters
    SET superseded_at = ?
    WHERE actor_id = ? AND parameter_token = ? AND superseded_at IS NULL
  `)

  const upsertLiq = pdb.prepare(`
    INSERT INTO actor_parameters
      (actor_id, parameter_token, scope, layer,
       value_true, value_perceived, confidence_tag, observable_level,
       drift_source, recorded_at)
    VALUES (?, 'liquiditas', 'actor', 'roman',
            ?, ?, 'measured', 'full', 'migration', ?)
  `)

  const upsertAut = pdb.prepare(`
    INSERT INTO actor_parameters
      (actor_id, parameter_token, scope, layer,
       value_true, value_perceived, value_social,
       confidence_tag, observable_level, drift_source, recorded_at)
    VALUES (?, 'auctoritas', 'actor', 'roman',
            ?, ?, ?, 'indicated', 'partial', 'migration', ?)
  `)

  const insertEvent = pdb.prepare(`
    INSERT INTO events (actor_id, event_type, ref_id, ref_type, payload, recorded_at)
    VALUES (?, ?, ?, ?, ?, ?)
  `)

  const runMigration = pdb.transaction(() => {
    insertProfile.run(actorId, sessionId, background, name, epoch, now)
    seedParams.run(actorId, now, background)

    // Override liquiditas with the actual value from JSON if available.
    if (json.den !== undefined) {
      supersede.run(now, actorId, 'liquiditas')
      upsertLiq.run(actorId, String(json.den), String(json.den), now)
    }

    // Override auctoritas with the actual value from JSON if available,
    // converted from the legacy raw integer to an ordinal band.
    if (json.aut !== undefined) {
      const band = autBand(json.aut)
      supersede.run(now, actorId, 'auctoritas')
      upsertAut.run(actorId, band, band, band, now)
    }

    // Import legacy events[] array into the events table.
    for (const ev of (json.events || [])) {
      insertEvent.run(
        actorId,
        ev.type || 'unknown',
        ev.route_id || null,
        ev.route_id ? 'venture' : null,
        JSON.stringify(ev),
        ev.timestamp_utc || now
      )
    }

    // Record the migration itself as an event.
    insertEvent.run(
      actorId,
      'session_start',
      null,
      null,
      JSON.stringify({ source: 'json_migration', schema_version: 3 }),
      now
    )
  })

  runMigration()
}
|
||||
|
||||
// Convert raw auctoritas integer (OTIVM-I/II) to ordinal band.
function autBand(aut) {
  const thresholds = [
    [30, 'distinguished'],
    [15, 'high'],
    [5, 'medium'],
  ]
  for (const [floor, band] of thresholds) {
    if (aut >= floor) return band
  }
  return 'low'
}
|
||||
|
||||
// ── Read player state from SQLite ────────────────────────────────────────────
//
// Returns the current game state as the frontend expects it.
// Derives the flat JSON shape from the relational records so the
// frontend requires zero changes.

function readPlayerState(token, pdb) {
  // Current parameter values. Rows come back newest-first; keep only the
  // FIRST row seen per parameter_token. (The previous loop assigned
  // unconditionally, so with several non-superseded rows for one token the
  // OLDEST row silently won.)
  const params = pdb.prepare(`
    SELECT parameter_token, value_true, value_perceived, value_social,
           confidence_tag, observable_level
    FROM actor_parameters
    WHERE actor_id = ? AND superseded_at IS NULL
    ORDER BY recorded_at DESC
  `).all(token)

  const paramMap = {}
  for (const p of params) {
    if (!(p.parameter_token in paramMap)) {
      paramMap[p.parameter_token] = p
    }
  }

  // Guard against non-numeric value_true (parseInt would yield NaN).
  const denParsed = Number.parseInt(paramMap['liquiditas']?.value_true || '0', 10)
  const den = Number.isFinite(denParsed) ? denParsed : 0
  const autStr = paramMap['auctoritas']?.value_true || 'low'
  const aut = autToInt(autStr)

  // Events — chronological.
  const events = pdb.prepare(`
    SELECT event_type AS type, ref_id AS route_id, payload, recorded_at AS timestamp_utc
    FROM events
    WHERE actor_id = ?
    ORDER BY recorded_at ASC
  `).all(token)

  // Derive dispatches and route_dispatches from the event log.
  let dispatches = 0
  const route_dispatches = {}
  const journal_seen = []
  let active_dispatch = null
  let chapter = 1

  for (const ev of events) {
    if (ev.type === 'dispatch_complete' && ev.route_id) {
      dispatches++
      route_dispatches[ev.route_id] = (route_dispatches[ev.route_id] || 0) + 1
    }
    if (ev.type === 'journal_unlock' && ev.route_id) {
      journal_seen.push({
        routeId: ev.route_id,
        dispatch: route_dispatches[ev.route_id] || 1,
      })
    }
    if (ev.type === 'venture_start' || ev.type === 'dispatch_start') {
      try {
        const p = JSON.parse(ev.payload || '{}')
        active_dispatch = {
          route_id: ev.route_id || p.route_id,
          started_utc: ev.timestamp_utc,
          duration_ms: p.duration_ms || 0,
        }
      } catch { /* ignore malformed payload — best-effort reconstruction */ }
    }
    if (ev.type === 'venture_complete' || ev.type === 'dispatch_complete') {
      active_dispatch = null
    }
  }

  // chapter derived from den + aut (thresholds mirror autBand's bands).
  if (aut >= 30 && den >= 800) chapter = 5
  else if (aut >= 15 && den >= 350) chapter = 4
  else if (aut >= 5 && den >= 120) chapter = 3
  else if (den >= 40) chapter = 2

  // actor_profile — most recent row anchors identity fields.
  const profile = pdb.prepare(
    'SELECT * FROM actor_profile WHERE actor_id = ? ORDER BY recorded_at DESC LIMIT 1'
  ).get(token)

  return {
    token,
    background_id: profile?.background_id || 'unknown',
    actor_name: profile?.actor_name || 'Mercator',
    den,
    aut,
    chapter,
    dispatches,
    route_dispatches,
    journal_seen,
    active_dispatch,
    events: events.map(e => ({
      type: e.type,
      route_id: e.route_id,
      timestamp_utc: e.timestamp_utc,
    })),
    created_at: profile?.recorded_at || new Date().toISOString(),
    schema_version: 3,
  }
}
|
||||
|
||||
// Convert ordinal band back to a representative integer for chapter logic.
// Unknown bands (including 'low') map to 1.
function autToInt(band) {
  const representatives = {
    distinguished: 35,
    high: 20,
    medium: 7,
  }
  return representatives[band] ?? 1
}
|
||||
|
||||
// ── Write player state to SQLite ─────────────────────────────────────────────
//
// Receives the flat JSON game state from the frontend.
// Writes parameter changes as new actor_parameters rows (append-only).
// Appends new events to the events table.
// Marks superseded parameter rows.
//
// All statements run inside one transaction so a crash between the profile
// seed, the parameter updates, and the event appends cannot leave the
// database in a mixed state.

function writePlayerState(token, pdb, body) {
  const now = new Date().toISOString()
  const actorId = token

  const applyState = pdb.transaction(() => {
    // Ensure actor_profile exists.
    const profile = pdb.prepare(
      'SELECT actor_id FROM actor_profile WHERE actor_id = ? LIMIT 1'
    ).get(actorId)

    if (!profile) {
      pdb.prepare(`
        INSERT OR IGNORE INTO actor_profile
          (actor_id, session_id, background_id, actor_name, epoch, schema_version, recorded_at)
        VALUES (?, ?, ?, ?, 'roman_14bce', 3, ?)
      `).run(
        actorId,
        actorId,
        body.background_id || 'unknown',
        body.actor_name || 'Mercator',
        now
      )
      // Seed parameters from background_starting_values.
      // NOTE(review): this seeds every parameter with layer 'roman', while the
      // JSON-migration path splits 'roman'/'universal' per token — confirm
      // which is intended and unify.
      pdb.prepare(`
        INSERT OR IGNORE INTO actor_parameters
          (actor_id, parameter_token, scope, layer,
           value_true, value_perceived, confidence_tag, observable_level,
           drift_source, recorded_at)
        SELECT ?, parameter_token, 'actor', 'roman',
               value_true, value_perceived, confidence_tag, observable_level,
               'initial', ?
        FROM background_starting_values
        WHERE background_id = ?
      `).run(actorId, now, body.background_id || 'unknown')
    }

    // Update liquiditas if den present in the payload.
    if (body.den !== undefined) {
      updateParam(pdb, actorId, 'liquiditas', String(body.den), String(body.den), now)
    }

    // Update auctoritas if aut present — stored as an ordinal band, not the raw int.
    if (body.aut !== undefined) {
      const band = autBand(body.aut)
      updateParamWithSocial(pdb, actorId, 'auctoritas', band, band, band, now)
    }

    // Append new events — only those newer than the last recorded one.
    // ISO-8601 UTC strings compare correctly as plain strings; assumes the
    // client's ev.timestamp_utc values are ISO-8601 UTC — TODO confirm.
    const lastRecorded = pdb.prepare(`
      SELECT recorded_at FROM events
      WHERE actor_id = ?
      ORDER BY recorded_at DESC LIMIT 1
    `).get(actorId)
    const lastTs = lastRecorded?.recorded_at || '1970-01-01T00:00:00.000Z'

    const insertEvent = pdb.prepare(`
      INSERT INTO events (actor_id, event_type, ref_id, ref_type, payload, recorded_at)
      VALUES (?, ?, ?, ?, ?, ?)
    `)

    for (const ev of (body.events || [])) {
      const ts = ev.timestamp_utc || now
      if (ts > lastTs) {
        insertEvent.run(
          actorId,
          ev.type || 'unknown',
          ev.route_id || null,
          ev.route_id ? 'venture' : null,
          JSON.stringify(ev),
          ts
        )
      }
    }
  })

  applyState()
}
|
||||
|
||||
// Append a new actor_parameters row and supersede the previous one.
function updateParam(pdb, actorId, token, valueTrue, valuePerceived, now) {
  const closePrevious = pdb.prepare(`
    UPDATE actor_parameters
    SET superseded_at = ?
    WHERE actor_id = ? AND parameter_token = ? AND superseded_at IS NULL
  `)
  const appendRow = pdb.prepare(`
    INSERT INTO actor_parameters
      (actor_id, parameter_token, scope, layer,
       value_true, value_perceived,
       confidence_tag, observable_level, drift_source, recorded_at)
    VALUES (?, ?, 'actor', 'roman', ?, ?, 'measured', 'full', 'game_state', ?)
  `)

  // Close the currently-open row first so at most one row per parameter
  // has superseded_at IS NULL, then append the replacement.
  closePrevious.run(now, actorId, token)
  appendRow.run(actorId, token, valueTrue, valuePerceived, now)
}
|
||||
|
||||
// Like updateParam, but also records value_social; rows are tagged
// 'indicated'/'partial' rather than 'measured'/'full'.
function updateParamWithSocial(pdb, actorId, token, valueTrue, valuePerceived, valueSocial, now) {
  const closePrevious = pdb.prepare(`
    UPDATE actor_parameters
    SET superseded_at = ?
    WHERE actor_id = ? AND parameter_token = ? AND superseded_at IS NULL
  `)
  const appendRow = pdb.prepare(`
    INSERT INTO actor_parameters
      (actor_id, parameter_token, scope, layer,
       value_true, value_perceived, value_social,
       confidence_tag, observable_level, drift_source, recorded_at)
    VALUES (?, ?, 'actor', 'roman', ?, ?, ?, 'indicated', 'partial', 'game_state', ?)
  `)

  // Supersede the open row, then append the replacement.
  closePrevious.run(now, actorId, token)
  appendRow.run(actorId, token, valueTrue, valuePerceived, valueSocial, now)
}
|
||||
|
||||
// ── Fastify server ───────────────────────────────────────────────────────────
|
||||
|
||||
const fastify = Fastify({ logger: false })
|
||||
|
||||
await fastify.register(fastifyStatic, {
|
||||
@@ -95,23 +465,55 @@ fastify.get('/api/map/:h5/:epoch', async (req, reply) => {
|
||||
})
|
||||
})
|
||||
|
||||
// GET /api/save/:token
// Returns the current player state.
// Priority: SQLite > JSON migration > 404.
//
// (Reconstructed: the diff rendering had fused the removed JSON-only code
// into this handler. pdb.close() now runs in `finally` so a throwing
// readPlayerState/migration cannot leak the database handle.)
fastify.get('/api/save/:token', async (req, reply) => {
  const { token } = req.params
  if (!/^[0-9a-f]{8}$/.test(token)) {
    return reply.code(400).send({ error: 'Invalid token' })
  }

  const sqlitePath = join(SAVES_DIR, `${token}.sqlite3`)
  const jsonPath = join(SAVES_DIR, `${token}.json`)

  // Case 1: SQLite exists — read from it.
  if (existsSync(sqlitePath)) {
    try {
      const pdb = openPlayerDb(token)
      try {
        const state = readPlayerState(token, pdb)
        return reply.send(state)
      } finally {
        pdb.close()
      }
    } catch (err) {
      console.error(`[save GET] SQLite read failed for ${token}:`, err.message)
      return reply.code(500).send({ error: 'Failed to read player database' })
    }
  }

  // Case 2: JSON exists — migrate to SQLite, return migrated state.
  // The .json file is left in place, never deleted.
  if (existsSync(jsonPath)) {
    try {
      const raw = await readFile(jsonPath, 'utf8')
      const json = JSON.parse(raw)
      const pdb = openPlayerDb(token)
      try {
        migrateJsonToSqlite(token, pdb, json)
        const state = readPlayerState(token, pdb)
        console.log(`[save GET] Migrated ${token}.json → ${token}.sqlite3`)
        return reply.send(state)
      } finally {
        pdb.close()
      }
    } catch (err) {
      console.error(`[save GET] Migration failed for ${token}:`, err.message)
      return reply.code(500).send({ error: 'Failed to migrate save' })
    }
  }

  // Case 3: Neither exists.
  return reply.code(404).send({ error: 'Save not found' })
})
|
||||
|
||||
// POST /api/save/:token
// Writes player state to SQLite.
// Creates the player database if it does not exist.
//
// (Reconstructed: the diff rendering fused the removed JSON-write code and
// its catch into this handler; the invalid-token 400 body fell between
// hunks and is restored to mirror the GET handler. NOTE(review): the legacy
// .json snapshot is no longer written here — SQLite is the system of record
// and .json files are only read for migration; confirm this is intended.)
fastify.post('/api/save/:token', async (req, reply) => {
  const { token } = req.params
  if (!/^[0-9a-f]{8}$/.test(token)) {
    return reply.code(400).send({ error: 'Invalid token' })
  }
  if (!req.body || typeof req.body !== 'object') {
    return reply.code(400).send({ error: 'Invalid body' })
  }

  try {
    const pdb = openPlayerDb(token)
    try {
      writePlayerState(token, pdb, req.body)
    } finally {
      pdb.close() // close even if writePlayerState throws
    }
    return reply.send({ ok: true })
  } catch (err) {
    console.error(`[save POST] SQLite write failed for ${token}:`, err.message)
    return reply.code(500).send({ error: 'Failed to write player database' })
  }
})
|
||||
|
||||
@@ -135,7 +540,7 @@ fastify.setNotFoundHandler((req, reply) => {
|
||||
|
||||
try {
|
||||
await fastify.listen({ port: 3000, host: '0.0.0.0' })
|
||||
console.log('OTIVM server running on port 3000')
|
||||
console.log('OTIVM server running on port 3000 — OTIVM-III')
|
||||
} catch (err) {
|
||||
console.error(err)
|
||||
process.exit(1)
|
||||
|
||||
Reference in New Issue
Block a user