774 lines
28 KiB
JavaScript
774 lines
28 KiB
JavaScript
// OTIVM server — OTIVM-IV
|
|
// Per-player SQLite integration.
|
|
//
|
|
// Drift log trigger types:
|
|
// newDen < prevDen → trigger_type = 'dispatch_cost' (cost deduction on dispatch)
|
|
// newDen > prevDen → trigger_type = 'venture_complete' (return profit)
|
|
// aut band change → trigger_type = 'interval_complete' (otium rest)
|
|
// otium event → trigger_type = 'otium_access_fee', 'personal_maintenance',
|
|
// 'officia_obligation' (three separate entries)
|
|
//
|
|
// Otium expenditure constants — from docs/economy/cost-calibration-model.md:
|
|
// OTIUM_ACCESS_FEE_DN = 2.00 (LOW confidence)
|
|
// PERSONAL_MAINTENANCE_DN = 4.00 (MEDIUM confidence)
|
|
// OFFICIA_OBLIGATION_DN = 2.00 (LOW confidence)
|
|
// OTIUM_CYCLE_TOTAL_DN = 8.00
|
|
//
|
|
// Cost split — placeholder pending a proper cost model:
|
|
// cost_vectura = 60% of route cost (VECTVRA — freight charge)
|
|
// cost_portoria = 25% of route cost (PORTORIUM — customs duty)
|
|
// cost_other = 15% of route cost (horreum, incidentals)
|
|
//
|
|
// JSON migration: retained per roadmap — JSON files are never deleted.
|
|
|
|
import Fastify from 'fastify'
import fastifyStatic from '@fastify/static'
import Database from 'better-sqlite3'
import { randomUUID } from 'crypto'
import { existsSync, readFileSync } from 'fs'
import { readFile, mkdir } from 'fs/promises'
import { join, dirname } from 'path'
import { fileURLToPath } from 'url'
|
|
|
// ── Paths ────────────────────────────────────────────────────────────────────
const __dirname = dirname(fileURLToPath(import.meta.url))
const ROOT = join(__dirname, '..')                              // repo root (this file lives one level down)
const DIST = join(ROOT, 'dist')                                 // built client bundle, served statically
const SAVES_DIR = join(ROOT, 'data', 'saves')                   // per-player SQLite (and legacy JSON) saves
const DB_PATH = join(ROOT, 'data', 'otivm.sqlite3')             // shared world database (opened read-only)
const SCHEMA_PATH = join(ROOT, 'data', 'create_player_db.sql')  // schema applied to newly created player DBs

// Ensure the saves directory exists before any request handler touches it.
await mkdir(SAVES_DIR, { recursive: true })
|
|
|
|
// ── Otium expenditure constants ──────────────────────────────────────────────
// Source: docs/economy/cost-calibration-model.md
// Change only here — three separate drift log entries are written per otium cycle.
const OTIUM_ACCESS_FEE_DN = 2.00 // LOW confidence
const PERSONAL_MAINTENANCE_DN = 4.00 // MEDIUM confidence
const OFFICIA_OBLIGATION_DN = 2.00 // LOW confidence
// Derived total (8.00 dn) — kept as a sum so the three components above stay
// the single source of truth.
const OTIUM_CYCLE_TOTAL_DN = OTIUM_ACCESS_FEE_DN + PERSONAL_MAINTENANCE_DN + OFFICIA_OBLIGATION_DN

// ── Cost split constants ─────────────────────────────────────────────────────
// Placeholder split of a route's total cost (see file header): vectura 60%,
// portoria 25%; the remainder (~15%) is computed as cost_other in createVenture.
const COST_VECTURA_RATIO = 0.60
const COST_PORTORIA_RATIO = 0.25
|
|
|
|
// Transport mode per route id — recorded on venture_legs.mode.
const ROUTE_MODE = {
  olive: 'road',
  wine: 'road',
  grain: 'sea',
  linen: 'sea',
}

// Cargo description and unit of account per route id.
const ROUTE_CARGO = {
  olive: { type: 'Olive oil, Garum', unit: 'amphora' },
  wine: { type: 'Campanian wine, Wool', unit: 'amphora' },
  grain: { type: 'Adriatic grain, Amber', unit: 'modius' },
  linen: { type: 'Berber linen, Frankincense', unit: 'talent' },
}

// Origin/destination H3 cell ids per route, as bare hex strings.
const ROUTE_H3 = {
  olive: { origin: '851e805bfffffff', destination: '851e8333fffffff' },
  wine: { origin: '851e8333fffffff', destination: '851e8ba3fffffff' },
  grain: { origin: '851e8ba3fffffff', destination: '85386e23fffffff' },
  linen: { origin: '85386e23fffffff', destination: '853f5ba7fffffff' },
}

// Per-route economics: cost/profit in denarii, dispatch duration in real ms.
const ROUTE_ECONOMICS = {
  olive: { cost: 8, profit: 12, duration_ms: 6000 },
  wine: { cost: 14, profit: 22, duration_ms: 9000 },
  grain: { cost: 24, profit: 40, duration_ms: 12000 },
  linen: { cost: 38, profit: 70, duration_ms: 18000 },
}

// Real-time milliseconds per simulated day — converts duration_ms to
// duration_days in createVenture.
const MS_PER_SIM_DAY = 3_000
|
|
|
|
// ── TESSERA world database (read-only) ──────────────────────────────────────

// Shared world DB; opened read-only so request handlers can never mutate it.
const db = new Database(DB_PATH, { readonly: true })

// Sea-level offset (cm) for a named paleo epoch.
const stmtEpoch = db.prepare(
  'SELECT sl_offset_cm FROM paleo_epochs WHERE epoch_key = ?'
)

// Aggregate one H5 region's cells up to H7: per-H7 centroid (avg lat/lon),
// count of member cells whose elevation clears the epoch's sea level
// (h7_land), and total member cells (h9_total).
// NOTE(review): confirm the meaning of status = 2 against the tessera_cells
// schema — it filters which cells participate in the aggregate.
const stmtH7 = db.prepare(`
  SELECT
    h7,
    AVG(lat) AS lat,
    AVG(lon) AS lon,
    SUM(CASE WHEN elev_cm > ? THEN 1 ELSE 0 END) AS h7_land,
    COUNT(*) AS h9_total
  FROM tessera_cells
  WHERE h5 = ? AND status = 2
  GROUP BY h7
`)
|
|
|
|
// Convert an H3 cell id given as a bare hex string into a BigInt.
// Throws SyntaxError when the string is not valid hexadecimal.
function h3HexToInt(hexStr) {
  const prefixed = `0x${hexStr}`
  return BigInt(prefixed)
}
|
|
|
|
// ── Per-player database ──────────────────────────────────────────────────────

// Player-DB schema SQL, read once at module init (sync read is fine here).
const PLAYER_SCHEMA_SQL = readFileSync(SCHEMA_PATH, 'utf8')

// Open (creating if absent) the save database for a player token.
// Brand-new files get the full schema applied; WAL journaling and foreign
// keys are enabled on every open.
// NOTE(review): if exec() of the schema fails partway, the file still exists
// and the schema is never retried on the next open — consider a
// schema-version check instead of the existsSync probe.
function openPlayerDb(token) {
  const path = join(SAVES_DIR, `${token}.sqlite3`)
  const isNew = !existsSync(path)
  const pdb = new Database(path)
  pdb.pragma('journal_mode = WAL')
  pdb.pragma('foreign_keys = ON')
  if (isNew) {
    pdb.exec(PLAYER_SCHEMA_SQL)
  }
  return pdb
}
|
|
|
|
// Generate an RFC 4122 version-4 UUID for venture/leg ids.
// Uses Node's crypto.randomUUID (CSPRNG) instead of the previous
// Math.random-based template fill: Math.random is not cryptographically
// secure and has a far higher collision risk. Output format is unchanged.
function uuid() {
  return randomUUID()
}
|
|
|
|
// ── JSON → SQLite migration ──────────────────────────────────────────────────

// One-shot import of a legacy JSON save into a (freshly created) player DB.
// The JSON file itself is never deleted (see file header). The player token
// doubles as both actor_id and session_id.
function migrateJsonToSqlite(token, pdb, json) {
  const now = new Date().toISOString()
  const actorId = token
  const sessionId = token
  const background = json.background_id || 'unknown'
  const name = json.actor_name || 'Mercator'

  // Profile row. INSERT OR IGNORE makes a re-run of the migration harmless.
  pdb.prepare(`
    INSERT OR IGNORE INTO actor_profile
      (actor_id, session_id, background_id, actor_name, epoch, schema_version, recorded_at)
    VALUES (?, ?, ?, ?, 'roman_14bce', 5, ?)
  `).run(actorId, sessionId, background, name, now)

  // Seed every parameter from the background's starting values. The
  // enumerated tokens belong to the 'roman' layer; anything else is
  // 'universal'. value_social is seeded NULL here (auctoritas gets its
  // social value from the dedicated insert below when json.aut is present).
  pdb.prepare(`
    INSERT OR IGNORE INTO actor_parameters
      (actor_id, parameter_token, scope, layer,
       value_true, value_perceived, value_social,
       confidence_tag, observable_level, drift_source, recorded_at)
    SELECT
      ?, parameter_token, 'actor',
      CASE WHEN parameter_token IN ('auctoritas','clientela','liquiditas','fama',
        'disciplina','mercatus_scientia','itineris_scientia','ius_accessus',
        'periculum_tolerantia','negotiatio','litterae','officia_burden')
      THEN 'roman' ELSE 'universal' END,
      value_true, value_perceived, NULL,
      confidence_tag, observable_level, 'migration', ?
    FROM background_starting_values
    WHERE background_id = ?
  `).run(actorId, now, background)

  // Denarii carried over from the JSON save: insert a fresh measured
  // liquiditas row (stored as a string, like all parameter values).
  if (json.den !== undefined) {
    pdb.prepare(`
      INSERT INTO actor_parameters
        (actor_id, parameter_token, scope, layer,
         value_true, value_perceived, confidence_tag, observable_level,
         drift_source, recorded_at)
      VALUES (?, 'liquiditas', 'actor', 'roman', ?, ?, 'measured', 'full', 'migration', ?)
    `).run(actorId, String(json.den), String(json.den), now)
  }

  // Auctoritas is stored as a band label, not the raw integer; the band is
  // public, so value_social is filled in as well.
  if (json.aut !== undefined) {
    const band = autBand(json.aut)
    pdb.prepare(`
      INSERT INTO actor_parameters
        (actor_id, parameter_token, scope, layer,
         value_true, value_perceived, value_social,
         confidence_tag, observable_level, drift_source, recorded_at)
      VALUES (?, 'auctoritas', 'actor', 'roman', ?, ?, ?, 'indicated', 'partial', 'migration', ?)
    `).run(actorId, band, band, band, now)
  }

  // Replay the JSON event log verbatim; the original event object is kept
  // as a JSON payload so nothing is lost in translation.
  const insertEvent = pdb.prepare(`
    INSERT INTO events (actor_id, event_type, ref_id, ref_type, payload, recorded_at)
    VALUES (?, ?, ?, ?, ?, ?)
  `)
  for (const ev of (json.events || [])) {
    insertEvent.run(
      actorId, ev.type || 'unknown',
      ev.route_id || null,
      ev.route_id ? 'venture' : null,
      JSON.stringify(ev),
      ev.timestamp_utc || now
    )
  }
  // Marker event recording that this save came from a JSON migration.
  insertEvent.run(
    actorId, 'session_start', null, null,
    JSON.stringify({ source: 'json_migration', schema_version: 5 }),
    now
  )
}
|
|
|
|
// Map a numeric auctoritas value to its public band label.
// Thresholds (inclusive): 30 → distinguished, 15 → high, 5 → medium,
// anything lower → low.
function autBand(aut) {
  const bands = [
    [30, 'distinguished'],
    [15, 'high'],
    [5, 'medium'],
  ]
  for (const [threshold, label] of bands) {
    if (aut >= threshold) return label
  }
  return 'low'
}
|
|
|
|
// ── Read player state ────────────────────────────────────────────────────────

// Rebuild the client-facing state object from the player DB: current
// parameter values, the full event log, derived dispatch/journal counters,
// any in-flight dispatch, and the chapter implied by den/aut thresholds.
function readPlayerState(token, pdb) {
  // Current (non-superseded) parameter rows, newest first.
  const params = pdb.prepare(`
    SELECT parameter_token, value_true, value_perceived, value_social,
           confidence_tag, observable_level
    FROM actor_parameters
    WHERE actor_id = ? AND superseded_at IS NULL
    ORDER BY recorded_at DESC
  `).all(token)

  // First (i.e. most recent) row per token wins.
  const paramMap = {}
  for (const p of params) {
    if (!paramMap[p.parameter_token]) paramMap[p.parameter_token] = p
  }

  // NOTE(review): parseInt truncates any fractional denarii stored by the
  // otium handler (which writes parseFloat-derived strings) — confirm
  // whole-denarius den is intended here.
  const den = parseInt(paramMap['liquiditas']?.value_true || '0', 10)
  const autStr = paramMap['auctoritas']?.value_true || 'low'
  const aut = autToInt(autStr)

  // Full event log in chronological order; payload stays raw JSON text.
  const events = pdb.prepare(`
    SELECT event_type AS type, ref_id AS route_id, payload, recorded_at AS timestamp_utc
    FROM events
    WHERE actor_id = ?
    ORDER BY recorded_at ASC
  `).all(token)

  let dispatches = 0
  const route_dispatches = {}
  const journal_seen = []
  let active_dispatch = null
  let chapter = 1

  // Replay the log to derive counters and the currently active dispatch.
  for (const ev of events) {
    if (ev.type === 'dispatch_complete' && ev.route_id) {
      dispatches++
      route_dispatches[ev.route_id] = (route_dispatches[ev.route_id] || 0) + 1
    }
    if (ev.type === 'journal_unlock' && ev.route_id) {
      // Journal entries record which dispatch count they were unlocked at.
      journal_seen.push({ routeId: ev.route_id, dispatch: route_dispatches[ev.route_id] || 1 })
    }
    if (ev.type === 'venture_start' || ev.type === 'dispatch_start') {
      try {
        const p = JSON.parse(ev.payload || '{}')
        active_dispatch = {
          route_id: ev.route_id || p.route_id,
          started_utc: ev.timestamp_utc,
          duration_ms: p.duration_ms || 0,
        }
      } catch { /* ignore */ } // malformed payload: keep prior active_dispatch
    }
    if (ev.type === 'venture_complete' || ev.type === 'dispatch_complete') {
      active_dispatch = null
    }
  }

  // Chapter thresholds: both the aut band floor and a den floor must be met.
  if (aut >= 30 && den >= 800) chapter = 5
  else if (aut >= 15 && den >= 350) chapter = 4
  else if (aut >= 5 && den >= 120) chapter = 3
  else if (den >= 40) chapter = 2

  const profile = pdb.prepare(
    'SELECT * FROM actor_profile WHERE actor_id = ? ORDER BY recorded_at DESC LIMIT 1'
  ).get(token)

  return {
    token,
    background_id: profile?.background_id || 'unknown',
    actor_name: profile?.actor_name || 'Mercator',
    den,
    aut,
    chapter,
    dispatches,
    route_dispatches,
    journal_seen,
    active_dispatch,
    // Strip payloads from the event list sent to the client.
    events: events.map(e => ({
      type: e.type,
      route_id: e.route_id,
      timestamp_utc: e.timestamp_utc,
    })),
    created_at: profile?.recorded_at || new Date().toISOString(),
    schema_version: 5,
  }
}
|
|
|
|
// Invert autBand: map a band label back to a representative integer.
// Unknown labels (including 'low') map to 1.
function autToInt(band) {
  const REPRESENTATIVE = new Map([
    ['distinguished', 35],
    ['high', 20],
    ['medium', 7],
  ])
  return REPRESENTATIVE.get(band) ?? 1
}
|
|
|
|
// ── Seed parameters ──────────────────────────────────────────────────────────

// Copy a background's starting values into actor_parameters for an actor.
// The enumerated tokens belong to the 'roman' layer; all others are
// 'universal'. Auctoritas additionally gets a value_social equal to its
// value_true (the band is publicly visible); every other token seeds with
// value_social NULL. INSERT OR IGNORE keeps re-seeding harmless.
// If denOverride is provided, the seeded liquiditas row is superseded by a
// measured row carrying the override.
function seedParameters(pdb, actorId, backgroundId, denOverride, now) {
  pdb.prepare(`
    INSERT OR IGNORE INTO actor_parameters
      (actor_id, parameter_token, scope, layer,
       value_true, value_perceived, value_social,
       confidence_tag, observable_level, drift_source, recorded_at)
    SELECT
      ?, parameter_token, 'actor',
      CASE WHEN parameter_token IN ('auctoritas','clientela','liquiditas','fama',
        'disciplina','mercatus_scientia','itineris_scientia','ius_accessus',
        'periculum_tolerantia','negotiatio','litterae','officia_burden')
      THEN 'roman' ELSE 'universal' END,
      value_true, value_perceived,
      CASE WHEN parameter_token = 'auctoritas' THEN value_true ELSE NULL END,
      confidence_tag, observable_level, 'initial', ?
    FROM background_starting_values
    WHERE background_id = ?
  `).run(actorId, now, backgroundId)

  if (denOverride !== undefined) {
    // Close out the seeded liquiditas row, then insert the override as a
    // fresh measured value (parameter values are stored as strings).
    pdb.prepare(`
      UPDATE actor_parameters SET superseded_at = ?
      WHERE actor_id = ? AND parameter_token = 'liquiditas' AND superseded_at IS NULL
    `).run(now, actorId)
    pdb.prepare(`
      INSERT INTO actor_parameters
        (actor_id, parameter_token, scope, layer,
         value_true, value_perceived,
         confidence_tag, observable_level, drift_source, recorded_at)
      VALUES (?, 'liquiditas', 'actor', 'roman', ?, ?, 'measured', 'full', 'initial', ?)
    `).run(actorId, String(denOverride), String(denOverride), now)
  }
}
|
|
|
|
// ── Venture helpers ──────────────────────────────────────────────────────────

// Most recently recorded venture still marked active for this actor,
// or undefined when none exists.
function findActiveVenture(pdb, actorId) {
  const latestActive = pdb.prepare(`
    SELECT venture_id, venture_label FROM ventures
    WHERE actor_id = ? AND status = 'active'
    ORDER BY recorded_at DESC LIMIT 1
  `)
  return latestActive.get(actorId)
}
|
|
|
|
// Most recently completed venture for this actor (by completion time),
// or undefined when none exists.
function findLastCompletedVenture(pdb, actorId) {
  const latestComplete = pdb.prepare(`
    SELECT venture_id, venture_label FROM ventures
    WHERE actor_id = ? AND status = 'complete'
    ORDER BY completed_at DESC LIMIT 1
  `)
  return latestComplete.get(actorId)
}
|
|
|
|
// Create an active venture plus its single leg for a dispatched route.
// Returns the new venture_id, or null when routeId is unknown to the
// route tables.
function createVenture(pdb, actorId, routeId, now) {
  const eco = ROUTE_ECONOMICS[routeId]
  const cargo = ROUTE_CARGO[routeId]
  const h3 = ROUTE_H3[routeId]
  const mode = ROUTE_MODE[routeId] || 'road'
  if (!eco || !cargo || !h3) return null

  const ventureId = uuid()
  const legId = uuid()

  // Split the route's total cost per the placeholder ratios (file header):
  // vectura 60%, portoria 25%; cost_other takes the rounded remainder so
  // the three parts sum back to eco.cost.
  const costVectura = Math.round(eco.cost * COST_VECTURA_RATIO * 100) / 100
  const costPortoria = Math.round(eco.cost * COST_PORTORIA_RATIO * 100) / 100
  const costOther = Math.round((eco.cost - costVectura - costPortoria) * 100) / 100
  // Real-time dispatch duration converted to whole simulated days.
  const durationDays = Math.round(eco.duration_ms / MS_PER_SIM_DAY)

  // e.g. 'olive' → 'Olive route'
  const label = `${routeId.charAt(0).toUpperCase() + routeId.slice(1)} route`

  pdb.prepare(`
    INSERT INTO ventures
      (venture_id, actor_id, venture_label, status,
       cargo_type, cargo_unit, cost_total,
       recorded_at, started_at)
    VALUES (?, ?, ?, 'active', ?, ?, ?, ?, ?)
  `).run(ventureId, actorId, label, cargo.type, cargo.unit, eco.cost, now, now)

  // Single-leg venture: leg_sequence is always 1 for now.
  pdb.prepare(`
    INSERT INTO venture_legs
      (leg_id, venture_id, leg_sequence,
       origin_h3, destination_h3, mode,
       duration_days, cost_vectura, cost_portoria, cost_other, cost_total,
       status, recorded_at, started_at)
    VALUES (?, ?, 1, ?, ?, ?, ?, ?, ?, ?, ?, 'active', ?, ?)
  `).run(legId, ventureId, h3.origin, h3.destination, mode,
         durationDays, costVectura, costPortoria, costOther, eco.cost, now, now)

  return ventureId
}
|
|
|
|
// Mark a venture complete, recording its revenue and net outcome, and
// close out any still-active legs. No-op for an unknown routeId.
function closeVenture(pdb, actorId, routeId, ventureId, now) {
  const economics = ROUTE_ECONOMICS[routeId]
  if (!economics) return

  const completeVenture = pdb.prepare(`
    UPDATE ventures
    SET status = 'complete', revenue_total = ?, outcome_net = ?, completed_at = ?
    WHERE venture_id = ?
  `)
  const completeLegs = pdb.prepare(`
    UPDATE venture_legs SET status = 'complete', completed_at = ?
    WHERE venture_id = ? AND status = 'active'
  `)

  completeVenture.run(economics.profit, economics.profit - economics.cost, now, ventureId)
  completeLegs.run(now, ventureId)
}
|
|
|
|
// ── Drift log ────────────────────────────────────────────────────────────────

// Append one row to parameter_drift_log. Before/after values are
// stringified here, so callers may pass numbers or strings.
function writeDriftLog(pdb, actorId, paramToken, triggerType, triggerRef,
                       valueBefore, valueAfter, deltaNote, now) {
  const row = [
    actorId, paramToken, triggerType, triggerRef,
    String(valueBefore), String(valueAfter), deltaNote, now,
  ]
  pdb.prepare(`
    INSERT INTO parameter_drift_log
      (actor_id, parameter_token, trigger_type, trigger_ref,
       value_before, value_after, delta_note, recorded_at)
    VALUES (?, ?, ?, ?, ?, ?, ?, ?)
  `).run(...row)
}
|
|
|
|
// ── Write player state ───────────────────────────────────────────────────────

// Persist a client-posted state snapshot into the player DB:
//   1. Create the actor profile on first contact; patch a placeholder
//      'unknown' background once a real one arrives, seeding parameters.
//   2. Append only events newer than the latest stored recorded_at,
//      translating dispatch_start/complete into venture rows and otium
//      events into three liquiditas debits with drift log entries.
//   3. Reconcile body.den / body.aut against stored parameter values,
//      writing a drift log entry per change (trigger types per file header).
function writePlayerState(token, pdb, body) {
  const now = new Date().toISOString()
  const actorId = token

  const incomingBackground = body.background_id
  const isRealBackground = incomingBackground && incomingBackground !== 'unknown'

  const profile = pdb.prepare(
    'SELECT actor_id, background_id FROM actor_profile WHERE actor_id = ? LIMIT 1'
  ).get(actorId)

  if (!profile) {
    // First write for this token: create the profile, and seed parameters
    // only when we already know the real background.
    const backgroundId = incomingBackground || 'unknown'
    const actorName = body.actor_name || 'Mercator'

    pdb.prepare(`
      INSERT OR IGNORE INTO actor_profile
        (actor_id, session_id, background_id, actor_name, epoch, schema_version, recorded_at)
      VALUES (?, ?, ?, ?, 'roman_14bce', 5, ?)
    `).run(actorId, actorId, backgroundId, actorName, now)

    if (isRealBackground) {
      seedParameters(pdb, actorId, backgroundId, body.den, now)
    }

  } else if (profile.background_id === 'unknown' && isRealBackground) {
    // The profile was created before the client knew its background —
    // patch it and seed parameters now.
    pdb.prepare(`
      UPDATE actor_profile SET background_id = ?, schema_version = 5 WHERE actor_id = ?
    `).run(incomingBackground, actorId)

    seedParameters(pdb, actorId, incomingBackground, body.den, now)
    console.log(`[profile] Patched background_id for ${token}: unknown → ${incomingBackground}`)
  }

  // Read current values before any update — null if no prior row exists
  const currentLiq = pdb.prepare(`
    SELECT value_true FROM actor_parameters
    WHERE actor_id = ? AND parameter_token = 'liquiditas' AND superseded_at IS NULL
    ORDER BY recorded_at DESC LIMIT 1
  `).get(actorId)
  const currentAut = pdb.prepare(`
    SELECT value_true FROM actor_parameters
    WHERE actor_id = ? AND parameter_token = 'auctoritas' AND superseded_at IS NULL
    ORDER BY recorded_at DESC LIMIT 1
  `).get(actorId)

  const prevDen = currentLiq?.value_true ?? null
  const prevAut = currentAut?.value_true ?? null

  // ── Process new events ───────────────────────────────────────────────────

  // Latest stored event timestamp — used to skip events the client re-sends.
  const lastRecorded = pdb.prepare(`
    SELECT recorded_at FROM events WHERE actor_id = ?
    ORDER BY recorded_at DESC LIMIT 1
  `).get(actorId)
  const lastTs = lastRecorded?.recorded_at || '1970-01-01T00:00:00.000Z'

  const insertEvent = pdb.prepare(`
    INSERT INTO events (actor_id, event_type, ref_id, ref_type, payload, recorded_at)
    VALUES (?, ?, ?, ?, ?, ?)
  `)

  for (const ev of (body.events || [])) {
    const ts = ev.timestamp_utc || now
    // ISO-8601 strings order lexicographically, so string compare is a
    // valid timestamp compare here.
    if (ts <= lastTs) continue

    const routeId = ev.route_id || null

    if (ev.type === 'dispatch_start' && routeId) {
      // Materialize a venture row and tag the event payload with its id.
      const ventureId = createVenture(pdb, actorId, routeId, ts)
      insertEvent.run(
        actorId, ev.type, routeId, 'venture',
        JSON.stringify({ ...ev, venture_id: ventureId }), ts
      )
      continue
    }

    if (ev.type === 'dispatch_complete' && routeId) {
      // Close the active venture if one exists; record the event either way.
      const active = findActiveVenture(pdb, actorId)
      if (active) {
        closeVenture(pdb, actorId, routeId, active.venture_id, ts)
        insertEvent.run(
          actorId, ev.type, routeId, 'venture',
          JSON.stringify({ ...ev, venture_id: active.venture_id }), ts
        )
      } else {
        insertEvent.run(
          actorId, ev.type, routeId, 'venture',
          JSON.stringify(ev), ts
        )
      }
      continue
    }

    // Otium event — debit liquiditas in three named components
    // Each component writes a separate drift log entry so the sub-trace
    // records the cause of each debit individually.
    if (ev.type === 'otium') {
      insertEvent.run(
        actorId, ev.type, null, null,
        JSON.stringify(ev), ts
      )

      // Only debit if we have a prior liquiditas value to work from
      // NOTE(review): prevDen is read once before this loop; if a single
      // POST batch carried more than one otium event, each would debit
      // from the same stale value — confirm batches hold at most one.
      if (prevDen !== null) {
        let runningDen = parseFloat(prevDen)

        const deductions = [
          { amount: OTIUM_ACCESS_FEE_DN, trigger: 'otium_access_fee', note: 'Commercial information access and factor network maintenance.' },
          { amount: PERSONAL_MAINTENANCE_DN, trigger: 'personal_maintenance', note: 'Food, lodging, clothing upkeep, light, and local movement.' },
          { amount: OFFICIA_OBLIGATION_DN, trigger: 'officia_obligation', note: 'Patronage, tips, gifts, collegial contributions, and unrecovered favors.' },
        ]

        // Apply each component in order, clamping at zero so liquiditas
        // never goes negative.
        for (const deduction of deductions) {
          const denBefore = runningDen
          const denAfter = Math.max(0, runningDen - deduction.amount)
          runningDen = denAfter

          writeDriftLog(
            pdb, actorId, 'liquiditas',
            deduction.trigger,
            null,
            String(denBefore), String(denAfter),
            deduction.note,
            ts
          )
        }

        // Update liquiditas to the final post-otium value
        updateParam(pdb, actorId, 'liquiditas', String(runningDen), String(runningDen), ts)
      }

      continue
    }

    // Any other event type: store verbatim.
    insertEvent.run(
      actorId, ev.type || 'unknown',
      routeId,
      routeId ? 'venture' : null,
      JSON.stringify(ev), ts
    )
  }

  // ── Update parameters + drift log ────────────────────────────────────────
  //
  // Drift log trigger types:
  //   newDen < prevDen → 'dispatch_cost'     (cost deducted at dispatch)
  //   newDen > prevDen → 'venture_complete'  (profit returned on completion)
  //   aut band change  → 'interval_complete' (otium rest)
  //
  // No drift log entry when prevDen/prevAut is null (initial seeding).
  // Liquiditas may already have been updated by the otium handler above —
  // the body.den update below handles dispatch and venture changes only.

  if (body.den !== undefined) {
    const newDen = String(body.den)
    // Re-read current liquiditas in case otium handler already updated it
    const freshLiq = pdb.prepare(`
      SELECT value_true FROM actor_parameters
      WHERE actor_id = ? AND parameter_token = 'liquiditas' AND superseded_at IS NULL
      ORDER BY recorded_at DESC LIMIT 1
    `).get(actorId)
    const freshDen = freshLiq?.value_true ?? null

    if (freshDen === null) {
      // No prior value at all: record without a drift entry.
      updateParam(pdb, actorId, 'liquiditas', newDen, newDen, now)
    } else if (newDen !== freshDen) {
      // NOTE(review): this is a string comparison; numerically equal but
      // differently formatted values (e.g. '42' vs '42.0') would pass it
      // and record a spurious drift entry — confirm client formatting.
      const numNew = parseFloat(newDen)
      const numFresh = parseFloat(freshDen)
      updateParam(pdb, actorId, 'liquiditas', newDen, newDen, now)

      if (numNew < numFresh) {
        // Decrease → attributed to the cost of the active dispatch.
        const active = findActiveVenture(pdb, actorId)
        writeDriftLog(
          pdb, actorId, 'liquiditas',
          'dispatch_cost',
          active?.venture_id || null,
          freshDen, newDen,
          active?.venture_label || null,
          now
        )
      } else {
        // Increase → attributed to the most recently completed venture.
        const lastVenture = findLastCompletedVenture(pdb, actorId)
        writeDriftLog(
          pdb, actorId, 'liquiditas',
          'venture_complete',
          lastVenture?.venture_id || null,
          freshDen, newDen,
          lastVenture?.venture_label || null,
          now
        )
      }
    }
  }

  if (body.aut !== undefined) {
    // Auctoritas is stored as a band label; drift is logged only when the
    // band itself changes, not on every numeric change within a band.
    const newBand = autBand(body.aut)
    if (prevAut === null) {
      updateParamWithSocial(pdb, actorId, 'auctoritas', newBand, newBand, newBand, now)
    } else if (newBand !== prevAut) {
      updateParamWithSocial(pdb, actorId, 'auctoritas', newBand, newBand, newBand, now)
      writeDriftLog(
        pdb, actorId, 'auctoritas',
        'interval_complete',
        null,
        prevAut, newBand,
        'otium rest',
        now
      )
    }
  }
}
|
|
|
|
// Supersede the current row for `token` and insert a fresh measured value
// (drift_source 'game_state'). Values are stored as the strings given.
function updateParam(pdb, actorId, token, valueTrue, valuePerceived, now) {
  const supersede = pdb.prepare(`
    UPDATE actor_parameters SET superseded_at = ?
    WHERE actor_id = ? AND parameter_token = ? AND superseded_at IS NULL
  `)
  const insertFresh = pdb.prepare(`
    INSERT INTO actor_parameters
      (actor_id, parameter_token, scope, layer,
       value_true, value_perceived,
       confidence_tag, observable_level, drift_source, recorded_at)
    VALUES (?, ?, 'actor', 'roman', ?, ?, 'measured', 'full', 'game_state', ?)
  `)
  supersede.run(now, actorId, token)
  insertFresh.run(actorId, token, valueTrue, valuePerceived, now)
}
|
|
|
|
// Like updateParam, but also records value_social and uses the
// 'indicated'/'partial' confidence/observability pair (for publicly
// visible parameters such as auctoritas).
function updateParamWithSocial(pdb, actorId, token, valueTrue, valuePerceived, valueSocial, now) {
  const supersede = pdb.prepare(`
    UPDATE actor_parameters SET superseded_at = ?
    WHERE actor_id = ? AND parameter_token = ? AND superseded_at IS NULL
  `)
  const insertFresh = pdb.prepare(`
    INSERT INTO actor_parameters
      (actor_id, parameter_token, scope, layer,
       value_true, value_perceived, value_social,
       confidence_tag, observable_level, drift_source, recorded_at)
    VALUES (?, ?, 'actor', 'roman', ?, ?, ?, 'indicated', 'partial', 'game_state', ?)
  `)
  supersede.run(now, actorId, token)
  insertFresh.run(actorId, token, valueTrue, valuePerceived, valueSocial, now)
}
|
|
|
|
// ── Fastify server ───────────────────────────────────────────────────────────

const fastify = Fastify({ logger: false })

// Serve the built client bundle from dist/ at the site root.
await fastify.register(fastifyStatic, { root: DIST, prefix: '/' })
|
|
|
|
// GET /api/map/:h5/:epoch — H7-aggregated land/sea data for one H5 region
// at a given paleo epoch's sea level.
fastify.get('/api/map/:h5/:epoch', async (req, reply) => {
  const { h5: h5param, epoch: epochKey } = req.params

  // Validate params before touching the DB (15 hex chars / slug chars).
  if (!/^[0-9a-f]{15}$/.test(h5param))
    return reply.code(400).send({ error: 'Invalid H5 ID' })
  if (!/^[a-z0-9_]+$/.test(epochKey))
    return reply.code(400).send({ error: 'Invalid epoch key' })

  const epoch = stmtEpoch.get(epochKey)
  if (!epoch) return reply.code(404).send({ error: `Unknown epoch: ${epochKey}` })

  let h5int
  try { h5int = h3HexToInt(h5param) }
  catch { return reply.code(400).send({ error: 'Malformed H5 ID' }) }

  const rows = stmtH7.all(epoch.sl_offset_cm, h5int)
  if (!rows.length) return reply.code(404).send({ error: `No data for H5: ${h5param}` })

  return reply.send({
    epoch_key: epochKey,
    sl_offset_cm: epoch.sl_offset_cm,
    h5: h5param,
    cells: rows.map(row => ({
      // NOTE(review): h7 arrives as a JS number; 64-bit H3 ids exceed
      // 2^53, so confirm ids fit or enable better-sqlite3 safe integers.
      h7: row.h7.toString(16),
      lat: row.lat,
      lon: row.lon,
      h7_land: row.h7_land,
      h9_total: row.h9_total,
      // Majority vote: the H7 cell is land if more than half of its
      // member cells are above sea level.
      is_land: row.h7_land * 2 > row.h9_total ? 1 : 0,
    })),
  })
})
|
|
|
|
// GET /api/save/:token — load a player save.
// Prefers the SQLite save; falls back to migrating a legacy JSON save into
// SQLite on first access (the JSON file is kept, per the file header).
// Fix: the player DB handle is now closed in a finally block — previously
// it leaked whenever readPlayerState or the migration threw, keeping the
// file (and its WAL) open for the life of the process.
fastify.get('/api/save/:token', async (req, reply) => {
  const { token } = req.params
  if (!/^[0-9a-f]{8}$/.test(token))
    return reply.code(400).send({ error: 'Invalid token' })

  const sqlitePath = join(SAVES_DIR, `${token}.sqlite3`)
  const jsonPath = join(SAVES_DIR, `${token}.json`)

  if (existsSync(sqlitePath)) {
    let pdb
    try {
      pdb = openPlayerDb(token)
      const state = readPlayerState(token, pdb)
      return reply.send(state)
    } catch (err) {
      console.error(`[save GET] SQLite read failed for ${token}:`, err.message)
      return reply.code(500).send({ error: 'Failed to read player database' })
    } finally {
      // Close even on error; optional-call in case openPlayerDb itself threw.
      pdb?.close()
    }
  }

  if (existsSync(jsonPath)) {
    let pdb
    try {
      const raw = await readFile(jsonPath, 'utf8')
      const json = JSON.parse(raw)
      pdb = openPlayerDb(token)
      migrateJsonToSqlite(token, pdb, json)
      const state = readPlayerState(token, pdb)
      console.log(`[save GET] Migrated ${token}.json → ${token}.sqlite3`)
      return reply.send(state)
    } catch (err) {
      console.error(`[save GET] Migration failed for ${token}:`, err.message)
      return reply.code(500).send({ error: 'Failed to migrate save' })
    } finally {
      pdb?.close()
    }
  }

  return reply.code(404).send({ error: 'Save not found' })
})
|
|
|
|
// POST /api/save/:token — persist a client state snapshot.
// Fix: the player DB handle is now closed in a finally block — previously
// it leaked whenever writePlayerState threw.
fastify.post('/api/save/:token', async (req, reply) => {
  const { token } = req.params
  if (!/^[0-9a-f]{8}$/.test(token))
    return reply.code(400).send({ error: 'Invalid token' })
  if (!req.body || typeof req.body !== 'object')
    return reply.code(400).send({ error: 'Invalid body' })

  let pdb
  try {
    pdb = openPlayerDb(token)
    writePlayerState(token, pdb, req.body)
    return reply.send({ ok: true })
  } catch (err) {
    console.error(`[save POST] SQLite write failed for ${token}:`, err.message)
    return reply.code(500).send({ error: 'Failed to write player database' })
  } finally {
    // Close even on error; optional-call in case openPlayerDb itself threw.
    pdb?.close()
  }
})
|
|
|
|
// SPA fallback: any unknown path serves the client shell so in-app routes work.
fastify.setNotFoundHandler((req, reply) => { reply.sendFile('index.html') })
|
|
|
|
try {
  // Bind on all interfaces; port is fixed at 3000 (no env override yet).
  await fastify.listen({ port: 3000, host: '0.0.0.0' })
  console.log('OTIVM server running on port 3000 — OTIVM-IV')
} catch (err) {
  // Startup failure (e.g. port in use) is fatal.
  console.error(err)
  process.exit(1)
}
|