Files
mass-driver/crates/mass-driver-backend/src/db.rs
scott 47ef19d76c Add Axum backend, Dockerfiles, and K8s deployment manifests
- Axum backend server: health check, transfer matrix cache (base64 BYTEA),
  route cache (JSONB), CORS, gzip compression, tracing
- Postgres schema: transfer_matrices + cached_routes tables with upserts
- Dockerfile.frontend: 3-stage (wasm-pack → SvelteKit → nginx:alpine)
- Dockerfile.backend: 2-stage (rust build → debian:bookworm-slim)
- nginx.conf: SPA fallback, WASM mime type, /api proxy to backend
- docker-compose.yml: Postgres + backend for local development
- K8s manifests: namespace, frontend/backend deployments with services,
  ingress routing, health probes, secret-based DATABASE_URL

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-08 12:22:07 -07:00

153 lines
4.0 KiB
Rust

use sqlx::PgPool;
/// Create the cache tables used by the backend, if they do not already
/// exist. Idempotent (`IF NOT EXISTS`), so it is safe to run on every
/// startup against a live database.
///
/// Schema:
/// - `transfer_matrices` — one row per simulation config, keyed by a
///   64-char hash; the matrix payload is opaque BYTEA.
/// - `cached_routes` — per (config, from, to) route results as JSONB,
///   cascade-deleted with their parent matrix row.
///
/// # Errors
/// Propagates the first `sqlx::Error` from a failed DDL statement.
pub async fn init_db(pool: &PgPool) -> Result<(), sqlx::Error> {
    let ddl = [
        r#"
CREATE TABLE IF NOT EXISTS transfer_matrices (
config_hash VARCHAR(64) PRIMARY KEY,
station_count INTEGER NOT NULL,
launch_velocity_kms REAL NOT NULL,
matrix_bytes BYTEA NOT NULL,
created_at TIMESTAMPTZ DEFAULT NOW(),
accessed_at TIMESTAMPTZ DEFAULT NOW()
);
"#,
        r#"
CREATE TABLE IF NOT EXISTS cached_routes (
id SERIAL PRIMARY KEY,
config_hash VARCHAR(64) REFERENCES transfer_matrices(config_hash) ON DELETE CASCADE,
from_station INTEGER NOT NULL,
to_station INTEGER NOT NULL,
route_json JSONB NOT NULL,
created_at TIMESTAMPTZ DEFAULT NOW(),
UNIQUE(config_hash, from_station, to_station)
);
"#,
    ];
    // Run each statement in order; transfer_matrices must exist before
    // cached_routes can reference it.
    for statement in ddl {
        sqlx::query(statement).execute(pool).await?;
    }
    tracing::info!("Database tables initialized");
    Ok(())
}
/// Insert or replace the transfer matrix cached under `config_hash`.
///
/// On conflict with an existing row the payload columns are overwritten
/// and `accessed_at` is refreshed, so the upsert doubles as a touch.
///
/// # Errors
/// Returns the underlying `sqlx::Error` if the statement fails.
pub async fn store_matrix(
    pool: &PgPool,
    config_hash: &str,
    station_count: i32,
    launch_velocity_kms: f32,
    matrix_bytes: &[u8],
) -> Result<(), sqlx::Error> {
    const UPSERT: &str = r#"
INSERT INTO transfer_matrices (config_hash, station_count, launch_velocity_kms, matrix_bytes)
VALUES ($1, $2, $3, $4)
ON CONFLICT (config_hash) DO UPDATE
SET station_count = EXCLUDED.station_count,
launch_velocity_kms = EXCLUDED.launch_velocity_kms,
matrix_bytes = EXCLUDED.matrix_bytes,
accessed_at = NOW()
"#;
    let upsert = sqlx::query(UPSERT)
        .bind(config_hash)
        .bind(station_count)
        .bind(launch_velocity_kms)
        .bind(matrix_bytes);
    upsert.execute(pool).await?;
    Ok(())
}
/// Look up a cached transfer matrix by config hash.
///
/// Uses `UPDATE … RETURNING` so that reading the row and bumping its
/// `accessed_at` timestamp happen in a single round trip; when no row
/// matches, zero rows are updated and `None` is returned.
///
/// # Errors
/// Returns the underlying `sqlx::Error` if the query fails.
pub async fn get_matrix(
    pool: &PgPool,
    config_hash: &str,
) -> Result<Option<MatrixRow>, sqlx::Error> {
    sqlx::query_as::<_, MatrixRow>(
        r#"
UPDATE transfer_matrices
SET accessed_at = NOW()
WHERE config_hash = $1
RETURNING config_hash, station_count, launch_velocity_kms, matrix_bytes, created_at, accessed_at
"#,
    )
    .bind(config_hash)
    .fetch_optional(pool)
    .await
}
/// Insert or replace the cached route for the (config, from, to) triple.
///
/// The `UNIQUE(config_hash, from_station, to_station)` constraint is the
/// conflict target; on conflict only `route_json` is replaced, leaving
/// `created_at` as the time the route was first cached.
///
/// # Errors
/// Returns the underlying `sqlx::Error` if the statement fails.
pub async fn store_route(
    pool: &PgPool,
    config_hash: &str,
    from_station: i32,
    to_station: i32,
    route_json: &serde_json::Value,
) -> Result<(), sqlx::Error> {
    const UPSERT: &str = r#"
INSERT INTO cached_routes (config_hash, from_station, to_station, route_json)
VALUES ($1, $2, $3, $4)
ON CONFLICT (config_hash, from_station, to_station) DO UPDATE
SET route_json = EXCLUDED.route_json
"#;
    let upsert = sqlx::query(UPSERT)
        .bind(config_hash)
        .bind(from_station)
        .bind(to_station)
        .bind(route_json);
    upsert.execute(pool).await?;
    Ok(())
}
/// Fetch the cached route for the (config, from, to) triple, or `None`
/// when no route has been cached for it. Read-only: unlike
/// [`get_matrix`], this does not touch any timestamp.
///
/// # Errors
/// Returns the underlying `sqlx::Error` if the query fails.
pub async fn get_route(
    pool: &PgPool,
    config_hash: &str,
    from_station: i32,
    to_station: i32,
) -> Result<Option<RouteRow>, sqlx::Error> {
    sqlx::query_as::<_, RouteRow>(
        r#"
SELECT id, config_hash, from_station, to_station, route_json, created_at
FROM cached_routes
WHERE config_hash = $1 AND from_station = $2 AND to_station = $3
"#,
    )
    .bind(config_hash)
    .bind(from_station)
    .bind(to_station)
    .fetch_optional(pool)
    .await
}
/// One row of the `transfer_matrices` table, as returned by [`get_matrix`].
///
/// Field names must match the SELECT/RETURNING column list exactly for
/// `sqlx::FromRow`, and they define the JSON keys for `serde::Serialize`.
#[derive(sqlx::FromRow, serde::Serialize)]
pub struct MatrixRow {
// 64-char hash identifying the simulation config (primary key).
pub config_hash: String,
pub station_count: i32,
pub launch_velocity_kms: f32,
// Opaque matrix payload stored as BYTEA; encoding is decided by the caller.
pub matrix_bytes: Vec<u8>,
// Option because the columns are nullable (DEFAULT NOW(), no NOT NULL).
pub created_at: Option<chrono::DateTime<chrono::Utc>>,
pub accessed_at: Option<chrono::DateTime<chrono::Utc>>,
}
/// One row of the `cached_routes` table, as returned by [`get_route`].
///
/// Field names must match the SELECT column list exactly for
/// `sqlx::FromRow`, and they define the JSON keys for `serde::Serialize`.
#[derive(sqlx::FromRow, serde::Serialize)]
pub struct RouteRow {
pub id: i32,
// Option because the FK column is declared without NOT NULL in the schema.
pub config_hash: Option<String>,
pub from_station: i32,
pub to_station: i32,
// Route result serialized by the caller; stored as JSONB.
pub route_json: serde_json::Value,
// Option because the column is nullable (DEFAULT NOW(), no NOT NULL).
pub created_at: Option<chrono::DateTime<chrono::Utc>>,
}