first commit

2025-11-25 16:58:24 +01:00
commit f5f5f10338
17 changed files with 5389 additions and 0 deletions

18
.env Normal file

@@ -0,0 +1,18 @@
POSTGRES_USER=map
POSTGRES_PASSWORD=map
POSTGRES_DB=map
POSTGRES_PORT=5432
REDIS_PORT=6379
MINIO_PORT=9000
MINIO_CONSOLE_PORT=9001
MINIO_ROOT_USER=minioadmin
MINIO_ROOT_PASSWORD=minioadmin
TILE_SERVICE_PORT=8081
ROUTE_SERVICE_PORT=8082
CLIENT_PORT=8080
SERVICE_LOG_LEVEL=debug

1
.gitignore vendored Normal file

@@ -0,0 +1 @@
/target

3778
Cargo.lock generated Normal file

File diff suppressed because it is too large

7
Cargo.toml Normal file

@@ -0,0 +1,7 @@
[workspace]
members = [
"backend",
"frontend",
"importer",
]
resolver = "2"

30
Dockerfile Normal file

@@ -0,0 +1,30 @@
# Build Frontend
FROM rust:latest as frontend-builder
WORKDIR /app
COPY frontend ./frontend
COPY backend/static ./backend/static
# Install wasm-pack
RUN curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh
WORKDIR /app/frontend
# Build frontend
RUN wasm-pack build --target web --out-name wasm --out-dir ../backend/static
# Build Backend
FROM rust:latest as backend-builder
WORKDIR /app
COPY backend ./backend
COPY --from=frontend-builder /app/backend/static ./backend/static
WORKDIR /app/backend
RUN cargo build --release
# Runtime
FROM debian:bookworm-slim
WORKDIR /app
COPY --from=backend-builder /app/backend/target/release/backend ./backend
COPY --from=frontend-builder /app/backend/static ./static
# Install ca-certificates for HTTPS if needed
RUN apt-get update && apt-get install -y ca-certificates && rm -rf /var/lib/apt/lists/*
ENV SCYLLA_URI=scylla:9042
EXPOSE 3000
CMD ["./backend"]

15
Makefile Normal file

@@ -0,0 +1,15 @@
.PHONY: dev-db dev-backend dev-frontend build-frontend

dev-db:
	docker-compose up -d

dev-backend:
	cd backend && cargo run

build-frontend:
	cd frontend && wasm-pack build --target web --out-name wasm --out-dir ../backend/static

dev-frontend:
	# This is a placeholder. Usually we'd use a dev server or just rebuild.
	# For now, we rely on the backend serving the static files.
	make build-frontend

1
backend/.gitignore vendored Normal file

@@ -0,0 +1 @@
/target

15
backend/Cargo.toml Normal file

@@ -0,0 +1,15 @@
[package]
name = "backend"
version = "0.1.0"
edition = "2021"

[dependencies]
axum = "0.7"
tokio = { version = "1.0", features = ["full"] }
scylla = "0.12" # Check for latest version, using a recent stable one
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
tower = "0.4"
tower-http = { version = "0.5", features = ["cors", "fs"] }
tracing = "0.1"
tracing-subscriber = "0.3"

83
backend/src/db.rs Normal file

@@ -0,0 +1,83 @@
use scylla::{Session, SessionBuilder};
use std::sync::Arc;

pub async fn initialize_schema(session: &Session) -> Result<(), Box<dyn std::error::Error>> {
    // Create keyspace
    session
        .query(
            "CREATE KEYSPACE IF NOT EXISTS map_data WITH REPLICATION = { 'class' : 'SimpleStrategy', 'replication_factor' : 1 }",
            &[],
        )
        .await?;

    // Create table for OSM nodes (points)
    // Partition by tile coordinates (zoom, x, y) for efficient retrieval
    // This is a simplified schema. Real OSM data is more complex.
    session
        .query(
            "CREATE TABLE IF NOT EXISTS map_data.nodes (
                zoom int,
                tile_x int,
                tile_y int,
                id bigint,
                lat double,
                lon double,
                tags map<text, text>,
                PRIMARY KEY ((zoom, tile_x, tile_y), id)
            )",
            &[],
        )
        .await?;

    session
        .query(
            "CREATE TABLE IF NOT EXISTS map_data.ways (
                zoom int,
                tile_x int,
                tile_y int,
                id bigint,
                tags map<text, text>,
                points blob,
                PRIMARY KEY ((zoom, tile_x, tile_y), id)
            )",
            &[],
        )
        .await?;

    session
        .query(
            "CREATE TABLE IF NOT EXISTS map_data.buildings (
                zoom int,
                tile_x int,
                tile_y int,
                id bigint,
                tags map<text, text>,
                points blob,
                PRIMARY KEY ((zoom, tile_x, tile_y), id)
            )",
            &[],
        )
        .await?;

    println!("Schema initialized.");
    Ok(())
}

pub async fn seed_data(session: &Session) -> Result<(), Box<dyn std::error::Error>> {
    // Insert some dummy data for Munich (approx lat/lon)
    // Munich is roughly at lat 48.1351, lon 11.5820
    // At zoom 10, this falls into a specific tile.
    // For simplicity, we'll just use a fixed tile coordinate for testing: 10/500/500 (not accurate, just for ID)
    let insert_stmt = "INSERT INTO map_data.nodes (zoom, tile_x, tile_y, id, lat, lon, tags) VALUES (?, ?, ?, ?, ?, ?, ?)";
    let prepared = session.prepare(insert_stmt).await?;

    // Point 1: Marienplatz
    session.execute(&prepared, (10, 500, 500, 1_i64, 48.137, 11.575, std::collections::HashMap::from([("name".to_string(), "Marienplatz".to_string())]))).await?;
    // Point 2: English Garden
    session.execute(&prepared, (10, 500, 500, 2_i64, 48.150, 11.590, std::collections::HashMap::from([("name".to_string(), "English Garden".to_string())]))).await?;

    println!("Test data seeded.");
    Ok(())
}
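Note (not part of the commit): the points blob shared by the ways and buildings tables stores consecutive big-endian f64 pairs, latitude then longitude, 16 bytes per vertex; the importer writes this layout and the tile handlers in main.rs read it back. A minimal standalone sketch of that encoding, using hypothetical encode_points/decode_points helpers that do not exist in this repository:

// Hypothetical helpers illustrating the blob layout used for the `points` column:
// each vertex is 16 bytes, lat then lon, both big-endian f64.
fn encode_points(points: &[(f64, f64)]) -> Vec<u8> {
    let mut blob = Vec::with_capacity(points.len() * 16);
    for (lat, lon) in points {
        blob.extend_from_slice(&lat.to_be_bytes());
        blob.extend_from_slice(&lon.to_be_bytes());
    }
    blob
}

fn decode_points(blob: &[u8]) -> Vec<(f64, f64)> {
    blob.chunks_exact(16)
        .map(|chunk| {
            let lat = f64::from_be_bytes(chunk[0..8].try_into().unwrap());
            let lon = f64::from_be_bytes(chunk[8..16].try_into().unwrap());
            (lat, lon)
        })
        .collect()
}

fn main() {
    // Round-trip the two seeded Munich points.
    let pts = vec![(48.137, 11.575), (48.150, 11.590)];
    assert_eq!(decode_points(&encode_points(&pts)), pts);
}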

146
backend/src/main.rs Normal file

@@ -0,0 +1,146 @@
mod db;

use axum::{
    routing::get,
    Router,
    extract::{State, Path},
    Json,
};
use scylla::{Session, SessionBuilder};
use std::sync::Arc;
use tower_http::services::ServeDir;
use tower_http::cors::CorsLayer;
use serde::Serialize;

struct AppState {
    scylla_session: Arc<Session>,
}

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Initialize tracing
    tracing_subscriber::fmt::init();

    println!("Connecting to ScyllaDB...");
    let uri = std::env::var("SCYLLA_URI").unwrap_or_else(|_| "127.0.0.1:9042".to_string());
    let session = SessionBuilder::new()
        .known_node(uri)
        .build()
        .await?;

    // Initialize schema and seed data
    db::initialize_schema(&session).await?;
    db::seed_data(&session).await?;

    let session = Arc::new(session);
    println!("Connected to ScyllaDB!");

    let state = Arc::new(AppState {
        scylla_session: session,
    });

    let app = Router::new()
        .route("/health", get(health_check))
        .route("/api/tiles/:z/:x/:y", get(get_tile))
        .route("/api/tiles/:z/:x/:y/ways", get(get_tile_ways))
        .route("/api/tiles/:z/:x/:y/buildings", get(get_tile_buildings))
        .nest_service("/", ServeDir::new("static"))
        .layer(CorsLayer::permissive())
        .with_state(state);

    let listener = tokio::net::TcpListener::bind("0.0.0.0:3000").await?;
    println!("Server listening on {}", listener.local_addr()?);
    axum::serve(listener, app).await?;

    Ok(())
}

async fn health_check() -> &'static str {
    "OK"
}

#[derive(Serialize)]
struct MapNode {
    id: i64,
    lat: f64,
    lon: f64,
    tags: std::collections::HashMap<String, String>,
}

async fn get_tile(
    Path((z, x, y)): Path<(i32, i32, i32)>,
    State(state): State<Arc<AppState>>,
) -> Json<Vec<MapNode>> {
    let query = "SELECT id, lat, lon, tags FROM map_data.nodes WHERE zoom = ? AND tile_x = ? AND tile_y = ?";
    let rows = state.scylla_session.query(query, (z, x, y)).await.unwrap().rows.unwrap_or_default();

    let mut nodes = Vec::new();
    for row in rows {
        let (id, lat, lon, tags) = row.into_typed::<(i64, f64, f64, std::collections::HashMap<String, String>)>().unwrap();
        nodes.push(MapNode { id, lat, lon, tags });
    }

    Json(nodes)
}

#[derive(Serialize)]
struct MapWay {
    id: i64,
    tags: std::collections::HashMap<String, String>,
    points: Vec<Vec<f64>>, // List of [lat, lon]
}

async fn get_tile_ways(
    Path((z, x, y)): Path<(i32, i32, i32)>,
    State(state): State<Arc<AppState>>,
) -> Json<Vec<MapWay>> {
    let query = "SELECT id, tags, points FROM map_data.ways WHERE zoom = ? AND tile_x = ? AND tile_y = ?";
    let rows = state.scylla_session.query(query, (z, x, y)).await.unwrap().rows.unwrap_or_default();

    let mut ways = Vec::new();
    for row in rows {
        let (id, tags, points_blob) = row.into_typed::<(i64, std::collections::HashMap<String, String>, Vec<u8>)>().unwrap();

        // Deserialize points blob
        let mut points = Vec::new();
        for chunk in points_blob.chunks(16) {
            if chunk.len() == 16 {
                let lat = f64::from_be_bytes(chunk[0..8].try_into().unwrap());
                let lon = f64::from_be_bytes(chunk[8..16].try_into().unwrap());
                points.push(vec![lat, lon]);
            }
        }

        ways.push(MapWay { id, tags, points });
    }

    Json(ways)
}

async fn get_tile_buildings(
    Path((z, x, y)): Path<(i32, i32, i32)>,
    State(state): State<Arc<AppState>>,
) -> Json<Vec<MapWay>> {
    let query = "SELECT id, tags, points FROM map_data.buildings WHERE zoom = ? AND tile_x = ? AND tile_y = ?";
    let rows = state.scylla_session.query(query, (z, x, y)).await.unwrap().rows.unwrap_or_default();

    let mut buildings = Vec::new();
    for row in rows {
        let (id, tags, points_blob) = row.into_typed::<(i64, std::collections::HashMap<String, String>, Vec<u8>)>().unwrap();

        // Deserialize points blob
        let mut points = Vec::new();
        for chunk in points_blob.chunks(16) {
            if chunk.len() == 16 {
                let lat = f64::from_be_bytes(chunk[0..8].try_into().unwrap());
                let lon = f64::from_be_bytes(chunk[8..16].try_into().unwrap());
                points.push(vec![lat, lon]);
            }
        }

        buildings.push(MapWay { id, tags, points });
    }

    Json(buildings)
}
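Note (not part of the commit): a minimal client sketch against the tile endpoint above, assuming the backend is running locally on port 3000 with the seeded Munich points, using reqwest (already a dependency of the frontend crate) and a hypothetical TileNode struct mirroring MapNode:

use serde::Deserialize;
use std::collections::HashMap;

// Hypothetical mirror of the backend's MapNode response shape.
#[derive(Deserialize, Debug)]
struct TileNode {
    id: i64,
    lat: f64,
    lon: f64,
    tags: HashMap<String, String>,
}

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // The seed data in db.rs is written to the fixed tile 10/500/500.
    let url = "http://localhost:3000/api/tiles/10/500/500";
    let nodes: Vec<TileNode> = reqwest::get(url).await?.json().await?;
    for node in nodes {
        println!("{} {:?} @ ({}, {})", node.id, node.tags.get("name"), node.lat, node.lon);
    }
    Ok(())
}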

27
docker-compose.yml Normal file

@@ -0,0 +1,27 @@
version: '3'

services:
  scylla:
    image: scylladb/scylla:latest
    container_name: scylla
    ports:
      - "9042:9042"
      - "9160:9160"
      - "10000:10000"
    command: --smp 1 --memory 750M --overprovisioned 1 --api-address 0.0.0.0 --max-memory-for-unlimited-query-soft-limit 10485760 --tombstone-warn-threshold 100000
    volumes:
      - scylla_data:/var/lib/scylla

  app:
    build: .
    container_name: map-app
    ports:
      - "3000:3000"
    depends_on:
      - scylla
    environment:
      - SCYLLA_URI=scylla:9042
    restart: always

volumes:
  scylla_data:

1
frontend/.gitignore vendored Normal file

@@ -0,0 +1 @@
/target

45
frontend/Cargo.toml Normal file

@@ -0,0 +1,45 @@
[package]
name = "frontend"
version = "0.1.0"
edition = "2021"

[lib]
crate-type = ["cdylib", "rlib"]

[dependencies]
wasm-bindgen = "0.2"
wasm-bindgen-futures = "0.4"
web-sys = { version = "0.3", features = [
    "Document",
    "Window",
    "Element",
    "HtmlCanvasElement",
    "HtmlElement",
    "Node",
    "console",
    "Response",
    "HtmlButtonElement",
    "Event",
    "MouseEvent",
    "Geolocation",
    "Navigator",
    "Position",
    "PositionError",
    "Coordinates",
    "Cache",
    "CacheStorage",
    "Request",
    "RequestInit",
    "RequestMode",
    "Response",
] }
wgpu = { version = "0.19", default-features = false, features = ["webgl", "wgsl"] }
winit = { version = "0.29", default-features = false, features = ["rwh_06"] }
bytemuck = { version = "1.14", features = ["derive"] }
log = "0.4"
console_error_panic_hook = "0.1"
console_log = "1.0"
reqwest = { version = "0.11", features = ["json"] }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
earcutr = "0.4"

1047
frontend/src/lib.rs Normal file

File diff suppressed because it is too large
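Note (not part of the commit, since the lib.rs diff is suppressed above): a generic sketch of the kind of wasm-bindgen entry point a cdylib crate built with "wasm-pack build --target web" exposes; the actual function names and rendering code in this commit's lib.rs are not shown here:

use wasm_bindgen::prelude::*;

// Runs automatically when the generated wasm module is loaded in the browser.
#[wasm_bindgen(start)]
pub fn start() -> Result<(), JsValue> {
    // Route panics and log output to the browser console, matching the
    // console_error_panic_hook and console_log dependencies declared above.
    console_error_panic_hook::set_once();
    console_log::init_with_level(log::Level::Debug).ok();
    log::info!("frontend wasm module loaded");
    Ok(())
}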

1
importer/.gitignore vendored Normal file

@@ -0,0 +1 @@
/target

10
importer/Cargo.toml Normal file

@@ -0,0 +1,10 @@
[package]
name = "importer"
version = "0.1.0"
edition = "2021"

[dependencies]
osmpbf = "0.3" # Pure Rust PBF parser, easier to build than osmium (C++ bindings)
scylla = "0.12"
tokio = { version = "1.0", features = ["full"] }
anyhow = "1.0"

164
importer/src/main.rs Normal file

@@ -0,0 +1,164 @@
use anyhow::Result;
use osmpbf::{Element, ElementReader};
use scylla::{Session, SessionBuilder};
use std::collections::HashMap;
use tokio::task::JoinSet;

#[tokio::main]
async fn main() -> Result<()> {
    // Connect to ScyllaDB
    let uri = std::env::var("SCYLLA_URI").unwrap_or_else(|_| "127.0.0.1:9042".to_string());
    println!("Connecting to ScyllaDB at {}...", uri);
    let session = SessionBuilder::new().known_node(uri).build().await?;
    let session = std::sync::Arc::new(session);

    // Ensure schema exists
    session.query("CREATE KEYSPACE IF NOT EXISTS map_data WITH REPLICATION = { 'class' : 'SimpleStrategy', 'replication_factor' : 1 }", &[]).await?;

    // Create tables
    session.query("CREATE TABLE IF NOT EXISTS map_data.nodes (zoom int, tile_x int, tile_y int, id bigint, lat double, lon double, tags map<text, text>, PRIMARY KEY ((zoom, tile_x, tile_y), id))", &[]).await?;
    session.query("CREATE TABLE IF NOT EXISTS map_data.ways (zoom int, tile_x int, tile_y int, id bigint, tags map<text, text>, points blob, PRIMARY KEY ((zoom, tile_x, tile_y), id))", &[]).await?;

    let path = "sample.osm.pbf";
    println!("Reading {}...", path);
    let reader = ElementReader::from_path(path)?;

    // Cache for node coordinates: ID -> (lat, lon)
    let mut node_cache = HashMap::<i64, (f64, f64)>::new();
    let mut join_set = JoinSet::new();

    let mut node_count = 0;
    let mut way_count = 0;
    let mut inserted_nodes = 0;
    let mut inserted_ways = 0;

    // We process sequentially: Nodes first, then Ways.
    reader.for_each(|element| {
        match element {
            Element::Node(node) => {
                node_count += 1;
                node_cache.insert(node.id(), (node.lat(), node.lon()));

                if node.tags().count() > 0 {
                    inserted_nodes += 1;
                    let session = session.clone();
                    let id = node.id();
                    let lat = node.lat();
                    let lon = node.lon();
                    let tags: HashMap<String, String> = node.tags().map(|(k, v)| (k.to_string(), v.to_string())).collect();
                    let (x, y) = lat_lon_to_tile(lat, lon, 10);

                    join_set.spawn(async move {
                        let _ = session.query(
                            "INSERT INTO map_data.nodes (zoom, tile_x, tile_y, id, lat, lon, tags) VALUES (?, ?, ?, ?, ?, ?, ?)",
                            (10, x, y, id, lat, lon, tags),
                        ).await;
                    });
                }
            }
            Element::DenseNode(node) => {
                node_count += 1;
                node_cache.insert(node.id(), (node.lat(), node.lon()));

                if node.tags().count() > 0 {
                    inserted_nodes += 1;
                    let session = session.clone();
                    let id = node.id();
                    let lat = node.lat();
                    let lon = node.lon();
                    let tags: HashMap<String, String> = node.tags().map(|(k, v)| (k.to_string(), v.to_string())).collect();
                    let (x, y) = lat_lon_to_tile(lat, lon, 10);

                    join_set.spawn(async move {
                        let _ = session.query(
                            "INSERT INTO map_data.nodes (zoom, tile_x, tile_y, id, lat, lon, tags) VALUES (?, ?, ?, ?, ?, ?, ?)",
                            (10, x, y, id, lat, lon, tags),
                        ).await;
                    });
                }
            }
            Element::Way(way) => {
                way_count += 1;
                let tags: HashMap<String, String> = way.tags().map(|(k, v)| (k.to_string(), v.to_string())).collect();

                // Filter for highways/roads OR buildings
                let is_highway = tags.contains_key("highway");
                let is_building = tags.contains_key("building");

                if is_highway || is_building {
                    let mut points = Vec::new();
                    // Resolve nodes
                    for node_id in way.refs() {
                        if let Some(&coords) = node_cache.get(&node_id) {
                            points.push(coords);
                        }
                    }

                    if points.len() >= 2 {
                        let session = session.clone();
                        let id = way.id();
                        // Insert into the tile of the first point
                        let (first_lat, first_lon) = points[0];
                        let (x, y) = lat_lon_to_tile(first_lat, first_lon, 10);

                        // Serialize points to blob (f64, f64) pairs
                        let mut blob = Vec::with_capacity(points.len() * 16);
                        for (lat, lon) in points {
                            blob.extend_from_slice(&lat.to_be_bytes());
                            blob.extend_from_slice(&lon.to_be_bytes());
                        }

                        if is_highway {
                            inserted_ways += 1;
                            let tags_clone = tags.clone();
                            let blob_clone = blob.clone();
                            let session = session.clone();
                            join_set.spawn(async move {
                                let _ = session.query(
                                    "INSERT INTO map_data.ways (zoom, tile_x, tile_y, id, tags, points) VALUES (?, ?, ?, ?, ?, ?)",
                                    (10, x, y, id, tags_clone, blob_clone),
                                ).await;
                            });
                        }

                        if is_building {
                            // inserted_buildings += 1; // Need to add this counter
                            let session = session.clone();
                            join_set.spawn(async move {
                                let _ = session.query(
                                    "INSERT INTO map_data.buildings (zoom, tile_x, tile_y, id, tags, points) VALUES (?, ?, ?, ?, ?, ?)",
                                    (10, x, y, id, tags, blob),
                                ).await;
                            });
                        }
                    }
                }
            }
            _ => {}
        }

        if (node_count + way_count) % 100_000 == 0 {
            println!("Processed {} nodes, {} ways...", node_count, way_count);
        }
    })?;

    println!("Finished processing. Nodes: {}, Ways: {}. Inserted Nodes: {}, Inserted Ways: {}", node_count, way_count, inserted_nodes, inserted_ways);
    println!("Waiting for pending inserts...");
    while let Some(_) = join_set.join_next().await {}
    println!("Done!");

    Ok(())
}

fn lat_lon_to_tile(lat: f64, lon: f64, zoom: u32) -> (i32, i32) {
    let n = 2.0f64.powi(zoom as i32);
    let x = (lon + 180.0) / 360.0 * n;
    let lat_rad = lat.to_radians();
    let y = (1.0 - (lat_rad.tan() + (1.0 / lat_rad.cos())).ln() / std::f64::consts::PI) / 2.0 * n;
    (x as i32, y as i32)
}
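Note (not part of the commit): lat_lon_to_tile is the standard slippy-map (Web Mercator) tile formula. A quick standalone check that copies the function above and runs it on the Munich coordinates mentioned in backend/src/db.rs; at zoom 10 they land in roughly tile (544, 355), not the fixed 10/500/500 tile used by the seed data:

// Copied from importer/src/main.rs for a self-contained check.
fn lat_lon_to_tile(lat: f64, lon: f64, zoom: u32) -> (i32, i32) {
    let n = 2.0f64.powi(zoom as i32);
    let x = (lon + 180.0) / 360.0 * n;
    let lat_rad = lat.to_radians();
    let y = (1.0 - (lat_rad.tan() + (1.0 / lat_rad.cos())).ln() / std::f64::consts::PI) / 2.0 * n;
    (x as i32, y as i32)
}

fn main() {
    // Munich city centre from the seed-data comment in db.rs.
    let (x, y) = lat_lon_to_tile(48.1351, 11.5820, 10);
    println!("Munich @ z10 -> tile {}/{}", x, y); // expected: roughly 544/355
}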