diff --git a/.env b/.env
index 943524c..76667b2 100644
--- a/.env
+++ b/.env
@@ -16,3 +16,4 @@
 CLIENT_PORT=8080
 SERVICE_LOG_LEVEL=debug
+OSM_PBF_PATH=europe-latest.osm.pbf
diff --git a/.gitignore b/.gitignore
index 3122780..0c87d91 100644
--- a/.gitignore
+++ b/.gitignore
@@ -4,4 +4,4 @@ scylla_data/
 pkg/
 node_modules/
 .DS_Store
-sample.osm.pbf
\ No newline at end of file
+*.pbf
\ No newline at end of file
diff --git a/Dockerfile b/Dockerfile
index 98c6b9e..c2a4499 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -17,14 +17,28 @@
 COPY --from=frontend-builder /app/backend/static ./backend/static
 WORKDIR /app/backend
 RUN cargo build --release
-# Runtime
-FROM debian:bookworm-slim
+# Build Importer
+FROM rust:latest as importer-builder
+WORKDIR /app
+COPY importer ./importer
+WORKDIR /app/importer
+RUN cargo build --release
+
+# Backend Runtime
+FROM debian:forky-slim as backend
 WORKDIR /app
 COPY --from=backend-builder /app/backend/target/release/backend ./backend
 COPY --from=frontend-builder /app/backend/static ./static
 # Install ca-certificates for HTTPS if needed
 RUN apt-get update && apt-get install -y ca-certificates && rm -rf /var/lib/apt/lists/*
-
 ENV SCYLLA_URI=scylla:9042
 EXPOSE 3000
 CMD ["./backend"]
+
+# Importer Runtime
+FROM debian:forky-slim as importer
+WORKDIR /app
+COPY --from=importer-builder /app/importer/target/release/importer ./importer
+# Install ca-certificates for HTTPS if needed
+RUN apt-get update && apt-get install -y ca-certificates && rm -rf /var/lib/apt/lists/*
+CMD ["./importer"]
diff --git a/backend/src/main.rs b/backend/src/main.rs
index c82a4d6..537b05c 100644
--- a/backend/src/main.rs
+++ b/backend/src/main.rs
@@ -48,6 +48,7 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
         .route("/api/tiles/:z/:x/:y/buildings", get(get_tile_buildings))
         .route("/api/tiles/:z/:x/:y/landuse", get(get_tile_landuse))
         .route("/api/tiles/:z/:x/:y/water", get(get_tile_water))
+        .route("/api/tiles/:z/:x/:y/railways", get(get_tile_railways))
         .nest_service("/", ServeDir::new("static"))
         .layer(CorsLayer::permissive())
         .with_state(state);
@@ -74,17 +75,22 @@ struct MapNode {
 async fn get_tile(
     Path((z, x, y)): Path<(i32, i32, i32)>,
     State(state): State<Arc<AppState>>,
-) -> Json<Vec<MapNode>> {
+) -> Result<Json<Vec<MapNode>>, (axum::http::StatusCode, String)> {
     let query = "SELECT id, lat, lon, tags FROM map_data.nodes WHERE zoom = ? AND tile_x = ? AND tile_y = ?";
-    let rows = state.scylla_session.query(query, (z, x, y)).await.unwrap().rows.unwrap_or_default();
+    let rows = state.scylla_session.query(query, (z, x, y))
+        .await
+        .map_err(|e| (axum::http::StatusCode::INTERNAL_SERVER_ERROR, format!("Database error: {}", e)))?
+        .rows
+        .unwrap_or_default();
 
     let mut nodes = Vec::new();
     for row in rows {
-        let (id, lat, lon, tags) = row.into_typed::<(i64, f64, f64, std::collections::HashMap<String, String>)>().unwrap();
+        let (id, lat, lon, tags) = row.into_typed::<(i64, f64, f64, std::collections::HashMap<String, String>)>()
+            .map_err(|e| (axum::http::StatusCode::INTERNAL_SERVER_ERROR, format!("Serialization error: {}", e)))?;
         nodes.push(MapNode { id, lat, lon, tags });
     }
 
-    Json(nodes)
+    Ok(Json(nodes))
 }
 
 #[derive(Serialize)]
@@ -97,13 +103,18 @@ struct MapWay {
 async fn get_tile_ways(
     Path((z, x, y)): Path<(i32, i32, i32)>,
     State(state): State<Arc<AppState>>,
-) -> Json<Vec<MapWay>> {
+) -> Result<Json<Vec<MapWay>>, (axum::http::StatusCode, String)> {
     let query = "SELECT id, tags, points FROM map_data.ways WHERE zoom = ? AND tile_x = ? AND tile_y = ?";
-    let rows = state.scylla_session.query(query, (z, x, y)).await.unwrap().rows.unwrap_or_default();
+    let rows = state.scylla_session.query(query, (z, x, y))
+        .await
+        .map_err(|e| (axum::http::StatusCode::INTERNAL_SERVER_ERROR, format!("Database error: {}", e)))?
+        .rows
+        .unwrap_or_default();
 
     let mut ways = Vec::new();
     for row in rows {
-        let (id, tags, points_blob) = row.into_typed::<(i64, std::collections::HashMap<String, String>, Vec<u8>)>().unwrap();
+        let (id, tags, points_blob) = row.into_typed::<(i64, std::collections::HashMap<String, String>, Vec<u8>)>()
+            .map_err(|e| (axum::http::StatusCode::INTERNAL_SERVER_ERROR, format!("Serialization error: {}", e)))?;
 
         // Deserialize points blob
         let mut points = Vec::new();
@@ -118,19 +129,24 @@ async fn get_tile_ways(
         ways.push(MapWay { id, tags, points });
     }
 
-    Json(ways)
+    Ok(Json(ways))
 }
 
 async fn get_tile_buildings(
     Path((z, x, y)): Path<(i32, i32, i32)>,
     State(state): State<Arc<AppState>>,
-) -> Json<Vec<MapWay>> {
+) -> Result<Json<Vec<MapWay>>, (axum::http::StatusCode, String)> {
     let query = "SELECT id, tags, points FROM map_data.buildings WHERE zoom = ? AND tile_x = ? AND tile_y = ?";
-    let rows = state.scylla_session.query(query, (z, x, y)).await.unwrap().rows.unwrap_or_default();
+    let rows = state.scylla_session.query(query, (z, x, y))
+        .await
+        .map_err(|e| (axum::http::StatusCode::INTERNAL_SERVER_ERROR, format!("Database error: {}", e)))?
+        .rows
+        .unwrap_or_default();
 
     let mut buildings = Vec::new();
     for row in rows {
-        let (id, tags, points_blob) = row.into_typed::<(i64, std::collections::HashMap<String, String>, Vec<u8>)>().unwrap();
+        let (id, tags, points_blob) = row.into_typed::<(i64, std::collections::HashMap<String, String>, Vec<u8>)>()
+            .map_err(|e| (axum::http::StatusCode::INTERNAL_SERVER_ERROR, format!("Serialization error: {}", e)))?;
 
         // Deserialize points blob
         let mut points = Vec::new();
@@ -145,45 +161,66 @@ async fn get_tile_buildings(
         buildings.push(MapWay { id, tags, points });
     }
 
-    Json(buildings)
+    Ok(Json(buildings))
 }
 
 async fn get_tile_landuse(
     Path((z, x, y)): Path<(i32, i32, i32)>,
     State(state): State<Arc<AppState>>,
-) -> Json<Vec<MapWay>> {
+) -> Result<Json<Vec<MapWay>>, (axum::http::StatusCode, String)> {
+    println!("Request: get_tile_landuse({}, {}, {})", z, x, y);
     let query = "SELECT id, tags, points FROM map_data.landuse WHERE zoom = ? AND tile_x = ? AND tile_y = ?";
-    let rows = state.scylla_session.query(query, (z, x, y)).await.unwrap().rows.unwrap_or_default();
+    println!("Executing query...");
+    let result = state.scylla_session.query(query, (z, x, y)).await;
 
-    let mut landuse = Vec::new();
-    for row in rows {
-        let (id, tags, points_blob) = row.into_typed::<(i64, std::collections::HashMap<String, String>, Vec<u8>)>().unwrap();
-
-        let mut points = Vec::new();
-        for chunk in points_blob.chunks(16) {
-            if chunk.len() == 16 {
-                let lat = f64::from_be_bytes(chunk[0..8].try_into().unwrap());
-                let lon = f64::from_be_bytes(chunk[8..16].try_into().unwrap());
-                points.push(vec![lat, lon]);
+    match result {
+        Ok(res) => {
+            println!("Query successful, processing rows...");
+            let rows = res.rows.unwrap_or_default();
+            let mut landuse = Vec::new();
+            for row in rows {
+                let (id, tags, points_blob) = row.into_typed::<(i64, std::collections::HashMap<String, String>, Vec<u8>)>()
+                    .map_err(|e| {
+                        println!("Serialization error: {}", e);
+                        (axum::http::StatusCode::INTERNAL_SERVER_ERROR, format!("Serialization error: {}", e))
+                    })?;
+
+                let mut points = Vec::new();
+                for chunk in points_blob.chunks(16) {
+                    if chunk.len() == 16 {
+                        let lat = f64::from_be_bytes(chunk[0..8].try_into().unwrap());
+                        let lon = f64::from_be_bytes(chunk[8..16].try_into().unwrap());
+                        points.push(vec![lat, lon]);
+                    }
+                }
+
+                landuse.push(MapWay { id, tags, points });
             }
+            println!("Returning {} landuse items", landuse.len());
+            Ok(Json(landuse))
+        },
+        Err(e) => {
+            println!("Query failed: {}", e);
+            Err((axum::http::StatusCode::INTERNAL_SERVER_ERROR, format!("Database error: {}", e)))
         }
-
-        landuse.push(MapWay { id, tags, points });
     }
-
-    Json(landuse)
 }
 
 async fn get_tile_water(
     Path((z, x, y)): Path<(i32, i32, i32)>,
     State(state): State<Arc<AppState>>,
-) -> Json<Vec<MapWay>> {
+) -> Result<Json<Vec<MapWay>>, (axum::http::StatusCode, String)> {
     let query = "SELECT id, tags, points FROM map_data.water WHERE zoom = ? AND tile_x = ? AND tile_y = ?";
-    let rows = state.scylla_session.query(query, (z, x, y)).await.unwrap().rows.unwrap_or_default();
+    let rows = state.scylla_session.query(query, (z, x, y))
+        .await
+        .map_err(|e| (axum::http::StatusCode::INTERNAL_SERVER_ERROR, format!("Database error: {}", e)))?
+        .rows
+        .unwrap_or_default();
 
     let mut water = Vec::new();
     for row in rows {
-        let (id, tags, points_blob) = row.into_typed::<(i64, std::collections::HashMap<String, String>, Vec<u8>)>().unwrap();
+        let (id, tags, points_blob) = row.into_typed::<(i64, std::collections::HashMap<String, String>, Vec<u8>)>()
+            .map_err(|e| (axum::http::StatusCode::INTERNAL_SERVER_ERROR, format!("Serialization error: {}", e)))?;
 
         let mut points = Vec::new();
         for chunk in points_blob.chunks(16) {
@@ -197,5 +234,36 @@ async fn get_tile_water(
         water.push(MapWay { id, tags, points });
     }
 
-    Json(water)
+    Ok(Json(water))
+}
+
+async fn get_tile_railways(
+    Path((z, x, y)): Path<(i32, i32, i32)>,
+    State(state): State<Arc<AppState>>,
+) -> Result<Json<Vec<MapWay>>, (axum::http::StatusCode, String)> {
+    let query = "SELECT id, tags, points FROM map_data.railways WHERE zoom = ? AND tile_x = ? AND tile_y = ?";
+    let rows = state.scylla_session.query(query, (z, x, y))
+        .await
+        .map_err(|e| (axum::http::StatusCode::INTERNAL_SERVER_ERROR, format!("Database error: {}", e)))?
+        .rows
+        .unwrap_or_default();
+
+    let mut railways = Vec::new();
+    for row in rows {
+        let (id, tags, points_blob) = row.into_typed::<(i64, std::collections::HashMap<String, String>, Vec<u8>)>()
+            .map_err(|e| (axum::http::StatusCode::INTERNAL_SERVER_ERROR, format!("Serialization error: {}", e)))?;
+
+        let mut points = Vec::new();
+        for chunk in points_blob.chunks(16) {
+            if chunk.len() == 16 {
+                let lat = f64::from_be_bytes(chunk[0..8].try_into().unwrap());
+                let lon = f64::from_be_bytes(chunk[8..16].try_into().unwrap());
+                points.push(vec![lat, lon]);
+            }
+        }
+
+        railways.push(MapWay { id, tags, points });
+    }
+
+    Ok(Json(railways))
 }
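Note: all five tile handlers decode the same point-blob layout that the importer writes: one 16-byte record per point, a big-endian f64 latitude followed by a big-endian f64 longitude. A minimal sketch of that codec factored into a shared pair of helpers (the names encode_points/decode_points are ours, not part of this diff):

    // Sketch only: mirrors the blob layout assumed by importer and backend.
    fn encode_points(points: &[(f64, f64)]) -> Vec<u8> {
        let mut blob = Vec::with_capacity(points.len() * 16);
        for (lat, lon) in points {
            blob.extend_from_slice(&lat.to_be_bytes());
            blob.extend_from_slice(&lon.to_be_bytes());
        }
        blob
    }

    fn decode_points(blob: &[u8]) -> Vec<Vec<f64>> {
        // chunks_exact drops a trailing partial record, matching the
        // `chunk.len() == 16` guard repeated in the handlers above.
        blob.chunks_exact(16)
            .map(|chunk| {
                let lat = f64::from_be_bytes(chunk[0..8].try_into().unwrap());
                let lon = f64::from_be_bytes(chunk[8..16].try_into().unwrap());
                vec![lat, lon]
            })
            .collect()
    }

Routing every handler through one helper like this would also keep the reader and the writer of the format from drifting apart.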
diff --git a/build.log b/build.log
new file mode 100644
index 0000000..3a0254a
--- /dev/null
+++ b/build.log
@@ -0,0 +1,266 @@
+time="2025-11-26T11:51:45+01:00" level=warning msg="/Users/ekstrah/Desktop/git/map/docker-compose.yml: the attribute `version` is obsolete, it will be ignored, please remove it to avoid potential confusion"
+#1 [internal] load local bake definitions
+#1 reading from stdin 520B done
+#1 DONE 0.0s
+
+#2 [internal] load build definition from Dockerfile
+#2 transferring dockerfile: 1.41kB done
+#2 WARN: FromAsCasing: 'as' and 'FROM' keywords' casing do not match (line 2)
+#2 WARN: FromAsCasing: 'as' and 'FROM' keywords' casing do not match (line 13)
+#2 WARN: FromAsCasing: 'as' and 'FROM' keywords' casing do not match (line 21)
+#2 WARN: FromAsCasing: 'as' and 'FROM' keywords' casing do not match (line 28)
+#2 WARN: FromAsCasing: 'as' and 'FROM' keywords' casing do not match (line 39)
+#2 DONE 0.0s
+
+#3 [internal] load metadata for docker.io/library/debian:forky-slim
+#3 ...
+
+#4 [internal] load metadata for docker.io/library/rust:latest
+#4 DONE 0.6s
+
+#3 [internal] load metadata for docker.io/library/debian:forky-slim
+#3 DONE 0.6s
+
+#5 [internal] load .dockerignore
+#5 transferring context: 2B done
+#5 DONE 0.0s
+
+#6 [backend 1/5] FROM docker.io/library/debian:forky-slim@sha256:7c8d9645032d8b0e0afa9f95d2cd34f7eedd2915562f5d19bf9c20dec1bf25fc
+#6 resolve docker.io/library/debian:forky-slim@sha256:7c8d9645032d8b0e0afa9f95d2cd34f7eedd2915562f5d19bf9c20dec1bf25fc done
+#6 DONE 0.0s
+
+#7 [backend 2/5] WORKDIR /app
+#7 CACHED
+
+#8 [frontend-builder 1/7] FROM docker.io/library/rust:latest@sha256:4a29b0db5c961cd530f39276ece3eb6e66925b59599324c8c19723b72a423615
+#8 resolve docker.io/library/rust:latest@sha256:4a29b0db5c961cd530f39276ece3eb6e66925b59599324c8c19723b72a423615 0.0s done
+#8 DONE 0.0s
+
+#9 [internal] load build context
+#9 transferring context: 746B done
+#9 DONE 0.0s
+
+#10 [frontend-builder 2/7] WORKDIR /app
+#10 CACHED
+
+#11 [backend-builder 3/6] COPY backend ./backend
+#11 CACHED
+
+#12 [frontend-builder 5/7] RUN curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh
+#12 CACHED
+
+#13 [frontend-builder 3/7] COPY frontend ./frontend
+#13 CACHED
+
+#14 [frontend-builder 4/7] COPY backend/static ./backend/static
+#14 CACHED
+
+#15 [frontend-builder 6/7] WORKDIR /app/frontend
+#15 CACHED
+
+#16 [frontend-builder 7/7] RUN wasm-pack build --target web --out-name wasm --out-dir ../backend/static
+#16 5.805 [INFO]: Checking for the Wasm target...
+#16 6.004 info: downloading component 'rust-std' for 'wasm32-unknown-unknown'
+#16 6.830 info: installing component 'rust-std' for 'wasm32-unknown-unknown'
+#16 8.136 [INFO]: Compiling to Wasm...
+#16 8.381 Compiling unicode-ident v1.0.22
+#16 8.381 Compiling proc-macro2 v1.0.103
+#16 8.381 Compiling quote v1.0.42
+#16 8.381 Compiling wasm-bindgen-shared v0.2.105
+#16 8.381 Compiling rustversion v1.0.22
+#16 8.383 Compiling cfg-if v1.0.4
+#16 8.383 Compiling bumpalo v3.19.0
+#16 8.384 Compiling stable_deref_trait v1.2.1
+#16 8.384 Compiling once_cell v1.21.3
+#16 8.729 Compiling cfg_aliases v0.1.1
+#16 8.736 Compiling smallvec v1.15.1
+#16 8.743 Compiling writeable v0.6.2
+#16 8.760 Compiling autocfg v1.5.0
+#16 8.779 Compiling litemap v0.8.1
+#16 8.866 Compiling icu_normalizer_data v2.1.1
+#16 8.895 Compiling version_check v0.9.5
+#16 8.934 Compiling serde_core v1.0.228
+#16 9.065 Compiling icu_properties_data v2.1.1
+#16 9.134 Compiling log v0.4.28
+#16 9.159 Compiling bitflags v2.10.0
+#16 9.180 Compiling num-traits v0.2.19
+#16 9.197 Compiling parking_lot_core v0.9.12
+#16 9.251 Compiling slotmap v1.0.7
+#16 9.255 Compiling thiserror v1.0.69
+#16 9.437 Compiling unicode-width v0.1.14
+#16 9.463 Compiling scopeguard v1.2.0
+#16 9.465 Compiling hashbrown v0.16.1
+#16 9.506 Compiling serde v1.0.228
+#16 9.506 Compiling bit-vec v0.6.3
+#16 9.562 Compiling equivalent v1.0.2
+#16 9.575 Compiling termcolor v1.4.1
+#16 9.627 Compiling wasm-bindgen v0.2.105
+#16 9.636 Compiling lock_api v0.4.14
+#16 9.675 Compiling bit-set v0.5.3
+#16 9.749 Compiling codespan-reporting v0.11.1
+#16 9.764 Compiling syn v2.0.111
+#16 9.800 Compiling wgpu-hal v0.19.5
+#16 9.819 Compiling unicode-xid v0.2.6
+#16 9.947 Compiling indexmap v2.12.1
+#16 9.947 Compiling rustc-hash v1.1.0
+#16 9.969 Compiling hexf-parse v0.2.1
+#16 9.979 Compiling itoa v1.0.15
+#16 9.990 Compiling percent-encoding v2.3.2
+#16 10.03 Compiling raw-window-handle v0.6.2
+#16 10.05 Compiling form_urlencoded v1.2.2
+#16 10.08 Compiling parking_lot v0.12.5
+#16 10.13 Compiling wgpu-core v0.19.4
+#16 10.16 Compiling utf8_iter v1.0.4
+#16 10.17 Compiling serde_json v1.0.145
+#16 10.21 Compiling profiling v1.0.17
+#16 10.24 Compiling ryu v1.0.20
+#16 10.25 Compiling arrayvec v0.7.6
+#16 10.25 Compiling wgpu v0.19.4
+#16 10.29 Compiling winit v0.29.15
+#16 10.34 Compiling memchr v2.7.6
+#16 10.38 Compiling futures-core v0.3.31
+#16 10.43 Compiling either v1.15.0
+#16 10.44 Compiling futures-task v0.3.31
+#16 10.47 Compiling bytes v1.11.0
+#16 10.49 Compiling fnv v1.0.7
+#16 10.51 Compiling pin-utils v0.1.0
+#16 10.54 Compiling pin-project-lite v0.2.16
+#16 10.55 Compiling itertools v0.11.0
+#16 10.56 Compiling futures-util v0.3.31
+#16 10.58 Compiling cursor-icon v1.2.0
+#16 10.67 Compiling static_assertions v1.1.0
+#16 10.68 Compiling smol_str v0.2.2
+#16 10.69 Compiling sync_wrapper v0.1.2
+#16 10.70 Compiling tower-service v0.3.3
+#16 10.72 Compiling base64 v0.21.7
+#16 10.74 Compiling atomic-waker v1.1.2
+#16 10.89 Compiling http v0.2.12
+#16 11.32 Compiling earcutr v0.4.3
+#16 11.81 Compiling synstructure v0.13.2
+#16 11.81 Compiling wasm-bindgen-macro-support v0.2.105
+#16 12.04 Compiling zerofrom-derive v0.1.6
+#16 12.04 Compiling yoke-derive v0.8.1
+#16 12.04 Compiling zerovec-derive v0.11.2
+#16 12.04 Compiling displaydoc v0.2.5
+#16 12.04 Compiling thiserror-impl v1.0.69
+#16 12.04 Compiling serde_derive v1.0.228
+#16 12.04 Compiling bytemuck_derive v1.10.2
+#16 12.80 Compiling zerofrom v0.1.6
+#16 12.80 Compiling bytemuck v1.24.0
+#16 12.85 Compiling naga v0.19.2
+#16 12.86 Compiling yoke v0.8.1
+#16 12.94 Compiling zerovec v0.11.5
+#16 12.94 Compiling zerotrie v0.2.3
+#16 12.96 Compiling wasm-bindgen-macro v0.2.105
+#16 13.35 Compiling tinystr v0.8.2
+#16 13.35 Compiling potential_utf v0.1.4
+#16 13.39 Compiling icu_collections v2.1.1
+#16 13.42 Compiling icu_locale_core v2.1.1
+#16 13.87 Compiling js-sys v0.3.82
+#16 13.87 Compiling console_error_panic_hook v0.1.7
+#16 13.88 Compiling icu_provider v2.1.1
+#16 14.07 Compiling serde_urlencoded v0.7.1
+#16 14.17 Compiling icu_properties v2.1.1
+#16 14.17 Compiling icu_normalizer v2.1.1
+#16 15.02 Compiling idna_adapter v1.2.1
+#16 15.05 Compiling idna v1.1.0
+#16 15.21 Compiling url v2.5.7
+#16 17.02 Compiling web-sys v0.3.82
+#16 17.08 Compiling wasm-bindgen-futures v0.4.55
+#16 17.12 Compiling web-time v0.2.4
+#16 25.07 Compiling wgpu-types v0.19.2
+#16 25.07 Compiling glow v0.13.1
+#16 25.07 Compiling console_log v1.0.0
+#16 25.07 Compiling reqwest v0.11.27
+#16 31.14 Compiling frontend v0.1.0 (/app/frontend)
+#16 31.21 warning: unused variable: `railways_data`
+#16 31.21   --> src/lib.rs:806:33
+#16 31.21    |
+#16 31.21 806 | ...         let railways_data = if let Some(json) = fetch_cached(&url_railways).await {
+#16 31.21    |                 ^^^^^^^^^^^^^ help: if this is intentional, prefix it with an underscore: `_railways_data`
+#16 31.21    |
+#16 31.21    = note: `#[warn(unused_variables)]` (part of `#[warn(unused)]`) on by default
+#16 31.21
+#16 31.22 warning: value captured by `camera_uniform` is never read
+#16 31.22   --> src/lib.rs:871:21
+#16 31.22    |
+#16 31.22 871 |                     camera_uniform = camera_uniform_data;
+#16 31.22    |                     ^^^^^^^^^^^^^^
+#16 31.22    |
+#16 31.22    = help: did you mean to capture by reference instead?
+#16 31.22    = note: `#[warn(unused_assignments)]` (part of `#[warn(unused)]`) on by default
+#16 31.22
+#16 31.22 warning: unused variable: `window_clone`
+#16 31.22   --> src/lib.rs:461:9
+#16 31.22    |
+#16 31.22 461 |     let window_clone = window.clone();
+#16 31.22    |         ^^^^^^^^^^^^ help: if this is intentional, prefix it with an underscore: `_window_clone`
+#16 31.22
+#16 31.23 error[E0382]: use of moved value
+#16 31.23   --> src/lib.rs:855:41
+#16 31.23    |
+#16 31.23 848 | ...     if let Some(railways) = railways_data {
+#16 31.23    |                                  -------- value moved here
+#16 31.23 ...
+#16 31.23 855 | ...     if let Some(railways) = railways_data {
+#16 31.23    |                                  ^^^^^^^^ value used here after move
+#16 31.23    |
+#16 31.23    = note: move occurs because value has type `Vec<MapWay>`, which does not implement the `Copy` trait
+#16 31.23 help: borrow this binding in the pattern to avoid moving the value
+#16 31.23    |
+#16 31.23 848 |         if let Some(ref railways) = railways_data {
+#16 31.23    |                      +++
+#16 31.23
+#16 31.24 warning: variable does not need to be mutable
+#16 31.24   --> src/lib.rs:521:25
+#16 31.24    |
+#16 31.24 521 |                 let mut cam = camera.lock().unwrap();
+#16 31.24    |                     ----^^^
+#16 31.24    |                     |
+#16 31.24    |                     help: remove this `mut`
+#16 31.24    |
+#16 31.24    = note: `#[warn(unused_mut)]` (part of `#[warn(unused)]`) on by default
+#16 31.24
+#16 31.27 For more information about this error, try `rustc --explain E0382`.
+#16 31.27 warning: `frontend` (lib) generated 4 warnings
+#16 31.27 error: could not compile `frontend` (lib) due to 1 previous error; 4 warnings emitted
+#16 31.32 Error: Compiling your crate to WebAssembly failed
+#16 31.32 Caused by: Compiling your crate to WebAssembly failed
+#16 31.32 Caused by: failed to execute `cargo build`: exited with exit status: 101
+#16 31.32   full command: cd "/app/frontend" && "cargo" "build" "--lib" "--release" "--target" "wasm32-unknown-unknown"
+#16 ERROR: process "/bin/sh -c wasm-pack build --target web --out-name wasm --out-dir ../backend/static" did not complete successfully: exit code: 1
+------
+ > [frontend-builder 7/7] RUN wasm-pack build --target web --out-name wasm --out-dir ../backend/static:
+31.24    |
+31.24    = note: `#[warn(unused_mut)]` (part of `#[warn(unused)]`) on by default
+31.24
+31.27 For more information about this error, try `rustc --explain E0382`.
+31.27 warning: `frontend` (lib) generated 4 warnings
+31.27 error: could not compile `frontend` (lib) due to 1 previous error; 4 warnings emitted
+31.32 Error: Compiling your crate to WebAssembly failed
+31.32 Caused by: Compiling your crate to WebAssembly failed
+31.32 Caused by: failed to execute `cargo build`: exited with exit status: 101
+31.32   full command: cd "/app/frontend" && "cargo" "build" "--lib" "--release" "--target" "wasm32-unknown-unknown"
+------
+Dockerfile:10
+--------------------
+   8 |     WORKDIR /app/frontend
+   9 |     # Build frontend
+  10 | >>> RUN wasm-pack build --target web --out-name wasm --out-dir ../backend/static
+  11 |
+  12 |     # Build Backend
+--------------------
+failed to solve: process "/bin/sh -c wasm-pack build --target web --out-name wasm --out-dir ../backend/static" did not complete successfully: exit code: 1
+
+View build details: docker-desktop://dashboard/build/default/default/ojyxf9sq9vbhjusaaq2tnqkq5
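Note: the E0382 recorded in this log is the usual moved-Option pattern: at the time of this build, lib.rs consumed railways_data in one `if let Some(railways) = railways_data` block and then matched on the same variable again. A minimal illustration of the failure mode and the fix rustc suggests (hypothetical types, not the project's code):

    fn demo(railways_data: Option<Vec<String>>) {
        // Borrow for any earlier inspection instead of moving out...
        if let Some(railways) = &railways_data {
            println!("peek: {} items", railways.len());
        }
        // ...so the final use is free to take ownership.
        if let Some(railways) = railways_data {
            println!("consume: {} items", railways.len());
        }
    }

Binding with `ref railways`, as the compiler hint shows, is equivalent to matching on `&railways_data`.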
diff --git a/docker-compose.yml b/docker-compose.yml
index 8836107..13197df 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -8,20 +8,35 @@ services:
       - "9042:9042"
       - "9160:9160"
       - "10000:10000"
-    command: --smp 1 --memory 4G --overprovisioned 1 --api-address 0.0.0.0 --max-memory-for-unlimited-query-soft-limit 1073741824 --tombstone-warn-threshold 10000000
+    command: --smp 1 --memory 2G --overprovisioned 1 --api-address 0.0.0.0 --max-memory-for-unlimited-query-soft-limit 1073741824 --tombstone-warn-threshold 10000000
     volumes:
       - scylla_data:/var/lib/scylla
 
   app:
-    build: .
+    build:
+      context: .
+      target: backend
     container_name: map-app
     ports:
       - "3000:3000"
     depends_on:
      - scylla
+    restart: always
+
+  importer:
+    build:
+      context: .
+      target: importer
+    container_name: map-importer
+    volumes:
+      - ./oberbayern-251125.osm.pbf:/app/data.osm.pbf
     environment:
       - SCYLLA_URI=scylla:9042
-    restart: always
+      - OSM_PBF_PATH=/app/data.osm.pbf
+    depends_on:
+      - scylla
+    profiles:
+      - import
 
 volumes:
   scylla_data:
diff --git a/frontend/src/lib.rs b/frontend/src/lib.rs
index 3428bbb..4d7de0a 100644
--- a/frontend/src/lib.rs
+++ b/frontend/src/lib.rs
@@ -133,6 +133,8 @@ struct TileBuffers {
     landuse_index_count: u32,
     water_vertex_buffer: wgpu::Buffer,
     water_index_count: u32,
+    railway_vertex_buffer: wgpu::Buffer,
+    railway_vertex_count: u32,
 }
 
 struct AppState {
@@ -141,6 +143,7 @@ struct AppState {
     buildings: HashMap<(i32, i32, i32), Vec<MapWay>>,
     landuse: HashMap<(i32, i32, i32), Vec<MapWay>>,
     water: HashMap<(i32, i32, i32), Vec<MapWay>>,
+    railways: HashMap<(i32, i32, i32), Vec<MapWay>>,
     buffers: HashMap<(i32, i32, i32), std::sync::Arc<TileBuffers>>,
     loaded_tiles: HashSet<(i32, i32, i32)>,
     pending_tiles: HashSet<(i32, i32, i32)>,
@@ -340,6 +343,7 @@ pub async fn run() {
         buildings: HashMap::new(),
         landuse: HashMap::new(),
         water: HashMap::new(),
+        railways: HashMap::new(),
         buffers: HashMap::new(),
         loaded_tiles: HashSet::new(),
         pending_tiles: HashSet::new(),
@@ -451,6 +455,7 @@ pub async fn run() {
     let building_pipeline = create_building_pipeline(&device, &config.format, &camera_bind_group_layout);
     let landuse_pipeline = create_landuse_pipeline(&device, &config.format, &camera_bind_group_layout);
     let water_pipeline = create_water_pipeline(&device, &config.format, &camera_bind_group_layout);
+    let railway_pipeline = create_railway_pipeline(&device, &config.format, &camera_bind_group_layout);
 
     let window_clone = window.clone();
 
@@ -665,9 +670,26 @@ pub async fn run() {
                         }
                     }
                 }
+
+                // Process railways
+                let mut railway_vertex_data = Vec::new();
+                if let Some(railways) = state_guard.railways.get(tile) {
+                    for way in railways {
+                        if way.points.len() < 2 { continue; }
+
+                        for i in 0..way.points.len() - 1 {
+                            let p1 = &way.points[i];
+                            let p2 = &way.points[i+1];
+                            let (x1, y1) = project(p1[0], p1[1]);
+                            let (x2, y2) = project(p2[0], p2[1]);
+                            railway_vertex_data.push(Vertex { position: [x1, y1] });
+                            railway_vertex_data.push(Vertex { position: [x2, y2] });
+                        }
+                    }
+                }
 
                 // Only create buffers if we have data
-                if !point_instance_data.is_empty() || !road_vertex_data.is_empty() || !building_vertex_data.is_empty() || !landuse_vertex_data.is_empty() || !water_vertex_data.is_empty() {
+                if !point_instance_data.is_empty() || !road_vertex_data.is_empty() || !building_vertex_data.is_empty() || !landuse_vertex_data.is_empty() || !water_vertex_data.is_empty() || !railway_vertex_data.is_empty() {
                     let point_buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
                         label: Some("Tile Instance Buffer"),
                         contents: bytemuck::cast_slice(&point_instance_data),
                         usage: wgpu::BufferUsages::VERTEX,
                     });
@@ -698,6 +720,12 @@ pub async fn run() {
                         contents: bytemuck::cast_slice(&water_vertex_data),
                         usage: wgpu::BufferUsages::VERTEX,
                     });
+
+                    let railway_buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
+                        label: Some("Tile Railway Buffer"),
+                        contents: bytemuck::cast_slice(&railway_vertex_data),
+                        usage: wgpu::BufferUsages::VERTEX,
+                    });
 
                     state_guard.buffers.insert(*tile, std::sync::Arc::new(TileBuffers {
                         point_instance_buffer: point_buffer,
@@ -710,6 +738,8 @@ pub async fn run() {
                         landuse_index_count: landuse_vertex_data.len() as u32,
                         water_vertex_buffer: water_buffer,
                         water_index_count: water_vertex_data.len() as u32,
+                        railway_vertex_buffer: railway_buffer,
+                        railway_vertex_count: railway_vertex_data.len() as u32,
                     }));
                 }
            }
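Note: the per-index loop in the railway processing hunk above is correct; the same LineList vertex stream can also be produced with slice windows, which makes the `points.len() < 2` guard unnecessary since `windows(2)` yields nothing for shorter slices (sketch, assuming the surrounding Vertex and project definitions):

    for pair in way.points.windows(2) {
        let (x1, y1) = project(pair[0][0], pair[0][1]);
        let (x2, y2) = project(pair[1][0], pair[1][1]);
        railway_vertex_data.push(Vertex { position: [x1, y1] });
        railway_vertex_data.push(Vertex { position: [x2, y2] });
    }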
@@ -771,6 +801,16 @@ pub async fn run() {
                         None
                     };
 
+                    // Fetch railways
+                    let url_railways = format!("http://localhost:3000/api/tiles/{}/{}/{}/railways", z, x, y);
+                    let railways_data = if let Some(json) = fetch_cached(&url_railways).await {
+                        serde_json::from_str::<Vec<MapWay>>(&json).ok()
+                    } else {
+                        None
+                    };
+
+
+
                     let mut guard = state_clone.lock().unwrap();
 
                     if let Some(nodes) = nodes_data {
@@ -799,6 +839,15 @@ pub async fn run() {
                         guard.water.insert((z, x, y), water);
                     }
 
+                    if let Some(railways) = railways_data {
+                        if !railways.is_empty() {
+                            web_sys::console::log_1(&format!("Fetched {} railway items for tile {}/{}/{}", railways.len(), z, x, y).into());
+                        }
+                        guard.railways.insert((z, x, y), railways);
+                    }
+
+
+
                     guard.loaded_tiles.insert((z, x, y));
                     guard.pending_tiles.remove(&(z, x, y));
 
@@ -881,6 +930,14 @@ pub async fn run() {
                         rpass.draw(0..buffers.road_vertex_count, 0..1);
                     }
 
+                    // Draw Railways (on top of roads)
+                    if buffers.railway_vertex_count > 0 {
+                        rpass.set_pipeline(&railway_pipeline);
+                        rpass.set_bind_group(0, &camera_bind_group, &[]);
+                        rpass.set_vertex_buffer(0, buffers.railway_vertex_buffer.slice(..));
+                        rpass.draw(0..buffers.railway_vertex_count, 0..1);
+                    }
+
                     // Draw Buildings (middle layer)
                     if buffers.building_index_count > 0 {
                         rpass.set_pipeline(&building_pipeline);
@@ -1370,3 +1435,87 @@ fn create_water_pipeline(
         multiview: None,
     })
 }
+
+fn create_railway_pipeline(
+    device: &wgpu::Device,
+    format: &wgpu::TextureFormat,
+    bind_group_layout: &wgpu::BindGroupLayout
+) -> wgpu::RenderPipeline {
+    let shader = device.create_shader_module(wgpu::ShaderModuleDescriptor {
+        label: None,
+        source: wgpu::ShaderSource::Wgsl(std::borrow::Cow::Borrowed(r#"
+            struct CameraUniform {
+                params: vec4<f32>,
+            };
+            @group(0) @binding(0)
+            var<uniform> camera: CameraUniform;
+
+            struct VertexInput {
+                @location(0) position: vec2<f32>,
+            };
+
+            struct VertexOutput {
+                @builtin(position) clip_position: vec4<f32>,
+            };
+
+            @vertex
+            fn vs_main(
+                model: VertexInput,
+            ) -> VertexOutput {
+                var out: VertexOutput;
+
+                let world_pos = model.position;
+
+                let x = world_pos.x * camera.params.x + camera.params.z;
+                let y = world_pos.y * camera.params.y + camera.params.w;
+
+                out.clip_position = vec4<f32>(x, y, 0.0, 1.0);
+                return out;
+            }
+
+            @fragment
+            fn fs_main(in: VertexOutput) -> @location(0) vec4<f32> {
+                return vec4<f32>(0.3, 0.3, 0.3, 1.0); // Dark grey for railways
+            }
+        "#)),
+    });
+
+    let pipeline_layout = device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor {
+        label: Some("Railway Pipeline Layout"),
+        bind_group_layouts: &[bind_group_layout],
+        push_constant_ranges: &[],
+    });
+
+    device.create_render_pipeline(&wgpu::RenderPipelineDescriptor {
+        label: None,
+        layout: Some(&pipeline_layout),
+        vertex: wgpu::VertexState {
+            module: &shader,
+            entry_point: "vs_main",
+            buffers: &[
+                Vertex::desc(),
+            ],
+        },
+        fragment: Some(wgpu::FragmentState {
+            module: &shader,
+            entry_point: "fs_main",
+            targets: &[Some(wgpu::ColorTargetState {
+                format: *format,
+                blend: Some(wgpu::BlendState::REPLACE),
+                write_mask: wgpu::ColorWrites::ALL,
+            })],
+        }),
+        primitive: wgpu::PrimitiveState {
+            topology: wgpu::PrimitiveTopology::LineList,
+            strip_index_format: None,
+            front_face: wgpu::FrontFace::Ccw,
+            cull_mode: None,
+            unclipped_depth: false,
+            polygon_mode: wgpu::PolygonMode::Fill,
+            conservative: false,
+        },
+        depth_stencil: None,
+        multisample: wgpu::MultisampleState::default(),
+        multiview: None,
+    })
+}
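Note: PrimitiveTopology::LineList renders one-pixel hairlines, and WebGPU exposes no line-width state. If railways should read as thicker strokes, each segment has to be expanded into a quad and drawn as a TriangleList instead. A sketch of that expansion under the same Vertex layout (not part of this diff):

    // Two CCW triangles (six vertices) covering the segment
    // (x1,y1)-(x2,y2) with the given half-width, in world units.
    fn segment_to_quad(x1: f32, y1: f32, x2: f32, y2: f32, half_width: f32) -> [[f32; 2]; 6] {
        let (dx, dy) = (x2 - x1, y2 - y1);
        let len = (dx * dx + dy * dy).sqrt().max(f32::EPSILON);
        // Unit normal of the segment direction, scaled to half the stroke width.
        let (nx, ny) = (-dy / len * half_width, dx / len * half_width);
        let a = [x1 + nx, y1 + ny];
        let b = [x1 - nx, y1 - ny];
        let c = [x2 + nx, y2 + ny];
        let d = [x2 - nx, y2 - ny];
        [a, b, c, c, b, d]
    }

The dedicated dark-grey pipeline stays the same apart from its primitive topology.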
diff --git a/importer/Cargo.toml b/importer/Cargo.toml
index e7c5a45..5fc4866 100644
--- a/importer/Cargo.toml
+++ b/importer/Cargo.toml
@@ -8,3 +8,4 @@ osmpbf = "0.3" # Pure Rust PBF parser, easier to build than osmium (C++ bindings)
 scylla = "0.12"
 tokio = { version = "1.0", features = ["full"] }
 anyhow = "1.0"
+sled = "0.34"
diff --git a/importer/src/main.rs b/importer/src/main.rs
index 3073e29..3bd002e 100644
--- a/importer/src/main.rs
+++ b/importer/src/main.rs
@@ -9,7 +9,16 @@ async fn main() -> Result<()> {
     // Connect to ScyllaDB
     let uri = std::env::var("SCYLLA_URI").unwrap_or_else(|_| "127.0.0.1:9042".to_string());
     println!("Connecting to ScyllaDB at {}...", uri);
-    let session = SessionBuilder::new().known_node(uri).build().await?;
+
+    let session = loop {
+        match SessionBuilder::new().known_node(&uri).build().await {
+            Ok(session) => break session,
+            Err(e) => {
+                println!("Failed to connect to ScyllaDB: {}. Retrying in 5 seconds...", e);
+                tokio::time::sleep(std::time::Duration::from_secs(5)).await;
+            }
+        }
+    };
     let session = std::sync::Arc::new(session);
 
     // Ensure schema exists
@@ -18,40 +27,52 @@ async fn main() -> Result<()> {
     // Create tables
     session.query("CREATE TABLE IF NOT EXISTS map_data.nodes (zoom int, tile_x int, tile_y int, id bigint, lat double, lon double, tags map<text, text>, PRIMARY KEY ((zoom, tile_x, tile_y), id))", &[]).await?;
     session.query("CREATE TABLE IF NOT EXISTS map_data.ways (zoom int, tile_x int, tile_y int, id bigint, tags map<text, text>, points blob, PRIMARY KEY ((zoom, tile_x, tile_y), id))", &[]).await?;
+    session.query("CREATE TABLE IF NOT EXISTS map_data.buildings (zoom int, tile_x int, tile_y int, id bigint, tags map<text, text>, points blob, PRIMARY KEY ((zoom, tile_x, tile_y), id))", &[]).await?;
+    session.query("CREATE TABLE IF NOT EXISTS map_data.water (zoom int, tile_x int, tile_y int, id bigint, tags map<text, text>, points blob, PRIMARY KEY ((zoom, tile_x, tile_y), id))", &[]).await?;
+    session.query("CREATE TABLE IF NOT EXISTS map_data.landuse (zoom int, tile_x int, tile_y int, id bigint, tags map<text, text>, points blob, PRIMARY KEY ((zoom, tile_x, tile_y), id))", &[]).await?;
+    session.query("CREATE TABLE IF NOT EXISTS map_data.railways (zoom int, tile_x int, tile_y int, id bigint, tags map<text, text>, points blob, PRIMARY KEY ((zoom, tile_x, tile_y), id))", &[]).await?;
 
-    let path = "sample.osm.pbf";
+    let path = std::env::var("OSM_PBF_PATH").unwrap_or_else(|_| "europe-latest.osm.pbf".to_string());
     println!("Reading {}...", path);
     let reader = ElementReader::from_path(path)?;
 
     // Cache for node coordinates: ID -> (lat, lon)
-    let mut node_cache = HashMap::<i64, (f64, f64)>::new();
+    // Use sled for disk-based caching to avoid OOM; cache_capacity caps sled's in-memory cache at 512MB
+    let node_cache = sled::Config::new()
+        .path("node_cache")
+        .cache_capacity(512 * 1024 * 1024)
+        .open()?;
 
-    let mut join_set = JoinSet::new();
-    let mut node_count = 0;
-    let mut way_count = 0;
-    let mut inserted_nodes = 0;
-    let mut inserted_ways = 0;
-    let mut inserted_buildings = 0;
-    let mut inserted_water = 0;
-    let mut inserted_landuse = 0;
+    // Channel for backpressure
+    // Producer (reader) -> Consumer (writer)
+    enum DbTask {
+        Node { id: i64, lat: f64, lon: f64, tags: HashMap<String, String>, x: i32, y: i32 },
+        Way { table: &'static str, id: i64, tags: HashMap<String, String>, points: Vec<u8>, x: i32, y: i32 },
+    }
 
-    // We process sequentially: Nodes first, then Ways.
-    reader.for_each(|element| {
-        match element {
-            Element::Node(node) => {
-                node_count += 1;
-                node_cache.insert(node.id(), (node.lat(), node.lon()));
+    let (tx, mut rx) = tokio::sync::mpsc::channel::<DbTask>(10_000);
 
-                if node.tags().count() > 0 {
-                    inserted_nodes += 1;
-                    let session = session.clone();
-                    let id = node.id();
-                    let lat = node.lat();
-                    let lon = node.lon();
-                    let tags: HashMap<String, String> = node.tags().map(|(k, v)| (k.to_string(), v.to_string())).collect();
-
-                    let (x, y) = lat_lon_to_tile(lat, lon, 10);
+    let session_clone = session.clone();
+    let consumer_handle = tokio::spawn(async move {
+        let mut join_set = JoinSet::new();
+        let mut inserted_count = 0;
+        let max_concurrent = std::env::var("CONCURRENT_INSERTS")
+            .ok()
+            .and_then(|s| s.parse().ok())
+            .unwrap_or(128); // Default to 128 concurrent inserts
+        println!("Starting consumer with max_concurrent={}", max_concurrent);
+
+        while let Some(task) = rx.recv().await {
+            let session = session_clone.clone();
+
+            // Backpressure: limit concurrent inserts
+            while join_set.len() >= max_concurrent {
+                join_set.join_next().await;
+            }
+
+            match task {
+                DbTask::Node { id, lat, lon, tags, x, y } => {
                     join_set.spawn(async move {
                         let _ = session.query(
                             "INSERT INTO map_data.nodes (zoom, tile_x, tile_y, id, lat, lon, tags) VALUES (?, ?, ?, ?, ?, ?, ?)",
@@ -59,135 +80,168 @@ async fn main() -> Result<()> {
                         ).await;
                     });
                 }
-            }
-            Element::DenseNode(node) => {
-                node_count += 1;
-                node_cache.insert(node.id(), (node.lat(), node.lon()));
-
-                if node.tags().count() > 0 {
-                    inserted_nodes += 1;
-                    let session = session.clone();
-                    let id = node.id();
-                    let lat = node.lat();
-                    let lon = node.lon();
-                    let tags: HashMap<String, String> = node.tags().map(|(k, v)| (k.to_string(), v.to_string())).collect();
-
-                    let (x, y) = lat_lon_to_tile(lat, lon, 10);
-
+                DbTask::Way { table, id, tags, points, x, y } => {
+                    let query = format!("INSERT INTO map_data.{} (zoom, tile_x, tile_y, id, tags, points) VALUES (?, ?, ?, ?, ?, ?)", table);
                     join_set.spawn(async move {
                         let _ = session.query(
-                            "INSERT INTO map_data.nodes (zoom, tile_x, tile_y, id, lat, lon, tags) VALUES (?, ?, ?, ?, ?, ?, ?)",
-                            (10, x, y, id, lat, lon, tags),
+                            query,
+                            (10, x, y, id, tags, points),
                         ).await;
                     });
                 }
             }
-            Element::Way(way) => {
-                way_count += 1;
-                let tags: HashMap<String, String> = way.tags().map(|(k, v)| (k.to_string(), v.to_string())).collect();
-
-                // Filter for highways/roads OR buildings OR landuse OR water
-                let is_highway = tags.contains_key("highway");
-                let is_building = tags.contains_key("building");
-                let is_water = tags.get("natural").map(|v| v == "water").unwrap_or(false) ||
-                               tags.get("waterway").map(|v| v == "riverbank").unwrap_or(false) ||
-                               tags.get("landuse").map(|v| v == "basin").unwrap_or(false);
-                let is_landuse = tags.get("leisure").map(|v| v == "park" || v == "garden").unwrap_or(false) ||
-                                 tags.get("landuse").map(|v| v == "grass" || v == "forest" || v == "meadow").unwrap_or(false) ||
-                                 tags.get("natural").map(|v| v == "wood" || v == "scrub").unwrap_or(false);
-
-                if is_highway || is_building || is_water || is_landuse {
-                    let mut points = Vec::new();
-
-                    // Resolve nodes
-                    for node_id in way.refs() {
-                        if let Some(&coords) = node_cache.get(&node_id) {
-                            points.push(coords);
-                        }
-                    }
-
-                    if points.len() >= 2 {
-                        let session = session.clone();
-                        let id = way.id();
-
-                        // Insert into the tile of the first point
-                        let (first_lat, first_lon) = points[0];
-                        let (x, y) = lat_lon_to_tile(first_lat, first_lon, 10);
-
-                        // Serialize points to blob (f64, f64) pairs
-                        let mut blob = Vec::with_capacity(points.len() * 16);
-                        for (lat, lon) in points {
-                            blob.extend_from_slice(&lat.to_be_bytes());
-                            blob.extend_from_slice(&lon.to_be_bytes());
-                        }
-
-                        if is_highway {
-                            inserted_ways += 1;
-                            let tags_clone = tags.clone();
-                            let blob_clone = blob.clone();
-                            let session = session.clone();
-                            join_set.spawn(async move {
-                                let _ = session.query(
-                                    "INSERT INTO map_data.ways (zoom, tile_x, tile_y, id, tags, points) VALUES (?, ?, ?, ?, ?, ?)",
-                                    (10, x, y, id, tags_clone, blob_clone),
-                                ).await;
-                            });
-                        }
-
-                        if is_building {
-                            let tags_clone = tags.clone();
-                            let blob_clone = blob.clone();
-                            let session = session.clone();
-                            join_set.spawn(async move {
-                                let _ = session.query(
-                                    "INSERT INTO map_data.buildings (zoom, tile_x, tile_y, id, tags, points) VALUES (?, ?, ?, ?, ?, ?)",
-                                    (10, x, y, id, tags_clone, blob_clone),
-                                ).await;
-                            });
-                            inserted_buildings += 1;
-                        }
-
-                        if is_water {
-                            let tags_clone = tags.clone();
-                            let blob_clone = blob.clone();
-                            let session = session.clone();
-                            join_set.spawn(async move {
-                                let _ = session.query(
-                                    "INSERT INTO map_data.water (zoom, tile_x, tile_y, id, tags, points) VALUES (?, ?, ?, ?, ?, ?)",
-                                    (10, x, y, id, tags_clone, blob_clone),
-                                ).await;
-                            });
-                            inserted_water += 1;
-                        }
-
-                        if is_landuse {
-                            let tags_clone = tags.clone();
-                            let blob_clone = blob.clone();
-                            let session = session.clone();
-                            join_set.spawn(async move {
-                                let _ = session.query(
-                                    "INSERT INTO map_data.landuse (zoom, tile_x, tile_y, id, tags, points) VALUES (?, ?, ?, ?, ?, ?)",
-                                    (10, x, y, id, tags_clone, blob_clone),
-                                ).await;
-                            });
-                            inserted_landuse += 1;
-                        }
-                    }
-                }
-            }
-            _ => {}
+            inserted_count += 1;
         }
+
+        // Wait for remaining tasks
+        while let Some(_) = join_set.join_next().await {}
+        println!("Consumer finished. Total inserted tasks: {}", inserted_count);
+    });
+
+    // Run the PBF reader in a blocking task to allow blocking_send
+    let tx_clone = tx.clone();
+    let reader_handle = tokio::task::spawn_blocking(move || -> Result<(usize, usize)> {
+        let tx = tx_clone;
+        let mut node_count = 0;
+        let mut way_count = 0;
+
+        // We process sequentially: Nodes first, then Ways.
+        reader.for_each(|element| {
+            match element {
+                Element::Node(node) => {
+                    node_count += 1;
+
+                    // Store in sled: key=id (8 bytes), value=lat+lon (16 bytes)
+                    let id_bytes = node.id().to_be_bytes();
+                    let mut coords = [0u8; 16];
+                    coords[0..8].copy_from_slice(&node.lat().to_be_bytes());
+                    coords[8..16].copy_from_slice(&node.lon().to_be_bytes());
+                    let _ = node_cache.insert(id_bytes, &coords);
+
+                    if node.tags().count() > 0 {
+                        let id = node.id();
+                        let lat = node.lat();
+                        let lon = node.lon();
+                        let tags: HashMap<String, String> = node.tags().map(|(k, v)| (k.to_string(), v.to_string())).collect();
+                        let (x, y) = lat_lon_to_tile(lat, lon, 10);
+
+                        let task = DbTask::Node { id, lat, lon, tags, x, y };
+                        let _ = tx.blocking_send(task);
+                    }
+                }
+                Element::DenseNode(node) => {
+                    node_count += 1;
+
+                    // Store in sled
+                    let id_bytes = node.id().to_be_bytes();
+                    let mut coords = [0u8; 16];
+                    coords[0..8].copy_from_slice(&node.lat().to_be_bytes());
+                    coords[8..16].copy_from_slice(&node.lon().to_be_bytes());
+                    let _ = node_cache.insert(id_bytes, &coords);
+
+                    if node.tags().count() > 0 {
+                        let id = node.id();
+                        let lat = node.lat();
+                        let lon = node.lon();
+                        let tags: HashMap<String, String> = node.tags().map(|(k, v)| (k.to_string(), v.to_string())).collect();
+                        let (x, y) = lat_lon_to_tile(lat, lon, 10);
+
+                        let task = DbTask::Node { id, lat, lon, tags, x, y };
+                        let _ = tx.blocking_send(task);
+                    }
+                }
+                Element::Way(way) => {
+                    way_count += 1;
+                    let tags: HashMap<String, String> = way.tags().map(|(k, v)| (k.to_string(), v.to_string())).collect();
+
+                    // Filter for highways/roads OR buildings OR landuse OR water OR railways
+                    let is_highway = tags.contains_key("highway");
+                    let is_building = tags.contains_key("building");
+                    let is_water = tags.get("natural").map(|v| v == "water").unwrap_or(false) ||
+                                   tags.get("waterway").map(|v| v == "riverbank").unwrap_or(false) ||
+                                   tags.get("landuse").map(|v| v == "basin").unwrap_or(false);
+                    let is_landuse = tags.get("leisure").map(|v| v == "park" || v == "garden").unwrap_or(false) ||
+                                     tags.get("landuse").map(|v| v == "grass" || v == "forest" || v == "meadow").unwrap_or(false) ||
+                                     tags.get("natural").map(|v| v == "wood" || v == "scrub").unwrap_or(false);
+                    let is_railway = tags.contains_key("railway");
+
+                    if is_highway || is_building || is_water || is_landuse || is_railway {
+                        let mut points = Vec::new();
+
+                        // Resolve nodes from sled
+                        for node_id in way.refs() {
+                            let id_bytes = node_id.to_be_bytes();
+                            if let Ok(Some(coords_bytes)) = node_cache.get(id_bytes) {
+                                let lat = f64::from_be_bytes(coords_bytes[0..8].try_into().unwrap());
+                                let lon = f64::from_be_bytes(coords_bytes[8..16].try_into().unwrap());
+                                points.push((lat, lon));
+                            }
+                        }
+
+                        if points.len() >= 2 {
+                            let id = way.id();
+
+                            // Insert into the tile of the first point
+                            let (first_lat, first_lon) = points[0];
+                            let (x, y) = lat_lon_to_tile(first_lat, first_lon, 10);
+
+                            // Serialize points to blob (f64, f64) pairs
+                            let mut blob = Vec::with_capacity(points.len() * 16);
+                            for (lat, lon) in points {
+                                blob.extend_from_slice(&lat.to_be_bytes());
+                                blob.extend_from_slice(&lon.to_be_bytes());
+                            }
+
+                            if is_highway {
+                                let task = DbTask::Way { table: "ways", id, tags: tags.clone(), points: blob.clone(), x, y };
+                                let _ = tx.blocking_send(task);
+                            }
+
+                            if is_building {
+                                let task = DbTask::Way { table: "buildings", id, tags: tags.clone(), points: blob.clone(), x, y };
+                                let _ = tx.blocking_send(task);
+                            }
+
+                            if is_water {
+                                let task = DbTask::Way { table: "water", id, tags: tags.clone(), points: blob.clone(), x, y };
+                                let _ = tx.blocking_send(task);
+                            }
+
+                            if is_landuse {
+                                let task = DbTask::Way { table: "landuse", id, tags: tags.clone(), points: blob.clone(), x, y };
+                                let _ = tx.blocking_send(task);
+                            }
+
+                            if is_railway {
+                                let task = DbTask::Way { table: "railways", id, tags: tags.clone(), points: blob.clone(), x, y };
+                                let _ = tx.blocking_send(task);
+                            }
+                        }
+                    }
+                }
+                _ => {}
+            }
+
+            if (node_count + way_count) % 100_000 == 0 {
+                println!("Processed {} nodes, {} ways...", node_count, way_count);
+            }
+        })?;
 
-        if (node_count + way_count) % 100_000 == 0 {
-            println!("Processed {} nodes, {} ways...", node_count, way_count);
-        }
-    })?;
+        Ok((node_count, way_count))
+    });
 
-    println!("Finished processing. Nodes: {}, Ways: {}. Inserted Nodes: {}, Inserted Ways: {}, Buildings: {}, Water: {}, Landuse: {}",
-             node_count, way_count, inserted_nodes, inserted_ways, inserted_buildings, inserted_water, inserted_landuse);
+    let (node_count, way_count) = reader_handle.await??;
+
+    println!("Finished reading PBF. Nodes: {}, Ways: {}. Waiting for consumer...", node_count, way_count);
 
-    println!("Waiting for pending inserts...");
-    while let Some(_) = join_set.join_next().await {}
+    // Drop sender to signal consumer to finish
+    drop(tx);
+
+    // Wait for consumer
+    consumer_handle.await?;
+
+    // Clean up cache
+    let _ = std::fs::remove_dir_all("node_cache");
 
     println!("Done!");
     Ok(())
diff --git a/run.sh b/run.sh
new file mode 100644
index 0000000..ed6737d
--- /dev/null
+++ b/run.sh
@@ -0,0 +1 @@
+docker compose --profile import up --build importer
\ No newline at end of file
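Note: lat_lon_to_tile is called throughout the importer (with the zoom fixed at 10) but is not part of this diff. For reference, the standard slippy-map formula it is presumably implementing:

    // Web-Mercator tile indices, as documented on the OSM wiki
    // ("Slippy map tilenames"). Assumed, not taken from this repo.
    fn lat_lon_to_tile(lat: f64, lon: f64, zoom: u32) -> (i32, i32) {
        let n = 2f64.powi(zoom as i32);
        let x = ((lon + 180.0) / 360.0 * n).floor() as i32;
        let lat_rad = lat.to_radians();
        let y = ((1.0 - (lat_rad.tan() + 1.0 / lat_rad.cos()).ln()
            / std::f64::consts::PI) / 2.0 * n).floor() as i32;
        (x, y)
    }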