update
This commit is contained in:
1519
importer/Cargo.lock
generated
Normal file
1519
importer/Cargo.lock
generated
Normal file
File diff suppressed because it is too large
Load Diff
23
importer/src/domain/mod.rs
Normal file
23
importer/src/domain/mod.rs
Normal file
@@ -0,0 +1,23 @@
|
||||
use std::collections::HashMap;

/// A unit of work produced by the PBF reader and consumed by the
/// database-writer task (sent over an mpsc channel in `main`).
#[derive(Debug)]
pub enum DbTask {
    /// A point feature (OSM node), bucketed into one tile of one zoom level.
    Node {
        zoom: i32,
        id: i64,
        lat: f64,
        lon: f64,
        /// OSM key/value tags of the node.
        tags: HashMap<String, String>,
        /// Tile coordinates the feature is bucketed into.
        x: i32,
        y: i32
    },
    /// A line/polygon feature (OSM way) with pre-serialized geometry.
    Way {
        zoom: i32,
        /// Destination table name, e.g. "ways", "buildings", "water",
        /// "landuse", "railways".
        table: &'static str,
        id: i64,
        tags: HashMap<String, String>,
        /// Serialized coordinate blob: little-endian f32 (lat, lon) pairs.
        points: Vec<u8>,
        x: i32,
        y: i32
    },
}
|
||||
@@ -1,376 +1,28 @@
|
||||
mod domain;
|
||||
mod repositories;
|
||||
mod services;
|
||||
mod parsers; // Empty for now, but kept for structure
|
||||
|
||||
use anyhow::Result;
|
||||
use earcutr::earcut;
|
||||
use osmpbf::{Element, ElementReader};
|
||||
use scylla::SessionBuilder;
|
||||
use std::collections::HashMap;
|
||||
use tokio::task::JoinSet;
|
||||
use std::fs::{File, OpenOptions};
|
||||
use std::io::{BufWriter, Write, Seek, SeekFrom};
|
||||
use std::path::{Path, PathBuf};
|
||||
use memmap2::Mmap;
|
||||
use std::sync::Arc;
|
||||
|
||||
/// Tile zoom levels the importer materializes; each arm of
/// `should_include` corresponds to one of these levels.
const ZOOM_LEVELS: [u32; 6] = [2, 4, 6, 9, 12, 14];
|
||||
|
||||
/// In-memory index of way geometries (way id -> ordered node-id list),
/// kept so multipolygon relations can be assembled later.
struct WayStore {
    // way_id -> node ids, in the order the way references them
    ways: HashMap<i64, Vec<i64>>,
}

impl WayStore {
    /// Creates an empty store.
    fn new() -> Self {
        WayStore {
            ways: HashMap::new(),
        }
    }

    /// Records the node references of one way, replacing any previous entry.
    fn insert(&mut self, way_id: i64, node_refs: Vec<i64>) {
        self.ways.insert(way_id, node_refs);
    }

    /// Looks up the node references of a way, if it was recorded.
    fn get(&self, way_id: i64) -> Option<&Vec<i64>> {
        self.ways.get(&way_id)
    }
}
|
||||
|
||||
/// A railway way whose database insertion is deferred until route
/// relations (which carry the line colour) have been processed.
struct RailwayWay {
    id: i64,
    tags: HashMap<String, String>,
    // serialized line blob (little-endian f32 lat/lon pairs)
    points: Vec<u8>,
    first_lat: f64,
    first_lon: f64,
}

/// Holds railway ways plus the colours assigned to them by route relations.
struct RailwayStore {
    // way_id -> deferred railway geometry
    ways: HashMap<i64, RailwayWay>,
    // way_id -> colour taken from a route relation
    way_colors: HashMap<i64, String>,
}

impl RailwayStore {
    /// Creates an empty store.
    fn new() -> Self {
        RailwayStore {
            ways: HashMap::new(),
            way_colors: HashMap::new(),
        }
    }

    /// Records one railway way for deferred insertion.
    fn insert_way(&mut self, id: i64, tags: HashMap<String, String>, points: Vec<u8>, first_lat: f64, first_lon: f64) {
        let way = RailwayWay { id, tags, points, first_lat, first_lon };
        self.ways.insert(id, way);
    }

    /// Assigns a colour to a way. First writer wins: a colour already set
    /// by an earlier route relation is never overwritten.
    fn set_color(&mut self, way_id: i64, color: String) {
        self.way_colors.entry(way_id).or_insert_with(|| color);
    }

    /// Returns the colour assigned to a way, if any route relation set one.
    fn get_color(&self, way_id: i64) -> Option<&String> {
        self.way_colors.get(&way_id)
    }
}
|
||||
|
||||
// Assemble ways into MULTIPLE rings (connect end-to-end).
// Rivers like the Isar have multiple separate channels/rings.
//
// Takes the outer-member way ids of a multipolygon relation plus the way
// geometry index, and greedily stitches segments whose endpoints match
// (in either orientation) into closed rings of node ids. Open chains
// with >= 4 nodes are force-closed so incomplete data still renders.
fn assemble_rings(way_ids: &[i64], way_store: &WayStore) -> Vec<Vec<i64>> {
    if way_ids.is_empty() { return Vec::new(); }

    // Get all way geometries; unknown ways and degenerate (< 2 node)
    // ways are silently skipped.
    let mut segments: Vec<Vec<i64>> = Vec::new();
    for &way_id in way_ids {
        if let Some(nodes) = way_store.get(way_id) {
            if nodes.len() >= 2 {
                segments.push(nodes.clone());
            }
        }
    }

    if segments.is_empty() { return Vec::new(); }

    let mut completed_rings: Vec<Vec<i64>> = Vec::new();

    // Keep assembling rings until we run out of segments.
    while !segments.is_empty() {
        // Start a new ring with the first available segment.
        let mut ring = segments.remove(0);

        // Try to extend this ring. The iteration cap is a safety valve so
        // pathological input can never loop forever.
        let max_iterations = segments.len() * segments.len() + 100;
        let mut iterations = 0;

        loop {
            iterations += 1;
            if iterations > max_iterations { break; }

            let mut connected = false;

            // Scan remaining segments for one that shares an endpoint
            // with the current ring, in any of the four orientations.
            for i in 0..segments.len() {
                let seg = &segments[i];
                if seg.is_empty() { continue; }

                let ring_start = *ring.first().unwrap();
                let ring_end = *ring.last().unwrap();
                let seg_start = *seg.first().unwrap();
                let seg_end = *seg.last().unwrap();

                if ring_end == seg_start {
                    // Connect: ring + seg (skip first node of seg, it
                    // duplicates the ring's last node)
                    ring.extend(seg[1..].iter().cloned());
                    segments.remove(i);
                    connected = true;
                    break;
                } else if ring_end == seg_end {
                    // Connect: ring + reversed seg
                    let reversed: Vec<i64> = seg.iter().rev().cloned().collect();
                    ring.extend(reversed[1..].iter().cloned());
                    segments.remove(i);
                    connected = true;
                    break;
                } else if ring_start == seg_end {
                    // Connect: seg + ring
                    let mut new_ring = seg.clone();
                    new_ring.extend(ring[1..].iter().cloned());
                    ring = new_ring;
                    segments.remove(i);
                    connected = true;
                    break;
                } else if ring_start == seg_start {
                    // Connect: reversed seg + ring
                    let mut reversed: Vec<i64> = seg.iter().rev().cloned().collect();
                    reversed.extend(ring[1..].iter().cloned());
                    ring = reversed;
                    segments.remove(i);
                    connected = true;
                    break;
                }
            }

            // Check if ring is now closed (first node == last node; >= 4
            // entries means at least a triangle plus the closing node).
            if ring.len() >= 4 && ring.first() == ring.last() {
                completed_rings.push(ring);
                break; // Move to next ring
            }

            // If no connection was made and ring isn't closed,
            // we can't extend this ring anymore.
            if !connected {
                // Still save partial rings if they have enough points.
                // This helps with incomplete data - at least show something.
                if ring.len() >= 4 {
                    // Force-close the ring
                    let first = ring[0];
                    ring.push(first);
                    completed_rings.push(ring);
                }
                break;
            }
        }
    }

    completed_rings
}
|
||||
|
||||
struct NodeStore {
|
||||
writer: Option<BufWriter<File>>,
|
||||
mmap: Option<Mmap>,
|
||||
path: PathBuf,
|
||||
last_id: i64,
|
||||
}
|
||||
|
||||
impl NodeStore {
|
||||
fn new<P: AsRef<Path>>(path: P) -> Result<Self> {
|
||||
let path = path.as_ref().to_path_buf();
|
||||
let file = OpenOptions::new()
|
||||
.read(true)
|
||||
.write(true)
|
||||
.create(true)
|
||||
.truncate(true)
|
||||
.open(&path)?;
|
||||
|
||||
let writer = BufWriter::with_capacity(10 * 1024 * 1024, file); // 10MB buffer
|
||||
|
||||
Ok(Self {
|
||||
writer: Some(writer),
|
||||
mmap: None,
|
||||
path,
|
||||
last_id: -1,
|
||||
})
|
||||
}
|
||||
|
||||
fn insert(&mut self, id: i64, lat: f64, lon: f64) -> Result<()> {
|
||||
if let Some(writer) = &mut self.writer {
|
||||
if id > self.last_id + 1 {
|
||||
let gap = id - self.last_id - 1;
|
||||
writer.seek(SeekFrom::Current(gap * 8))?;
|
||||
} else if id <= self.last_id {
|
||||
writer.seek(SeekFrom::Start(id as u64 * 8))?;
|
||||
}
|
||||
|
||||
let lat_i32 = (lat * 1e7) as i32;
|
||||
let lon_i32 = (lon * 1e7) as i32;
|
||||
writer.write_all(&lat_i32.to_le_bytes())?;
|
||||
writer.write_all(&lon_i32.to_le_bytes())?;
|
||||
|
||||
self.last_id = id;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn prepare_for_reading(&mut self) -> Result<()> {
|
||||
self.writer = None; // Flush and close writer
|
||||
|
||||
let file = File::open(&self.path)?;
|
||||
let mmap = unsafe { Mmap::map(&file)? };
|
||||
self.mmap = Some(mmap);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn get(&self, id: i64) -> Option<(f64, f64)> {
|
||||
if let Some(mmap) = &self.mmap {
|
||||
let offset = id as usize * 8;
|
||||
if offset + 8 <= mmap.len() {
|
||||
let chunk = &mmap[offset..offset+8];
|
||||
let lat_i32 = i32::from_le_bytes(chunk[0..4].try_into().unwrap());
|
||||
let lon_i32 = i32::from_le_bytes(chunk[4..8].try_into().unwrap());
|
||||
|
||||
if lat_i32 == 0 && lon_i32 == 0 { return None; }
|
||||
|
||||
return Some((lat_i32 as f64 / 1e7, lon_i32 as f64 / 1e7));
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// ---- Ramer-Douglas-Peucker polyline simplification ----

/// Distance from point `p` to the infinite line through `line_start` and
/// `line_end`; falls back to point-to-point distance when the two line
/// points coincide. RDP only needs the infinite-line distance, not the
/// segment distance. Uses `f64::hypot` instead of sqrt-of-squares for
/// better numerical behavior on very small/large components.
fn perpendicular_distance(p: (f64, f64), line_start: (f64, f64), line_end: (f64, f64)) -> f64 {
    let (x, y) = p;
    let (x1, y1) = line_start;
    let (x2, y2) = line_end;

    let dx = x2 - x1;
    let dy = y2 - y1;

    // Degenerate segment: distance to the single point.
    if dx == 0.0 && dy == 0.0 {
        return (x - x1).hypot(y - y1);
    }

    // |cross product| / |direction length| — standard point-line distance.
    let num = (dy * x - dx * y + x2 * y1 - y2 * x1).abs();
    let den = dx.hypot(dy);

    num / den
}

/// Ramer–Douglas–Peucker simplification: returns a subset of `points`
/// such that every dropped point lies within `epsilon` of the simplified
/// polyline. Endpoints are always kept; inputs of fewer than 3 points
/// are returned unchanged.
fn simplify_points(points: &[(f64, f64)], epsilon: f64) -> Vec<(f64, f64)> {
    if points.len() < 3 {
        return points.to_vec();
    }

    let start = points[0];
    let end = points[points.len() - 1];

    // Find the interior point farthest from the start-end chord.
    let mut max_dist = 0.0;
    let mut index = 0;

    for i in 1..points.len() - 1 {
        let dist = perpendicular_distance(points[i], start, end);
        if dist > max_dist {
            max_dist = dist;
            index = i;
        }
    }

    if max_dist > epsilon {
        // Keep the farthest point: recurse on both halves (which share
        // points[index]) and join, dropping the duplicated split point.
        let mut left = simplify_points(&points[..=index], epsilon);
        let right = simplify_points(&points[index..], epsilon);

        left.pop();
        left.extend(right);
        left
    } else {
        // All interior points are within tolerance: the chord suffices.
        vec![start, end]
    }
}
|
||||
|
||||
fn triangulate_polygon(points: &[(f64, f64)]) -> Vec<(f64, f64)> {
|
||||
let mut flat_points = Vec::with_capacity(points.len() * 2);
|
||||
for (lat, lon) in points {
|
||||
flat_points.push(*lat);
|
||||
flat_points.push(*lon);
|
||||
}
|
||||
|
||||
// We assume simple polygons (no holes) for now as we are just processing ways
|
||||
let indices = earcut(&flat_points, &[], 2).unwrap_or_default();
|
||||
|
||||
let mut triangles = Vec::with_capacity(indices.len());
|
||||
for i in indices {
|
||||
triangles.push(points[i]);
|
||||
}
|
||||
triangles
|
||||
}
|
||||
|
||||
/// Decides whether a feature with the given OSM tags belongs on tiles of
/// the given zoom level. Zoom >= 14 keeps everything; the lower levels
/// keep progressively coarser feature classes. Zoom values outside
/// `ZOOM_LEVELS` keep nothing.
fn should_include(tags: &HashMap<String, String>, zoom: u32) -> bool {
    // Max detail level: everything is included.
    if zoom >= 14 {
        return true;
    }

    let tag = |key: &str| tags.get(key).map(String::as_str);
    let highway = tag("highway");
    let place = tag("place");
    let natural = tag("natural");
    let railway = tag("railway");
    let waterway = tag("waterway");
    let landuse = tag("landuse");
    let leisure = tag("leisure");

    match zoom {
        // Space view: continents/countries, major water bodies, motorways,
        // and broad green/land cover.
        2 => {
            matches!(place, Some("continent" | "country" | "sea" | "ocean"))
                || matches!(natural, Some("water" | "bay" | "strait" | "wood" | "scrub"))
                || matches!(highway, Some("motorway"))
                || matches!(landuse, Some("forest" | "grass" | "meadow" | "farmland" | "residential"))
                || matches!(leisure, Some("park" | "nature_reserve"))
        }
        // Regional view: adds trunk roads, cities/towns, more nature, rivers.
        4 => {
            matches!(highway, Some("motorway" | "trunk"))
                || matches!(place, Some("city" | "town" | "sea" | "ocean"))
                || matches!(natural, Some("water" | "wood" | "scrub" | "heath" | "wetland" | "bay" | "strait"))
                || matches!(landuse, Some("forest" | "grass" | "meadow" | "farmland" | "residential"))
                || matches!(leisure, Some("park" | "nature_reserve"))
                || matches!(waterway, Some("river"))
        }
        // Adds primary roads; towns are dropped again at this level.
        6 => {
            matches!(highway, Some("motorway" | "trunk" | "primary"))
                || matches!(place, Some("city" | "sea" | "ocean"))
                || matches!(natural, Some("water" | "wood" | "scrub" | "heath" | "wetland" | "bay" | "strait"))
                || matches!(landuse, Some("forest" | "grass" | "meadow" | "farmland" | "residential"))
                || matches!(leisure, Some("park" | "nature_reserve"))
                || matches!(waterway, Some("river"))
        }
        // Adds towns, islands, rail lines, beaches/sand, canals, gardens.
        9 => {
            matches!(highway, Some("motorway" | "trunk" | "primary"))
                || matches!(place, Some("city" | "town" | "sea" | "ocean" | "island" | "islet"))
                || matches!(railway, Some("rail"))
                || matches!(natural, Some("water" | "wood" | "scrub" | "bay" | "strait" | "wetland" | "heath" | "sand" | "beach" | "shingle" | "bare_rock"))
                || matches!(landuse, Some("forest" | "grass" | "meadow" | "farmland" | "residential" | "basin" | "reservoir" | "allotments"))
                || matches!(leisure, Some("park" | "nature_reserve" | "garden"))
                || matches!(waterway, Some("river" | "riverbank" | "canal"))
        }
        // Adds minor roads, villages, buildings, and any landuse/leisure.
        12 => {
            matches!(highway, Some("motorway" | "trunk" | "primary" | "secondary" | "tertiary" | "residential" | "unclassified" | "pedestrian" | "service" | "track"))
                || matches!(place, Some("city" | "town" | "village"))
                || matches!(railway, Some("rail"))
                || tags.contains_key("building")
                || tags.contains_key("landuse")
                || tags.contains_key("leisure")
                || matches!(natural, Some("water" | "wood" | "scrub" | "wetland" | "heath" | "bay" | "strait" | "sand" | "beach" | "bare_rock"))
                || matches!(waterway, Some("river" | "riverbank" | "stream" | "canal" | "drain" | "ditch"))
        }
        // Not a materialized zoom level: keep nothing.
        _ => false,
    }
}
|
||||
use crate::domain::DbTask;
|
||||
use crate::repositories::{
|
||||
scylla_repository::ScyllaRepository,
|
||||
node_store::NodeStore,
|
||||
way_store::WayStore,
|
||||
railway_store::RailwayStore,
|
||||
};
|
||||
use crate::services::{
|
||||
filtering_service::FilteringService,
|
||||
tile_service::TileService,
|
||||
geometry_service::GeometryService,
|
||||
multipolygon_service::MultipolygonService,
|
||||
railway_service::RailwayService,
|
||||
};
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> Result<()> {
|
||||
@@ -381,70 +33,28 @@ async fn main() -> Result<()> {
|
||||
let uri = std::env::var("SCYLLA_URI").unwrap_or_else(|_| "127.0.0.1:9042".to_string());
|
||||
println!("Connecting to ScyllaDB at {}...", uri);
|
||||
|
||||
let session = loop {
|
||||
match SessionBuilder::new().known_node(&uri).build().await {
|
||||
Ok(session) => break session,
|
||||
Err(e) => {
|
||||
println!("Failed to connect to ScyllaDB: {}. Retrying in 5 seconds...", e);
|
||||
tokio::time::sleep(std::time::Duration::from_secs(5)).await;
|
||||
}
|
||||
}
|
||||
};
|
||||
let session = std::sync::Arc::new(session);
|
||||
|
||||
// Ensure schema exists
|
||||
session.query("CREATE KEYSPACE IF NOT EXISTS map_data WITH REPLICATION = { 'class' : 'SimpleStrategy', 'replication_factor' : 1 }", &[]).await?;
|
||||
let scylla_repo = Arc::new(ScyllaRepository::connect(&uri).await?);
|
||||
|
||||
// Create tables
|
||||
session.query("CREATE TABLE IF NOT EXISTS map_data.nodes (zoom int, tile_x int, tile_y int, id bigint, lat double, lon double, tags map<text, text>, PRIMARY KEY ((zoom, tile_x, tile_y), id))", &[]).await?;
|
||||
session.query("CREATE TABLE IF NOT EXISTS map_data.ways (zoom int, tile_x int, tile_y int, id bigint, tags map<text, text>, points blob, PRIMARY KEY ((zoom, tile_x, tile_y), id))", &[]).await?;
|
||||
session.query("CREATE TABLE IF NOT EXISTS map_data.buildings (zoom int, tile_x int, tile_y int, id bigint, tags map<text, text>, points blob, PRIMARY KEY ((zoom, tile_x, tile_y), id))", &[]).await?;
|
||||
session.query("CREATE TABLE IF NOT EXISTS map_data.water (zoom int, tile_x int, tile_y int, id bigint, tags map<text, text>, points blob, PRIMARY KEY ((zoom, tile_x, tile_y), id))", &[]).await?;
|
||||
session.query("CREATE TABLE IF NOT EXISTS map_data.landuse (zoom int, tile_x int, tile_y int, id bigint, tags map<text, text>, points blob, PRIMARY KEY ((zoom, tile_x, tile_y), id))", &[]).await?;
|
||||
session.query("CREATE TABLE IF NOT EXISTS map_data.railways (zoom int, tile_x int, tile_y int, id bigint, tags map<text, text>, points blob, PRIMARY KEY ((zoom, tile_x, tile_y), id))", &[]).await?;
|
||||
|
||||
// Prepare statements
|
||||
println!("Truncating tables...");
|
||||
session.query("TRUNCATE map_data.nodes", &[]).await?;
|
||||
session.query("TRUNCATE map_data.ways", &[]).await?;
|
||||
session.query("TRUNCATE map_data.buildings", &[]).await?;
|
||||
session.query("TRUNCATE map_data.water", &[]).await?;
|
||||
session.query("TRUNCATE map_data.landuse", &[]).await?;
|
||||
session.query("TRUNCATE map_data.railways", &[]).await?;
|
||||
println!("Tables truncated.");
|
||||
|
||||
println!("Preparing statements...");
|
||||
let insert_node = session.prepare("INSERT INTO map_data.nodes (zoom, tile_x, tile_y, id, lat, lon, tags) VALUES (?, ?, ?, ?, ?, ?, ?)").await?;
|
||||
let insert_ways = session.prepare("INSERT INTO map_data.ways (zoom, tile_x, tile_y, id, tags, points) VALUES (?, ?, ?, ?, ?, ?)").await?;
|
||||
let insert_buildings = session.prepare("INSERT INTO map_data.buildings (zoom, tile_x, tile_y, id, tags, points) VALUES (?, ?, ?, ?, ?, ?)").await?;
|
||||
let insert_water = session.prepare("INSERT INTO map_data.water (zoom, tile_x, tile_y, id, tags, points) VALUES (?, ?, ?, ?, ?, ?)").await?;
|
||||
let insert_landuse = session.prepare("INSERT INTO map_data.landuse (zoom, tile_x, tile_y, id, tags, points) VALUES (?, ?, ?, ?, ?, ?)").await?;
|
||||
let insert_railways = session.prepare("INSERT INTO map_data.railways (zoom, tile_x, tile_y, id, tags, points) VALUES (?, ?, ?, ?, ?, ?)").await?;
|
||||
println!("Statements prepared.");
|
||||
// Truncate tables
|
||||
scylla_repo.truncate_tables().await?;
|
||||
|
||||
let path = std::env::var("OSM_PBF_PATH")
|
||||
.or_else(|_| std::env::var("HOST_PBF_PATH"))
|
||||
.unwrap_or_else(|_| "europe-latest.osm.pbf".to_string());
|
||||
println!("Reading {}...", path);
|
||||
|
||||
let reader = ElementReader::from_path(path)?;
|
||||
|
||||
// Cache for node coordinates: ID -> (lat, lon)
|
||||
// Use flat file with mmap
|
||||
let cache_dir = std::env::var("CACHE_DIR").unwrap_or_else(|_| ".".to_string());
|
||||
let cache_path = std::path::Path::new(&cache_dir).join("node_cache.bin");
|
||||
println!("Using node cache at {:?}", cache_path);
|
||||
let mut node_store = NodeStore::new(cache_path.clone())?;
|
||||
|
||||
// Channel for backpressure
|
||||
// Producer (reader) -> Consumer (writer)
|
||||
enum DbTask {
|
||||
Node { zoom: i32, id: i64, lat: f64, lon: f64, tags: HashMap<String, String>, x: i32, y: i32 },
|
||||
Way { zoom: i32, table: &'static str, id: i64, tags: HashMap<String, String>, points: Vec<u8>, x: i32, y: i32 },
|
||||
}
|
||||
|
||||
let (tx, mut rx) = tokio::sync::mpsc::channel::<DbTask>(10_000);
|
||||
|
||||
let session_clone = session.clone();
|
||||
let scylla_repo_clone = scylla_repo.clone();
|
||||
let consumer_handle = tokio::spawn(async move {
|
||||
let mut join_set = JoinSet::new();
|
||||
let mut inserted_count = 0;
|
||||
@@ -456,7 +66,7 @@ async fn main() -> Result<()> {
|
||||
println!("Starting consumer with max_concurrent={}", max_concurrent);
|
||||
|
||||
while let Some(task) = rx.recv().await {
|
||||
let session = session_clone.clone();
|
||||
let repo = scylla_repo_clone.clone();
|
||||
|
||||
// Backpressure: limit concurrent inserts
|
||||
while join_set.len() >= max_concurrent {
|
||||
@@ -465,29 +75,13 @@ async fn main() -> Result<()> {
|
||||
|
||||
match task {
|
||||
DbTask::Node { zoom, id, lat, lon, tags, x, y } => {
|
||||
let statement = insert_node.clone();
|
||||
join_set.spawn(async move {
|
||||
let _ = session.execute(
|
||||
&statement,
|
||||
(zoom, x, y, id, lat, lon, tags),
|
||||
).await;
|
||||
let _ = repo.insert_node(zoom, id, lat, lon, tags, x, y).await;
|
||||
});
|
||||
}
|
||||
DbTask::Way { zoom, table, id, tags, points, x, y } => {
|
||||
let statement = match table {
|
||||
"ways" => insert_ways.clone(),
|
||||
"buildings" => insert_buildings.clone(),
|
||||
"water" => insert_water.clone(),
|
||||
"landuse" => insert_landuse.clone(),
|
||||
"railways" => insert_railways.clone(),
|
||||
_ => panic!("Unknown table: {}", table),
|
||||
};
|
||||
|
||||
join_set.spawn(async move {
|
||||
let _ = session.execute(
|
||||
&statement,
|
||||
(zoom, x, y, id, tags, points),
|
||||
).await;
|
||||
let _ = repo.insert_way(table, zoom, id, tags, points, x, y).await;
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -499,7 +93,7 @@ async fn main() -> Result<()> {
|
||||
println!("Consumer finished. Total inserted tasks: {}", inserted_count);
|
||||
});
|
||||
|
||||
// Run the PBF reader in a blocking task to allow blocking_send
|
||||
// Run the PBF reader in a blocking task
|
||||
let tx_clone = tx.clone();
|
||||
let reader_handle = tokio::task::spawn_blocking(move || -> Result<(usize, usize, usize)> {
|
||||
let tx = tx_clone;
|
||||
@@ -509,16 +103,9 @@ async fn main() -> Result<()> {
|
||||
let mut ways_pending = false;
|
||||
let mut relations_pending = false;
|
||||
|
||||
// Store way geometries for multipolygon assembly
|
||||
let mut way_store = WayStore::new();
|
||||
|
||||
// Store railway ways for deferred insertion (after relation processing for colors)
|
||||
let mut railway_store = RailwayStore::new();
|
||||
|
||||
// We process sequentially: Nodes first, then Ways, then Relations.
|
||||
// osmpbf yields nodes then ways then relations.
|
||||
// We need to detect when we switch from nodes to ways to prepare the store.
|
||||
|
||||
reader.for_each(|element| {
|
||||
match element {
|
||||
Element::Node(node) => {
|
||||
@@ -531,9 +118,9 @@ async fn main() -> Result<()> {
|
||||
let lon = node.lon();
|
||||
let tags: HashMap<String, String> = node.tags().map(|(k, v)| (k.to_string(), v.to_string())).collect();
|
||||
|
||||
for &zoom in &ZOOM_LEVELS {
|
||||
if should_include(&tags, zoom) {
|
||||
let (x, y) = lat_lon_to_tile(lat, lon, zoom);
|
||||
for &zoom in &FilteringService::ZOOM_LEVELS {
|
||||
if FilteringService::should_include(&tags, zoom) {
|
||||
let (x, y) = TileService::lat_lon_to_tile(lat, lon, zoom);
|
||||
let task = DbTask::Node { zoom: zoom as i32, id, lat, lon, tags: tags.clone(), x, y };
|
||||
let _ = tx.blocking_send(task);
|
||||
}
|
||||
@@ -550,9 +137,9 @@ async fn main() -> Result<()> {
|
||||
let lon = node.lon();
|
||||
let tags: HashMap<String, String> = node.tags().map(|(k, v)| (k.to_string(), v.to_string())).collect();
|
||||
|
||||
for &zoom in &ZOOM_LEVELS {
|
||||
if should_include(&tags, zoom) {
|
||||
let (x, y) = lat_lon_to_tile(lat, lon, zoom);
|
||||
for &zoom in &FilteringService::ZOOM_LEVELS {
|
||||
if FilteringService::should_include(&tags, zoom) {
|
||||
let (x, y) = TileService::lat_lon_to_tile(lat, lon, zoom);
|
||||
let task = DbTask::Node { zoom: zoom as i32, id, lat, lon, tags: tags.clone(), x, y };
|
||||
let _ = tx.blocking_send(task);
|
||||
}
|
||||
@@ -561,7 +148,6 @@ async fn main() -> Result<()> {
|
||||
}
|
||||
Element::Way(way) => {
|
||||
if !ways_pending {
|
||||
// First way encountered. Prepare store for reading.
|
||||
println!("Switching to Way processing. Flushing node cache...");
|
||||
if let Err(e) = node_store.prepare_for_reading() {
|
||||
eprintln!("Failed to prepare node store: {}", e);
|
||||
@@ -572,18 +158,14 @@ async fn main() -> Result<()> {
|
||||
|
||||
way_count += 1;
|
||||
|
||||
// Store ALL way node refs for potential multipolygon use
|
||||
let node_refs: Vec<i64> = way.refs().collect();
|
||||
way_store.insert(way.id(), node_refs.clone());
|
||||
|
||||
let tags: HashMap<String, String> = way.tags().map(|(k, v)| (k.to_string(), v.to_string())).collect();
|
||||
|
||||
// Filter for highways/roads OR buildings OR landuse OR water OR railways
|
||||
// Filter for highways/roads OR buildings OR landuse OR water OR railways
|
||||
let is_highway = tags.contains_key("highway");
|
||||
let is_building = tags.contains_key("building");
|
||||
|
||||
// Split Water into Area (Polygon) and Line (Way)
|
||||
let is_water_area = tags.get("natural").map(|v| v == "water" || v == "wetland" || v == "bay" || v == "strait").unwrap_or(false) ||
|
||||
tags.get("place").map(|v| v == "sea" || v == "ocean").unwrap_or(false) ||
|
||||
tags.get("waterway").map(|v| v == "riverbank" || v == "dock").unwrap_or(false) ||
|
||||
@@ -599,7 +181,6 @@ async fn main() -> Result<()> {
|
||||
if is_highway || is_building || is_water_area || is_water_line || is_landuse || is_railway {
|
||||
let mut points = Vec::new();
|
||||
|
||||
// Resolve nodes from store
|
||||
for node_id in way.refs() {
|
||||
if let Some((lat, lon)) = node_store.get(node_id) {
|
||||
points.push((lat, lon));
|
||||
@@ -608,94 +189,75 @@ async fn main() -> Result<()> {
|
||||
|
||||
if points.len() >= 2 {
|
||||
let id = way.id();
|
||||
|
||||
|
||||
// Insert into the tile of the first point
|
||||
let (first_lat, first_lon) = points[0];
|
||||
let is_closed = points.first() == points.last();
|
||||
|
||||
// Detect if we should treat this as an area
|
||||
let mut treat_as_water_area = is_water_area && is_closed;
|
||||
let mut treat_as_landuse = is_landuse && is_closed;
|
||||
let mut treat_as_building = is_building && is_closed;
|
||||
|
||||
// Fallback: If water is open (e.g. riverbank segment), treat as line
|
||||
let mut treat_as_water_line = is_water_line || (is_water_area && !is_closed);
|
||||
|
||||
// If landuse/building is open, we skip it to avoid artifacts (giant triangles)
|
||||
if (is_landuse || is_building) && !is_closed {
|
||||
return;
|
||||
}
|
||||
|
||||
for &zoom in &ZOOM_LEVELS {
|
||||
if !should_include(&tags, zoom) { continue; }
|
||||
for &zoom in &FilteringService::ZOOM_LEVELS {
|
||||
if !FilteringService::should_include(&tags, zoom) { continue; }
|
||||
|
||||
// Apply simplification based on zoom level
|
||||
let base_epsilon = match zoom {
|
||||
2 => 0.01, // Was 0.0001 (~11m) -> Now ~1km
|
||||
4 => 0.002, // Was 0.00005 (~5m) -> Now ~200m
|
||||
6 => 0.0005, // Was 0.00002 (~2m) -> Now ~50m
|
||||
9 => 0.0001, // Was 0.00001 (~1m) -> Now ~10m
|
||||
2 => 0.01,
|
||||
4 => 0.002,
|
||||
6 => 0.0005,
|
||||
9 => 0.0001,
|
||||
12 => 0.000005,
|
||||
_ => 0.0,
|
||||
};
|
||||
|
||||
let epsilon = if treat_as_water_area || treat_as_landuse || is_highway || treat_as_water_line {
|
||||
if zoom <= 4 && treat_as_landuse {
|
||||
0.0 // Disable simplification for landuse at low zoom
|
||||
0.0
|
||||
} else if treat_as_water_area || treat_as_landuse {
|
||||
// User requested "little more detail"
|
||||
// Almost disable simplification for organic shapes
|
||||
if zoom >= 9 {
|
||||
0.0 // No simplification at zoom 9+
|
||||
} else {
|
||||
base_epsilon * 0.01 // 1% of standard simplification - high detail
|
||||
}
|
||||
if zoom >= 9 { 0.0 } else { base_epsilon * 0.01 }
|
||||
} else {
|
||||
base_epsilon * 0.5 // Highways/Railways can handle some simplification
|
||||
base_epsilon * 0.5
|
||||
}
|
||||
} else {
|
||||
base_epsilon
|
||||
};
|
||||
|
||||
let simplified_points = if epsilon > 0.0 {
|
||||
simplify_points(&points, epsilon)
|
||||
GeometryService::simplify_points(&points, epsilon)
|
||||
} else {
|
||||
points.clone()
|
||||
};
|
||||
|
||||
// Serialize points
|
||||
let mut final_points = simplified_points.clone();
|
||||
|
||||
// For highways and railways, we DON'T triangulate - they're line data
|
||||
// Create the highway/railway blob BEFORE triangulation
|
||||
// Create blob for line features (highways/railways/water lines)
|
||||
let mut line_blob = Vec::with_capacity(simplified_points.len() * 8);
|
||||
for (lat, lon) in &simplified_points {
|
||||
line_blob.extend_from_slice(&(*lat as f32).to_le_bytes());
|
||||
line_blob.extend_from_slice(&(*lon as f32).to_le_bytes());
|
||||
}
|
||||
|
||||
// Triangulate for polygon types
|
||||
if treat_as_building || treat_as_water_area || treat_as_landuse {
|
||||
// Already checked closure above
|
||||
final_points = triangulate_polygon(&final_points);
|
||||
final_points = GeometryService::triangulate_polygon(&final_points);
|
||||
}
|
||||
|
||||
if final_points.len() < 3 && (treat_as_building || treat_as_water_area || treat_as_landuse) { continue; }
|
||||
if simplified_points.len() < 2 && (is_highway || is_railway || treat_as_water_line) { continue; }
|
||||
|
||||
let (first_lat, first_lon) = simplified_points[0];
|
||||
let (x, y) = lat_lon_to_tile(first_lat, first_lon, zoom);
|
||||
let (x, y) = TileService::lat_lon_to_tile(first_lat, first_lon, zoom);
|
||||
let zoom_i32 = zoom as i32;
|
||||
|
||||
// Create polygon blob from triangulated points
|
||||
let mut polygon_blob = Vec::with_capacity(final_points.len() * 8);
|
||||
for (lat, lon) in &final_points {
|
||||
polygon_blob.extend_from_slice(&(*lat as f32).to_le_bytes());
|
||||
polygon_blob.extend_from_slice(&(*lon as f32).to_le_bytes());
|
||||
}
|
||||
|
||||
// Use line_blob for highways/railways/water_lines, polygon_blob for others
|
||||
if is_highway || treat_as_water_line {
|
||||
let task = DbTask::Way { zoom: zoom_i32, table: "ways", id, tags: tags.clone(), points: line_blob.clone(), x, y };
|
||||
let _ = tx.blocking_send(task);
|
||||
@@ -717,7 +279,6 @@ async fn main() -> Result<()> {
|
||||
}
|
||||
|
||||
if is_railway {
|
||||
// Store for deferred insertion - colors will be applied from relations
|
||||
let (first_lat, first_lon) = simplified_points[0];
|
||||
railway_store.insert_way(id, tags.clone(), line_blob.clone(), first_lat, first_lon);
|
||||
}
|
||||
@@ -734,48 +295,25 @@ async fn main() -> Result<()> {
|
||||
relation_count += 1;
|
||||
let tags: HashMap<String, String> = rel.tags().map(|(k, v)| (k.to_string(), v.to_string())).collect();
|
||||
|
||||
// Process route relations for transit colors
|
||||
if tags.get("type").map(|t| t == "route").unwrap_or(false) {
|
||||
let route_type = tags.get("route").map(|s| s.as_str());
|
||||
let is_transit = match route_type {
|
||||
Some("subway") | Some("tram") | Some("light_rail") => true,
|
||||
Some("train") => {
|
||||
// Only include S-Bahn and suburban trains
|
||||
tags.get("network").map(|n| n.contains("S-Bahn")).unwrap_or(false) ||
|
||||
tags.get("service").map(|s| s == "suburban").unwrap_or(false) ||
|
||||
tags.get("ref").map(|r| r.starts_with("S")).unwrap_or(false)
|
||||
},
|
||||
_ => false,
|
||||
};
|
||||
|
||||
if is_transit {
|
||||
// Extract colour tag
|
||||
if let Some(colour) = tags.get("colour").or(tags.get("color")) {
|
||||
// Map colour to all member ways
|
||||
for member in rel.members() {
|
||||
if let osmpbf::RelMemberType::Way = member.member_type {
|
||||
railway_store.set_color(member.member_id, colour.clone());
|
||||
}
|
||||
}
|
||||
if let Some(colour) = RailwayService::get_route_color(&tags) {
|
||||
for member in rel.members() {
|
||||
if let osmpbf::RelMemberType::Way = member.member_type {
|
||||
railway_store.set_color(member.member_id, colour.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Process multipolygon relations (existing code)
|
||||
if tags.get("type").map(|t| t == "multipolygon").unwrap_or(false) {
|
||||
// Check if it's a water or landuse multipolygon
|
||||
// IMPORTANT: Rivers like the Isar are tagged waterway=river on the relation itself!
|
||||
let is_water = tags.get("natural").map(|v| v == "water" || v == "wetland" || v == "bay").unwrap_or(false) ||
|
||||
tags.get("waterway").map(|v| v == "riverbank" || v == "river" || v == "canal").unwrap_or(false) ||
|
||||
tags.get("water").is_some() || // Also check water=* tag
|
||||
tags.get("landuse").map(|v| v == "basin" || v == "reservoir").unwrap_or(false);
|
||||
let is_water = tags.get("natural").map(|v| v == "water" || v == "wetland" || v == "bay").unwrap_or(false) ||
|
||||
tags.get("waterway").map(|v| v == "riverbank" || v == "river" || v == "canal").unwrap_or(false) ||
|
||||
tags.get("water").is_some() ||
|
||||
tags.get("landuse").map(|v| v == "basin" || v == "reservoir").unwrap_or(false);
|
||||
|
||||
let is_landuse = tags.get("landuse").is_some() ||
|
||||
tags.get("leisure").map(|v| v == "park" || v == "nature_reserve" || v == "garden").unwrap_or(false) ||
|
||||
tags.get("natural").map(|v| v == "wood" || v == "scrub" || v == "heath").unwrap_or(false);
|
||||
|
||||
if is_water || is_landuse {
|
||||
// Collect outer way members
|
||||
let mut outer_ways: Vec<i64> = Vec::new();
|
||||
for member in rel.members() {
|
||||
if member.role().unwrap_or("") == "outer" {
|
||||
@@ -786,11 +324,9 @@ async fn main() -> Result<()> {
|
||||
}
|
||||
|
||||
if !outer_ways.is_empty() {
|
||||
// Assemble ALL rings from the outer ways (rivers have multiple rings!)
|
||||
let rings = assemble_rings(&outer_ways, &way_store);
|
||||
let rings = MultipolygonService::assemble_rings(&outer_ways, &way_store);
|
||||
|
||||
for ring_node_ids in rings {
|
||||
// Resolve node coordinates
|
||||
let mut points: Vec<(f64, f64)> = Vec::new();
|
||||
for node_id in &ring_node_ids {
|
||||
if let Some((lat, lon)) = node_store.get(*node_id) {
|
||||
@@ -802,17 +338,16 @@ async fn main() -> Result<()> {
|
||||
let id = rel.id();
|
||||
let (first_lat, first_lon) = points[0];
|
||||
|
||||
for &zoom in &ZOOM_LEVELS {
|
||||
if !should_include(&tags, zoom) { continue; }
|
||||
for &zoom in &FilteringService::ZOOM_LEVELS {
|
||||
if !FilteringService::should_include(&tags, zoom) { continue; }
|
||||
|
||||
// No simplification for multipolygons
|
||||
let final_points = triangulate_polygon(&points);
|
||||
let final_points = GeometryService::triangulate_polygon(&points);
|
||||
if final_points.len() < 3 { continue; }
|
||||
|
||||
let (x, y) = lat_lon_to_tile(first_lat, first_lon, zoom);
|
||||
let (x, y) = TileService::lat_lon_to_tile(first_lat, first_lon, zoom);
|
||||
let zoom_i32 = zoom as i32;
|
||||
|
||||
// Create polygon blob
|
||||
let mut polygon_blob = Vec::with_capacity(final_points.len() * 8);
|
||||
for (lat, lon) in &final_points {
|
||||
polygon_blob.extend_from_slice(&(*lat as f32).to_le_bytes());
|
||||
@@ -837,27 +372,26 @@ async fn main() -> Result<()> {
|
||||
}
|
||||
})?;
|
||||
|
||||
// Deferred railway insertion - now with colors from route relations
|
||||
println!("Inserting {} railway ways with colors...", railway_store.ways.len());
|
||||
for (way_id, railway) in &railway_store.ways {
|
||||
let mut tags = railway.tags.clone();
|
||||
|
||||
// Apply color from route relation if available
|
||||
if let Some(colour) = railway_store.get_color(*way_id) {
|
||||
let (railways, colors) = railway_store.into_data();
|
||||
println!("Inserting {} railway ways with colors...", railways.len());
|
||||
for (id, railway) in railways {
|
||||
let mut tags = railway.tags;
|
||||
|
||||
if let Some(colour) = colors.get(&id) {
|
||||
tags.insert("colour".to_string(), colour.clone());
|
||||
}
|
||||
|
||||
|
||||
// Insert for all applicable zoom levels
|
||||
for &zoom in &ZOOM_LEVELS {
|
||||
if !should_include(&tags, zoom) { continue; }
|
||||
|
||||
let (x, y) = lat_lon_to_tile(railway.first_lat, railway.first_lon, zoom);
|
||||
for &zoom in &FilteringService::ZOOM_LEVELS {
|
||||
if !FilteringService::should_include(&tags, zoom) { continue; }
|
||||
|
||||
let (x, y) = TileService::lat_lon_to_tile(railway.first_lat, railway.first_lon, zoom);
|
||||
let zoom_i32 = zoom as i32;
|
||||
|
||||
|
||||
let task = DbTask::Way {
|
||||
zoom: zoom_i32,
|
||||
table: "railways",
|
||||
id: railway.id,
|
||||
id,
|
||||
tags: tags.clone(),
|
||||
points: railway.points.clone(),
|
||||
x,
|
||||
@@ -890,12 +424,9 @@ async fn main() -> Result<()> {
|
||||
for table in &tables {
|
||||
println!("Compacting map_data.{}...", table);
|
||||
let query = format!("ALTER TABLE map_data.{} WITH gc_grace_seconds = 0", table);
|
||||
let _ = session.query(query, &[]).await;
|
||||
let _ = scylla_repo.get_session().query(query, &[]).await;
|
||||
}
|
||||
|
||||
// Force a flush to ensure all data is on disk before compaction
|
||||
// Note: In ScyllaDB, compaction happens automatically, but we set gc_grace_seconds=0
|
||||
// to allow immediate tombstone cleanup. For manual compaction, use nodetool externally.
|
||||
println!("Compaction settings updated. Tombstones will be cleaned during next compaction cycle.");
|
||||
println!("For immediate compaction, run: docker exec scylla nodetool compact map_data");
|
||||
|
||||
@@ -903,11 +434,3 @@ async fn main() -> Result<()> {
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Maps a WGS84 coordinate to the slippy-map tile containing it at `zoom`.
///
/// Bug fix: the previous version cast the fractional tile coordinates with
/// `as i32`, which truncates toward zero. For latitudes whose Mercator y is
/// negative (near the poles) that rounded the wrong way; `floor()` rounds
/// down consistently and matches `TileService::lat_lon_to_tile`.
fn lat_lon_to_tile(lat: f64, lon: f64, zoom: u32) -> (i32, i32) {
    let n = 2.0f64.powi(zoom as i32);
    // Longitude maps linearly onto x.
    let x = (lon + 180.0) / 360.0 * n;
    // Latitude goes through the Mercator projection for y.
    let lat_rad = lat.to_radians();
    let y = (1.0 - (lat_rad.tan() + (1.0 / lat_rad.cos())).ln() / std::f64::consts::PI) / 2.0 * n;
    (x.floor() as i32, y.floor() as i32)
}
|
||||
|
||||
1
importer/src/parsers/mod.rs
Normal file
1
importer/src/parsers/mod.rs
Normal file
@@ -0,0 +1 @@
|
||||
// Parsing logic is currently in main.rs loop
|
||||
5
importer/src/repositories/mod.rs
Normal file
5
importer/src/repositories/mod.rs
Normal file
@@ -0,0 +1,5 @@
|
||||
//! Data-persistence layer: in-memory/on-disk stores populated during the
//! import passes, plus the ScyllaDB access layer.

pub mod way_store;
pub mod railway_store;
pub mod node_store;
pub mod scylla_repository;
|
||||
|
||||
77
importer/src/repositories/node_store.rs
Normal file
77
importer/src/repositories/node_store.rs
Normal file
@@ -0,0 +1,77 @@
|
||||
use std::fs::{File, OpenOptions};
|
||||
use std::io::{BufWriter, Write, Seek, SeekFrom};
|
||||
use std::path::{Path, PathBuf};
|
||||
use memmap2::Mmap;
|
||||
use anyhow::Result;
|
||||
|
||||
pub struct NodeStore {
|
||||
writer: Option<BufWriter<File>>,
|
||||
mmap: Option<Mmap>,
|
||||
path: PathBuf,
|
||||
last_id: i64,
|
||||
}
|
||||
|
||||
impl NodeStore {
|
||||
pub fn new<P: AsRef<Path>>(path: P) -> Result<Self> {
|
||||
let path = path.as_ref().to_path_buf();
|
||||
let file = OpenOptions::new()
|
||||
.read(true)
|
||||
.write(true)
|
||||
.create(true)
|
||||
.truncate(true)
|
||||
.open(&path)?;
|
||||
|
||||
let writer = BufWriter::with_capacity(10 * 1024 * 1024, file); // 10MB buffer
|
||||
|
||||
Ok(Self {
|
||||
writer: Some(writer),
|
||||
mmap: None,
|
||||
path,
|
||||
last_id: -1,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn insert(&mut self, id: i64, lat: f64, lon: f64) -> Result<()> {
|
||||
if let Some(writer) = &mut self.writer {
|
||||
if id > self.last_id + 1 {
|
||||
let gap = id - self.last_id - 1;
|
||||
writer.seek(SeekFrom::Current(gap * 8))?;
|
||||
} else if id <= self.last_id {
|
||||
writer.seek(SeekFrom::Start(id as u64 * 8))?;
|
||||
}
|
||||
|
||||
let lat_i32 = (lat * 1e7) as i32;
|
||||
let lon_i32 = (lon * 1e7) as i32;
|
||||
writer.write_all(&lat_i32.to_le_bytes())?;
|
||||
writer.write_all(&lon_i32.to_le_bytes())?;
|
||||
|
||||
self.last_id = id;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn prepare_for_reading(&mut self) -> Result<()> {
|
||||
self.writer = None; // Flush and close writer
|
||||
|
||||
let file = File::open(&self.path)?;
|
||||
let mmap = unsafe { Mmap::map(&file)? };
|
||||
self.mmap = Some(mmap);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn get(&self, id: i64) -> Option<(f64, f64)> {
|
||||
if let Some(mmap) = &self.mmap {
|
||||
let offset = id as usize * 8;
|
||||
if offset + 8 <= mmap.len() {
|
||||
let chunk = &mmap[offset..offset+8];
|
||||
let lat_i32 = i32::from_le_bytes(chunk[0..4].try_into().unwrap());
|
||||
let lon_i32 = i32::from_le_bytes(chunk[4..8].try_into().unwrap());
|
||||
|
||||
if lat_i32 == 0 && lon_i32 == 0 { return None; }
|
||||
|
||||
return Some((lat_i32 as f64 / 1e7, lon_i32 as f64 / 1e7));
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
}
|
||||
41
importer/src/repositories/railway_store.rs
Normal file
41
importer/src/repositories/railway_store.rs
Normal file
@@ -0,0 +1,41 @@
|
||||
use std::collections::HashMap;

/// A railway way held back until every route relation has been scanned, so
/// the relation's colour can be attached before the way is inserted.
pub struct RailwayWay {
    pub id: i64,
    pub tags: HashMap<String, String>,
    pub points: Vec<u8>, // serialized line blob
    pub first_lat: f64,
    pub first_lon: f64,
}

/// Buffers railway ways plus the colours route relations assign to them.
pub struct RailwayStore {
    ways: HashMap<i64, RailwayWay>,       // way id -> buffered way data
    way_colors: HashMap<i64, String>,     // way id -> colour from a route relation
}

impl RailwayStore {
    /// Creates an empty store.
    pub fn new() -> Self {
        RailwayStore {
            ways: HashMap::default(),
            way_colors: HashMap::default(),
        }
    }

    /// Buffers a railway way for deferred insertion.
    pub fn insert_way(&mut self, id: i64, tags: HashMap<String, String>, points: Vec<u8>, first_lat: f64, first_lon: f64) {
        let way = RailwayWay { id, tags, points, first_lat, first_lon };
        self.ways.insert(id, way);
    }

    /// Assigns a colour to a way. The first route relation to claim a way
    /// wins; later assignments are ignored.
    pub fn set_color(&mut self, way_id: i64, color: String) {
        self.way_colors.entry(way_id).or_insert(color);
    }

    /// Colour previously assigned to `way_id`, if any.
    pub fn get_color(&self, way_id: i64) -> Option<&String> {
        self.way_colors.get(&way_id)
    }

    /// Consumes the store, yielding the buffered ways and the colour map.
    pub fn into_data(self) -> (HashMap<i64, RailwayWay>, HashMap<i64, String>) {
        (self.ways, self.way_colors)
    }
}
|
||||
101
importer/src/repositories/scylla_repository.rs
Normal file
101
importer/src/repositories/scylla_repository.rs
Normal file
@@ -0,0 +1,101 @@
|
||||
use scylla::{Session, SessionBuilder};
|
||||
use std::sync::Arc;
|
||||
use std::collections::HashMap;
|
||||
use tokio::task::JoinSet;
|
||||
use anyhow::Result;
|
||||
|
||||
pub struct ScyllaRepository {
|
||||
session: Arc<Session>,
|
||||
insert_node: scylla::statement::prepared_statement::PreparedStatement,
|
||||
insert_ways: scylla::statement::prepared_statement::PreparedStatement,
|
||||
insert_buildings: scylla::statement::prepared_statement::PreparedStatement,
|
||||
insert_water: scylla::statement::prepared_statement::PreparedStatement,
|
||||
insert_landuse: scylla::statement::prepared_statement::PreparedStatement,
|
||||
insert_railways: scylla::statement::prepared_statement::PreparedStatement,
|
||||
}
|
||||
|
||||
impl ScyllaRepository {
|
||||
pub async fn connect(uri: &str) -> Result<Self> {
|
||||
let session = loop {
|
||||
match SessionBuilder::new().known_node(uri).build().await {
|
||||
Ok(session) => break session,
|
||||
Err(e) => {
|
||||
println!("Failed to connect to ScyllaDB: {}. Retrying in 5 seconds...", e);
|
||||
tokio::time::sleep(std::time::Duration::from_secs(5)).await;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let session = Arc::new(session);
|
||||
|
||||
// Ensure schema exists
|
||||
session.query("CREATE KEYSPACE IF NOT EXISTS map_data WITH REPLICATION = { 'class' : 'SimpleStrategy', 'replication_factor' : 1 }", &[]).await?;
|
||||
|
||||
// Create tables
|
||||
session.query("CREATE TABLE IF NOT EXISTS map_data.nodes (zoom int, tile_x int, tile_y int, id bigint, lat double, lon double, tags map<text, text>, PRIMARY KEY ((zoom, tile_x, tile_y), id))", &[]).await?;
|
||||
session.query("CREATE TABLE IF NOT EXISTS map_data.ways (zoom int, tile_x int, tile_y int, id bigint, tags map<text, text>, points blob, PRIMARY KEY ((zoom, tile_x, tile_y), id))", &[]).await?;
|
||||
session.query("CREATE TABLE IF NOT EXISTS map_data.buildings (zoom int, tile_x int, tile_y int, id bigint, tags map<text, text>, points blob, PRIMARY KEY ((zoom, tile_x, tile_y), id))", &[]).await?;
|
||||
session.query("CREATE TABLE IF NOT EXISTS map_data.water (zoom int, tile_x int, tile_y int, id bigint, tags map<text, text>, points blob, PRIMARY KEY ((zoom, tile_x, tile_y), id))", &[]).await?;
|
||||
session.query("CREATE TABLE IF NOT EXISTS map_data.landuse (zoom int, tile_x int, tile_y int, id bigint, tags map<text, text>, points blob, PRIMARY KEY ((zoom, tile_x, tile_y), id))", &[]).await?;
|
||||
session.query("CREATE TABLE IF NOT EXISTS map_data.railways (zoom int, tile_x int, tile_y int, id bigint, tags map<text, text>, points blob, PRIMARY KEY ((zoom, tile_x, tile_y), id))", &[]).await?;
|
||||
|
||||
// Prepare statements
|
||||
let insert_node = session.prepare("INSERT INTO map_data.nodes (zoom, tile_x, tile_y, id, lat, lon, tags) VALUES (?, ?, ?, ?, ?, ?, ?)").await?;
|
||||
let insert_ways = session.prepare("INSERT INTO map_data.ways (zoom, tile_x, tile_y, id, tags, points) VALUES (?, ?, ?, ?, ?, ?)").await?;
|
||||
let insert_buildings = session.prepare("INSERT INTO map_data.buildings (zoom, tile_x, tile_y, id, tags, points) VALUES (?, ?, ?, ?, ?, ?)").await?;
|
||||
let insert_water = session.prepare("INSERT INTO map_data.water (zoom, tile_x, tile_y, id, tags, points) VALUES (?, ?, ?, ?, ?, ?)").await?;
|
||||
let insert_landuse = session.prepare("INSERT INTO map_data.landuse (zoom, tile_x, tile_y, id, tags, points) VALUES (?, ?, ?, ?, ?, ?)").await?;
|
||||
let insert_railways = session.prepare("INSERT INTO map_data.railways (zoom, tile_x, tile_y, id, tags, points) VALUES (?, ?, ?, ?, ?, ?)").await?;
|
||||
|
||||
Ok(Self {
|
||||
session,
|
||||
insert_node,
|
||||
insert_ways,
|
||||
insert_buildings,
|
||||
insert_water,
|
||||
insert_landuse,
|
||||
insert_railways,
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn truncate_tables(&self) -> Result<()> {
|
||||
println!("Truncating tables...");
|
||||
self.session.query("TRUNCATE map_data.nodes", &[]).await?;
|
||||
self.session.query("TRUNCATE map_data.ways", &[]).await?;
|
||||
self.session.query("TRUNCATE map_data.buildings", &[]).await?;
|
||||
self.session.query("TRUNCATE map_data.water", &[]).await?;
|
||||
self.session.query("TRUNCATE map_data.landuse", &[]).await?;
|
||||
self.session.query("TRUNCATE map_data.railways", &[]).await?;
|
||||
println!("Tables truncated.");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn insert_node(&self, zoom: i32, id: i64, lat: f64, lon: f64, tags: HashMap<String, String>, x: i32, y: i32) -> Result<()> {
|
||||
self.session.execute(
|
||||
&self.insert_node,
|
||||
(zoom, x, y, id, lat, lon, tags),
|
||||
).await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn insert_way(&self, table: &str, zoom: i32, id: i64, tags: HashMap<String, String>, points: Vec<u8>, x: i32, y: i32) -> Result<()> {
|
||||
let statement = match table {
|
||||
"ways" => &self.insert_ways,
|
||||
"buildings" => &self.insert_buildings,
|
||||
"water" => &self.insert_water,
|
||||
"landuse" => &self.insert_landuse,
|
||||
"railways" => &self.insert_railways,
|
||||
_ => panic!("Unknown table: {}", table),
|
||||
};
|
||||
|
||||
self.session.execute(
|
||||
statement,
|
||||
(zoom, x, y, id, tags, points),
|
||||
).await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn get_session(&self) -> Arc<Session> {
|
||||
self.session.clone()
|
||||
}
|
||||
}
|
||||
20
importer/src/repositories/way_store.rs
Normal file
20
importer/src/repositories/way_store.rs
Normal file
@@ -0,0 +1,20 @@
|
||||
use std::collections::HashMap;

/// Remembers the node-reference list of every way seen in the first pass,
/// so multipolygon relations can later be assembled from their member ways.
pub struct WayStore {
    ways: HashMap<i64, Vec<i64>>, // way id -> ordered node ids
}

impl WayStore {
    /// Creates an empty store.
    pub fn new() -> Self {
        WayStore {
            ways: HashMap::default(),
        }
    }

    /// Records (or replaces) the node list for `way_id`.
    pub fn insert(&mut self, way_id: i64, node_refs: Vec<i64>) {
        self.ways.insert(way_id, node_refs);
    }

    /// Looks up the node list for `way_id`, if one was recorded.
    pub fn get(&self, way_id: i64) -> Option<&Vec<i64>> {
        self.ways.get(&way_id)
    }
}
|
||||
98
importer/src/services/filtering_service.rs
Normal file
98
importer/src/services/filtering_service.rs
Normal file
@@ -0,0 +1,98 @@
|
||||
use std::collections::HashMap;

/// Decides which OSM elements are imported at which zoom level.
pub struct FilteringService;

impl FilteringService {
    /// The discrete zoom levels the importer materializes data for.
    pub const ZOOM_LEVELS: [u32; 6] = [2, 4, 6, 9, 12, 14];

    /// Returns `true` when an element carrying `tags` should be stored for
    /// tiles at `zoom`.
    ///
    /// Zoom 14 and above accept everything. Each lower level keeps an
    /// increasingly small whitelist of tag values; zoom values not listed in
    /// `ZOOM_LEVELS` (below 14) accept nothing.
    pub fn should_include(tags: &HashMap<String, String>, zoom: u32) -> bool {
        if zoom >= 14 { return true; }

        let highway = tags.get("highway").map(|s| s.as_str());
        let place = tags.get("place").map(|s| s.as_str());
        let natural = tags.get("natural").map(|s| s.as_str());
        let railway = tags.get("railway").map(|s| s.as_str());
        let waterway = tags.get("waterway").map(|s| s.as_str());

        match zoom {
            2 => {
                // World view: continents/countries/oceans, plus motorways and
                // broad land cover so the map is not empty at this level.
                matches!(place, Some("continent" | "country" | "sea" | "ocean")) ||
                matches!(natural, Some("water" | "bay" | "strait")) || // major water bodies
                matches!(highway, Some("motorway")) ||
                matches!(tags.get("landuse").map(|s| s.as_str()), Some("forest" | "grass" | "meadow" | "farmland" | "residential")) ||
                matches!(tags.get("leisure").map(|s| s.as_str()), Some("park" | "nature_reserve")) ||
                matches!(natural, Some("wood" | "scrub"))
            },
            4 => {
                // Regional view: trunk roads, cities/towns, large water and
                // land cover, and rivers.
                matches!(highway, Some("motorway" | "trunk")) ||
                matches!(place, Some("city" | "town" | "sea" | "ocean")) ||
                matches!(natural, Some("water" | "wood" | "scrub" | "heath" | "wetland" | "bay" | "strait")) ||
                matches!(tags.get("landuse").map(|s| s.as_str()), Some("forest" | "grass" | "meadow" | "farmland" | "residential")) ||
                matches!(tags.get("leisure").map(|s| s.as_str()), Some("park" | "nature_reserve")) ||
                matches!(waterway, Some("river"))
            },
            6 => {
                // Country view: motorway/trunk/primary roads, cities, main
                // rail lines, large natural/landuse areas, and rivers.
                matches!(highway, Some("motorway" | "trunk" | "primary")) ||
                matches!(place, Some("city" | "sea" | "ocean")) ||
                matches!(railway, Some("rail")) || // major rail lines
                matches!(natural, Some("water" | "wood" | "scrub" | "heath" | "wetland" | "bay" | "strait")) ||
                matches!(tags.get("landuse").map(|s| s.as_str()), Some("forest" | "grass" | "meadow" | "farmland" | "residential")) ||
                matches!(tags.get("leisure").map(|s| s.as_str()), Some("park" | "nature_reserve")) ||
                matches!(waterway, Some("river"))
            },
            9 => {
                // Sub-regional view: adds towns and islands, urban transit
                // rail, beaches/sand/bare rock, canals, and gardens.
                matches!(highway, Some("motorway" | "trunk" | "primary")) ||
                matches!(place, Some("city" | "town" | "sea" | "ocean" | "island" | "islet")) || // islands appear here
                matches!(railway, Some("rail" | "subway" | "light_rail" | "narrow_gauge")) || // urban transit
                matches!(natural, Some("water" | "wood" | "scrub" | "bay" | "strait" | "wetland" | "heath" | "sand" | "beach" | "shingle" | "bare_rock")) || // coastal detail
                matches!(tags.get("landuse").map(|s| s.as_str()), Some("forest" | "grass" | "meadow" | "farmland" | "residential" | "basin" | "reservoir" | "allotments")) ||
                matches!(tags.get("leisure").map(|s| s.as_str()), Some("park" | "nature_reserve" | "garden")) ||
                matches!(waterway, Some("river" | "riverbank" | "canal"))
            },
            12 => {
                // City view: most road classes, villages, trams, every
                // building/landuse/leisure element, and minor waterways.
                matches!(highway, Some("motorway" | "trunk" | "primary" | "secondary" | "tertiary" | "residential" | "unclassified" | "pedestrian" | "service" | "track")) || // minor roads too
                matches!(place, Some("city" | "town" | "village")) ||
                matches!(railway, Some("rail" | "subway" | "light_rail" | "narrow_gauge" | "tram")) ||
                tags.contains_key("building") ||
                tags.contains_key("landuse") ||
                tags.contains_key("leisure") ||
                matches!(natural, Some("water" | "wood" | "scrub" | "wetland" | "heath" | "bay" | "strait" | "sand" | "beach" | "bare_rock")) ||
                matches!(waterway, Some("river" | "riverbank" | "stream" | "canal" | "drain" | "ditch"))
            },
            // Zooms not listed in ZOOM_LEVELS import nothing below 14.
            _ => false
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_should_include_motorway_zoom_2() {
        let mut tags = HashMap::new();
        tags.insert("highway".to_string(), "motorway".to_string());
        assert!(FilteringService::should_include(&tags, 2));
    }

    #[test]
    fn test_should_not_include_path_zoom_2() {
        let mut tags = HashMap::new();
        tags.insert("highway".to_string(), "path".to_string());
        assert!(!FilteringService::should_include(&tags, 2));
    }

    #[test]
    fn test_should_include_park_zoom_12() {
        let mut tags = HashMap::new();
        tags.insert("leisure".to_string(), "park".to_string());
        assert!(FilteringService::should_include(&tags, 12));
    }
}
|
||||
91
importer/src/services/geometry_service.rs
Normal file
91
importer/src/services/geometry_service.rs
Normal file
@@ -0,0 +1,91 @@
|
||||
|
||||
#[derive(Clone, Copy, Debug)]
|
||||
pub struct Point {
|
||||
pub x: f64,
|
||||
pub y: f64,
|
||||
}
|
||||
|
||||
impl Point {
|
||||
pub fn new(x: f64, y: f64) -> Self {
|
||||
Self { x, y }
|
||||
}
|
||||
}
|
||||
|
||||
pub struct GeometryService;
|
||||
|
||||
impl GeometryService {
|
||||
// Ramer-Douglas-Peucker simplification
|
||||
pub fn simplify_points(points: &[(f64, f64)], epsilon: f64) -> Vec<(f64, f64)> {
|
||||
if points.len() < 3 {
|
||||
return points.to_vec();
|
||||
}
|
||||
|
||||
let start = points[0];
|
||||
let end = points[points.len() - 1];
|
||||
|
||||
let mut max_dist = 0.0;
|
||||
let mut index = 0;
|
||||
|
||||
for i in 1..points.len() - 1 {
|
||||
let dist = Self::perpendicular_distance(points[i], start, end);
|
||||
if dist > max_dist {
|
||||
max_dist = dist;
|
||||
index = i;
|
||||
}
|
||||
}
|
||||
|
||||
if max_dist > epsilon {
|
||||
let mut left = Self::simplify_points(&points[..=index], epsilon);
|
||||
let mut right = Self::simplify_points(&points[index..], epsilon);
|
||||
|
||||
// Remove duplicate point at split
|
||||
left.pop();
|
||||
left.extend(right);
|
||||
left
|
||||
} else {
|
||||
vec![start, end]
|
||||
}
|
||||
}
|
||||
|
||||
fn perpendicular_distance(p: (f64, f64), line_start: (f64, f64), line_end: (f64, f64)) -> f64 {
|
||||
let (x, y) = p;
|
||||
let (x1, y1) = line_start;
|
||||
let (x2, y2) = line_end;
|
||||
|
||||
let dx = x2 - x1;
|
||||
let dy = y2 - y1;
|
||||
|
||||
if dx == 0.0 && dy == 0.0 {
|
||||
return ((x - x1).powi(2) + (y - y1).powi(2)).sqrt();
|
||||
}
|
||||
|
||||
let num = (dy * x - dx * y + x2 * y1 - y2 * x1).abs();
|
||||
let den = (dx.powi(2) + dy.powi(2)).sqrt();
|
||||
|
||||
num / den
|
||||
}
|
||||
|
||||
pub fn triangulate_polygon(points: &[(f64, f64)]) -> Vec<(f64, f64)> {
|
||||
let mut flat_points = Vec::with_capacity(points.len() * 2);
|
||||
for (lat, lon) in points {
|
||||
flat_points.push(*lat);
|
||||
flat_points.push(*lon);
|
||||
}
|
||||
|
||||
// We assume simple polygons (no holes) for now as we are just processing ways
|
||||
// Complex multipolygons with holes are handled via relation processing which naturally closes rings
|
||||
// But for triangulation, earcutr handles holes if we provide hole indices.
|
||||
// For basic way polygons, we assume no holes.
|
||||
|
||||
let triangles = earcutr::earcut(&flat_points, &[], 2).unwrap_or_default();
|
||||
|
||||
let mut result = Vec::with_capacity(triangles.len());
|
||||
for i in triangles {
|
||||
let lat = flat_points[i * 2];
|
||||
let lon = flat_points[i * 2 + 1];
|
||||
result.push((lat, lon));
|
||||
}
|
||||
|
||||
result
|
||||
}
|
||||
}
|
||||
6
importer/src/services/mod.rs
Normal file
6
importer/src/services/mod.rs
Normal file
@@ -0,0 +1,6 @@
|
||||
//! Stateless domain services used by the import pipeline: geometry,
//! multipolygon assembly, zoom filtering, tiling, and railway helpers.

pub mod geometry_service;
pub mod multipolygon_service;
pub mod filtering_service;
pub mod tile_service;
pub mod railway_service;
|
||||
|
||||
105
importer/src/services/multipolygon_service.rs
Normal file
105
importer/src/services/multipolygon_service.rs
Normal file
@@ -0,0 +1,105 @@
|
||||
use crate::repositories::way_store::WayStore;

/// Assembles multipolygon relation members into closed rings.
pub struct MultipolygonService;

impl MultipolygonService {
    /// Connects the member ways in `way_ids` end-to-end into closed rings.
    ///
    /// One relation may yield several independent rings (rivers like the Isar
    /// have multiple separate channels), so the result is a list of rings,
    /// each a node-id sequence whose first and last ids are equal. Ways that
    /// are missing from `way_store` or shorter than 2 nodes are skipped.
    /// Chains that cannot be closed are force-closed when they have at least
    /// 4 nodes, otherwise dropped — a best effort for incomplete extracts.
    pub fn assemble_rings(way_ids: &[i64], way_store: &WayStore) -> Vec<Vec<i64>> {
        if way_ids.is_empty() { return Vec::new(); }

        // Resolve the usable way geometries up front.
        let mut segments: Vec<Vec<i64>> = Vec::new();
        for &way_id in way_ids {
            if let Some(nodes) = way_store.get(way_id) {
                if nodes.len() >= 2 {
                    segments.push(nodes.clone());
                }
            }
        }

        if segments.is_empty() { return Vec::new(); }

        let mut completed_rings: Vec<Vec<i64>> = Vec::new();

        // Greedily grow one ring at a time until every segment is consumed.
        while !segments.is_empty() {
            // Seed a new ring with the first remaining segment.
            let mut ring = segments.remove(0);

            // Safety valve: each pass removes at most one segment, so bound
            // the passes to avoid spinning on degenerate data.
            let max_iterations = segments.len() * segments.len() + 100;
            let mut iterations = 0;

            loop {
                iterations += 1;
                if iterations > max_iterations { break; }

                let mut connected = false;

                // Find any segment sharing an endpoint with the current ring
                // and splice it on, reversing it when needed.
                for i in 0..segments.len() {
                    let seg = &segments[i];
                    if seg.is_empty() { continue; }

                    let ring_start = *ring.first().unwrap();
                    let ring_end = *ring.last().unwrap();
                    let seg_start = *seg.first().unwrap();
                    let seg_end = *seg.last().unwrap();

                    if ring_end == seg_start {
                        // Append: ring + seg (skip seg's duplicated first node).
                        ring.extend(seg[1..].iter().cloned());
                        segments.remove(i);
                        connected = true;
                        break;
                    } else if ring_end == seg_end {
                        // Append seg reversed.
                        let reversed: Vec<i64> = seg.iter().rev().cloned().collect();
                        ring.extend(reversed[1..].iter().cloned());
                        segments.remove(i);
                        connected = true;
                        break;
                    } else if ring_start == seg_end {
                        // Prepend: seg + ring (skip ring's duplicated first node).
                        let mut new_ring = seg.clone();
                        new_ring.extend(ring[1..].iter().cloned());
                        ring = new_ring;
                        segments.remove(i);
                        connected = true;
                        break;
                    } else if ring_start == seg_start {
                        // Prepend seg reversed.
                        let mut reversed: Vec<i64> = seg.iter().rev().cloned().collect();
                        reversed.extend(ring[1..].iter().cloned());
                        ring = reversed;
                        segments.remove(i);
                        connected = true;
                        break;
                    }
                }

                // A ring is finished once it returns to its first node
                // (>= 4 ids: at least a triangle plus the closing id).
                if ring.len() >= 4 && ring.first() == ring.last() {
                    completed_rings.push(ring);
                    break; // move on to the next ring
                }

                // No segment connects and the ring is still open: keep a
                // force-closed approximation when large enough, so incomplete
                // data still renders something; tiny fragments are dropped.
                if !connected {
                    if ring.len() >= 4 {
                        // Force-close the ring by repeating its first node.
                        let first = ring[0];
                        ring.push(first);
                        completed_rings.push(ring);
                    }
                    break;
                }
            }
        }

        completed_rings
    }
}
|
||||
28
importer/src/services/railway_service.rs
Normal file
28
importer/src/services/railway_service.rs
Normal file
@@ -0,0 +1,28 @@
|
||||
use std::collections::HashMap;

/// Helpers for classifying OSM railway route relations.
pub struct RailwayService;

impl RailwayService {
    /// Returns the colour of a transit route relation, if `tags` describe one.
    ///
    /// A relation counts as transit when it is a `type=route` of kind subway,
    /// tram or light_rail, or a train route that looks like an S-Bahn /
    /// suburban service. The colour comes from the `colour` tag, falling back
    /// to the American spelling `color`.
    pub fn get_route_color(tags: &HashMap<String, String>) -> Option<String> {
        // Only route relations qualify at all.
        if tags.get("type").map(|t| t.as_str()) != Some("route") {
            return None;
        }

        let is_transit = match tags.get("route").map(|s| s.as_str()) {
            Some("subway") | Some("tram") | Some("light_rail") => true,
            Some("train") => {
                // Trains count only when they look like S-Bahn / suburban
                // services.
                let sbahn_network = tags.get("network").map(|n| n.contains("S-Bahn")).unwrap_or(false);
                let suburban = tags.get("service").map(|s| s == "suburban").unwrap_or(false);
                let s_ref = tags.get("ref").map(|r| r.starts_with("S")).unwrap_or(false);
                sbahn_network || suburban || s_ref
            },
            _ => false,
        };

        if !is_transit {
            return None;
        }

        tags.get("colour").or(tags.get("color")).cloned()
    }
}
|
||||
13
importer/src/services/tile_service.rs
Normal file
13
importer/src/services/tile_service.rs
Normal file
@@ -0,0 +1,13 @@
|
||||
use std::f64::consts::PI;
|
||||
|
||||
pub struct TileService;
|
||||
|
||||
impl TileService {
|
||||
pub fn lat_lon_to_tile(lat: f64, lon: f64, zoom: u32) -> (i32, i32) {
|
||||
let n = 2.0_f64.powi(zoom as i32);
|
||||
let x = ((lon + 180.0) / 360.0 * n).floor() as i32;
|
||||
let lat_rad = lat.to_radians();
|
||||
let y = ((1.0 - (lat_rad.tan() + 1.0 / lat_rad.cos()).ln() / PI) / 2.0 * n).floor() as i32;
|
||||
(x, y)
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user