This commit is contained in:
Dongho Kim
2025-12-15 23:10:38 +09:00
parent 169997eecd
commit 45807e3a90
19 changed files with 3438 additions and 2256 deletions

3
frontend/favicon.svg Normal file

@@ -0,0 +1,3 @@
<svg width="500" height="500" viewBox="0 0 100 100" xmlns="http://www.w3.org/2000/svg"><path d="M 11.27 100.00 A 42.54 42.54 0 0 1 96.35 100.00 Z" fill="#FF5656" style="mix-blend-mode: multiply"></path><path d="M 100.00 7.85 A 42.47 42.47 0 0 0 100.00 92.79 Z" fill="#8CE4FF" style="mix-blend-mode: multiply"></path><path d="M 18.87 0.00 A 38.03 38.03 0 0 0 94.94 0.00 Z" fill="#FFA239" style="mix-blend-mode: multiply"></path><path d="M 0.00 23.22 A 35.33 35.33 0 0 1 0.00 93.88 Z" fill="#FEEE91" style="mix-blend-mode: multiply"></path><circle cx="76.43" cy="75.68" r="2.31" fill="#FFFFFF"></circle></svg>


File diff suppressed because it is too large

52
frontend/src/camera.rs Normal file

@@ -0,0 +1,52 @@
//! Camera and input state management
/// GPU-compatible camera uniform data
#[repr(C)]
#[derive(Copy, Clone, Debug, bytemuck::Pod, bytemuck::Zeroable)]
pub struct CameraUniform {
/// x: scale_x, y: scale_y, z: translate_x, w: translate_y
pub params: [f32; 4],
/// x: is_dark (1.0 for dark, 0.0 for light), y,z,w: padding
pub theme: [f32; 4],
}
/// Camera state for 2D map view
pub struct Camera {
pub x: f32,
pub y: f32,
pub zoom: f32,
pub aspect: f32,
}
impl Camera {
pub fn to_uniform(&self, is_dark: bool) -> CameraUniform {
// Simple 2D orthographic projection-like transform
// We want to map world coordinates to clip space [-1, 1]
// zoom controls how much of the world we see.
// aspect ratio correction is needed for non-square windows.
// Scale:
// If zoom is 1.0, we see 2.0 units of world height (from -1 to 1).
// scale_y = zoom
// scale_x = zoom / aspect
CameraUniform {
params: [
self.zoom / self.aspect, // scale_x
-self.zoom, // scale_y (flipped for North-Up)
-self.x * (self.zoom / self.aspect), // translate_x
self.y * self.zoom, // translate_y (flipped sign)
],
theme: [
if is_dark { 1.0 } else { 0.0 },
0.0, 0.0, 0.0
],
}
}
}
/// Mouse/touch input state for drag operations
pub struct InputState {
pub is_dragging: bool,
pub last_cursor: Option<(f64, f64)>,
}
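
A minimal sketch, not part of this commit, of how the uniform above is consumed: labels.rs and the WGSL vertex shaders both compute clip coordinates as world * scale + translate from params. The helper name world_to_clip is hypothetical.

fn world_to_clip(u: &CameraUniform, wx: f32, wy: f32) -> (f32, f32) {
    // clip = world * scale + translate, mirroring `x * camera.params.x + camera.params.z` in the shaders
    (wx * u.params[0] + u.params[2], wy * u.params[1] + u.params[3])
}

// For Camera { x: 0.5, y: 0.5, zoom: 1.0, aspect: 2.0 } the world centre (0.5, 0.5)
// lands at clip (0.0, 0.0), i.e. the middle of the viewport.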

60
frontend/src/geo.rs Normal file

@@ -0,0 +1,60 @@
//! Geographic projection and location filtering utilities
/// Web Mercator Projection
/// Returns (x, y) in range [0.0, 1.0] for the whole world
pub fn project(lat: f64, lon: f64) -> (f32, f32) {
let x = (lon + 180.0) / 360.0;
let lat_rad = lat.to_radians();
let y = (1.0 - (lat_rad.tan() + (1.0 / lat_rad.cos())).ln() / std::f64::consts::PI) / 2.0;
// Validate results - clamp to valid range and handle NaN/Infinity
let x = if x.is_finite() { (x as f32).clamp(0.0, 1.0) } else { 0.5 };
let y = if y.is_finite() { (y as f32).clamp(0.0, 1.0) } else { 0.5 };
(x, y)
}
/// Kalman filter for smoothing GPS location updates
#[derive(Debug, Clone)]
pub struct KalmanFilter {
pub lat: f64,
pub lon: f64,
pub variance: f64,
pub timestamp: f64,
}
impl KalmanFilter {
pub fn new(lat: f64, lon: f64, timestamp: f64) -> Self {
Self {
lat,
lon,
variance: 0.0,
timestamp,
}
}
pub fn process(&mut self, lat: f64, lon: f64, accuracy: f64, timestamp: f64) -> (f64, f64) {
if accuracy <= 0.0 { return (self.lat, self.lon); }
let dt = timestamp - self.timestamp;
if dt < 0.0 { return (self.lat, self.lon); }
// Process noise: assumed movement of ~3 metres per second; dt is in milliseconds,
// so divide by 1000 to convert to seconds
let q_metres_per_sec = 3.0;
let variance_process = q_metres_per_sec * q_metres_per_sec * dt / 1000.0;
// Prediction step
let variance = self.variance + variance_process;
// Update step
let measurement_variance = accuracy * accuracy;
let k = variance / (variance + measurement_variance);
self.lat = self.lat + k * (lat - self.lat);
self.lon = self.lon + k * (lon - self.lon);
self.variance = (1.0 - k) * variance;
self.timestamp = timestamp;
(self.lat, self.lon)
}
}
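
A small usage sketch, not part of this commit: raw geolocation fixes are assumed to carry millisecond timestamps (matching the dt / 1000.0 scaling above) and are smoothed before projection. smooth_and_project is a hypothetical helper.

fn smooth_and_project(filter: &mut KalmanFilter, lat: f64, lon: f64, accuracy_m: f64, t_ms: f64) -> (f32, f32) {
    let (lat, lon) = filter.process(lat, lon, accuracy_m, t_ms);
    project(lat, lon)
}

// project(0.0, 0.0) == (0.5, 0.5): the equator / prime-meridian crossing sits at the
// centre of the [0, 1] Web Mercator square.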

360
frontend/src/labels.rs Normal file

@@ -0,0 +1,360 @@
//! Label rendering for map features
use wasm_bindgen::JsCast;
use crate::camera::Camera;
use crate::state::AppState;
use crate::geo::project;
use crate::tiles::get_visible_tiles;
/// A candidate label for rendering
pub struct LabelCandidate {
pub name: String,
pub x: f64,
pub y: f64,
pub priority: i32,
#[allow(dead_code)]
pub is_country: bool,
pub rotation: f64,
pub label_type: LabelType,
pub category: String,
}
/// Type of label for styling
#[derive(Clone, Copy, PartialEq)]
pub enum LabelType {
Country,
City,
Street,
Poi,
}
/// Update DOM labels based on current camera and state
pub fn update_labels(
window: &web_sys::Window,
camera: &Camera,
state: &AppState,
width: f64,
height: f64,
_scale_factor: f64,
) {
let document = window.document().unwrap();
let container = document.get_element_by_id("labels").unwrap();
// Clear existing labels
container.set_inner_html("");
let visible_tiles = get_visible_tiles(camera);
let is_dark = document.document_element().map(|e| e.get_attribute("data-theme").unwrap_or_default() == "dark").unwrap_or(false);
let uniforms = camera.to_uniform(is_dark);
let mut candidates: Vec<LabelCandidate> = Vec::new();
let zoom = camera.zoom;
for tile in &visible_tiles {
if let Some(nodes) = state.nodes.get(tile) {
for node in nodes {
let place: Option<&str> = node.tags.get("place").map(|s| s.as_str());
let name: Option<&str> = node.tags.get("name").map(|s| s.as_str());
if let (Some(place), Some(name)) = (place, name) {
// 1. Zoom Level Filtering
let should_show = match place {
"continent" | "country" => true,
"city" => zoom > 20.0,
"town" => zoom > 500.0,
"village" | "hamlet" => zoom > 2000.0,
"suburb" => zoom > 5000.0,
_ => false,
};
if !should_show { continue; }
// 2. Priority Calculation
let mut priority: i32 = match place {
"continent" => 1000,
"country" => 100,
"city" => 80,
"town" => 60,
"village" => 40,
"hamlet" => 20,
_ => 0,
};
// Capital bonus
if let Some(capital) = node.tags.get("capital") {
if capital == "yes" {
priority += 10;
}
}
// Population bonus (logarithmic)
if let Some(pop_str) = node.tags.get("population") {
if let Ok(pop) = pop_str.parse::<f64>() {
priority += (pop.log10() * 2.0) as i32;
}
}
// 3. Projection & Screen Coordinates
let (x, y) = project(node.lat, node.lon);
let cx = x * uniforms.params[0] + uniforms.params[2];
let cy = y * uniforms.params[1] + uniforms.params[3];
// Clip check (NDC)
if cx < -1.2 || cx > 1.2 || cy < -1.2 || cy > 1.2 { continue; }
// Direct NDC to CSS Pixel mapping
let client_width = window.inner_width().ok().and_then(|v| v.as_f64()).unwrap_or(width);
let client_height = window.inner_height().ok().and_then(|v| v.as_f64()).unwrap_or(height);
let css_x = (cx as f64 + 1.0) * 0.5 * client_width;
let css_y = (1.0 - cy as f64) * 0.5 * client_height;
let name_string: String = name.to_string();
let label_type = if place == "country" || place == "continent" {
LabelType::Country
} else {
LabelType::City
};
candidates.push(LabelCandidate {
name: name_string,
x: css_x,
y: css_y,
priority,
is_country: place == "country" || place == "continent",
rotation: 0.0,
label_type,
category: "place".to_string(),
});
}
// POI Labels (amenity, leisure, tourism)
let amenity: Option<&str> = node.tags.get("amenity").map(|s| s.as_str());
let leisure: Option<&str> = node.tags.get("leisure").map(|s| s.as_str());
let tourism: Option<&str> = node.tags.get("tourism").map(|s| s.as_str());
let poi_name: Option<&str> = node.tags.get("name").map(|s| s.as_str());
if let Some(poi_name) = poi_name {
if poi_name.is_empty() { continue; }
// Determine POI type and set zoom threshold
let (min_zoom, priority) = if let Some(amenity_type) = amenity {
match amenity_type {
"hospital" => (500.0, 45),
"university" | "college" => (800.0, 40),
"school" => (2000.0, 25),
"pharmacy" | "doctors" => (3000.0, 20),
"restaurant" | "cafe" => (5000.0, 15),
"fuel" | "parking" => (4000.0, 18),
"bank" | "atm" => (4000.0, 17),
_ => (6000.0, 10),
}
} else if let Some(leisure_type) = leisure {
match leisure_type {
"park" | "garden" => (800.0, 38),
"sports_centre" | "stadium" => (1500.0, 32),
"playground" => (4000.0, 15),
_ => (5000.0, 12),
}
} else if let Some(tourism_type) = tourism {
match tourism_type {
"attraction" | "museum" => (500.0, 42),
"hotel" => (2000.0, 28),
"viewpoint" => (1500.0, 30),
_ => (3000.0, 20),
}
} else {
continue;
};
// Zoom filter
if zoom < min_zoom { continue; }
// Project coordinates
let (x, y) = project(node.lat, node.lon);
let cx = x * uniforms.params[0] + uniforms.params[2];
let cy = y * uniforms.params[1] + uniforms.params[3];
if cx < -1.2 || cx > 1.2 || cy < -1.2 || cy > 1.2 { continue; }
let client_width = window.inner_width().ok().and_then(|v| v.as_f64()).unwrap_or(width);
let client_height = window.inner_height().ok().and_then(|v| v.as_f64()).unwrap_or(height);
let css_x = (cx as f64 + 1.0) * 0.5 * client_width;
let css_y = (1.0 - cy as f64) * 0.5 * client_height;
candidates.push(LabelCandidate {
name: poi_name.to_string(),
x: css_x,
y: css_y,
priority,
is_country: false,
rotation: 0.0,
label_type: LabelType::Poi,
category: if let Some(t) = amenity { t.to_string() }
else if let Some(t) = leisure { t.to_string() }
else if let Some(t) = tourism { t.to_string() }
else { "generic".to_string() },
});
}
}
}
}
// Process ways for street labels
let client_width = window.inner_width().ok().and_then(|v| v.as_f64()).unwrap_or(width);
let client_height = window.inner_height().ok().and_then(|v| v.as_f64()).unwrap_or(height);
for tile in &visible_tiles {
if let Some(ways) = state.ways.get(tile) {
for way in ways {
// Check if road has a name
let name: Option<&str> = way.tags.get("name").map(|s| s.as_str());
let highway: Option<&str> = way.tags.get("highway").map(|s| s.as_str());
if let (Some(name), Some(highway_type)) = (name, highway) {
// Skip unnamed or minor roads
if name.is_empty() { continue; }
// Zoom filtering
let min_zoom = match highway_type {
"motorway" | "trunk" => 200.0,
"primary" => 500.0,
"secondary" => 1500.0,
"tertiary" => 3000.0,
"residential" | "unclassified" => 6000.0,
_ => 10000.0,
};
if zoom < min_zoom { continue; }
// Priority based on road type
let priority: i32 = match highway_type {
"motorway" | "trunk" => 50,
"primary" => 40,
"secondary" => 30,
"tertiary" => 20,
_ => 10,
};
// Parse road points to find midpoint and angle
let points = &way.points;
// points are packed as consecutive (lat, lon) pairs of little-endian f32s, 8 bytes per point
if points.len() < 16 { continue; } // need at least 2 points
let mut parsed_points: Vec<[f64; 2]> = Vec::new();
for chunk in points.chunks(8) {
if chunk.len() < 8 { break; }
let lat = f32::from_le_bytes(chunk[0..4].try_into().unwrap_or([0u8; 4])) as f64;
let lon = f32::from_le_bytes(chunk[4..8].try_into().unwrap_or([0u8; 4])) as f64;
parsed_points.push([lat, lon]);
}
if parsed_points.len() < 2 { continue; }
// Calculate midpoint
let mid_idx = parsed_points.len() / 2;
let mid_point = parsed_points[mid_idx];
// Calculate angle from road direction
let p1 = if mid_idx > 0 { parsed_points[mid_idx - 1] } else { parsed_points[0] };
let p2 = if mid_idx + 1 < parsed_points.len() { parsed_points[mid_idx + 1] } else { parsed_points[mid_idx] };
let (x1, y1) = project(p1[0], p1[1]);
let (x2, y2) = project(p2[0], p2[1]);
let dx = x2 - x1;
let dy = -(y2 - y1);
let mut angle_deg = (dy as f64).atan2(dx as f64).to_degrees();
// Keep text readable
if angle_deg > 90.0 { angle_deg -= 180.0; }
if angle_deg < -90.0 { angle_deg += 180.0; }
// Project midpoint to screen
let (mx, my) = project(mid_point[0], mid_point[1]);
let cx = mx * uniforms.params[0] + uniforms.params[2];
let cy = my * uniforms.params[1] + uniforms.params[3];
// Clip check
if cx < -1.5 || cx > 1.5 || cy < -1.5 || cy > 1.5 { continue; }
let css_x = (cx as f64 + 1.0) * 0.5 * client_width;
let css_y = (1.0 - cy as f64) * 0.5 * client_height;
candidates.push(LabelCandidate {
name: name.to_string(),
x: css_x,
y: css_y,
priority,
is_country: false,
rotation: angle_deg,
label_type: LabelType::Street,
category: "street".to_string(),
});
}
}
}
}
// 4. Sort by Priority (High to Low)
candidates.sort_by(|a, b| b.priority.cmp(&a.priority));
// 5. Collision Detection & Placement
let mut placed_rects: Vec<(f64, f64, f64, f64)> = Vec::new();
for candidate in candidates {
// Estimate dimensions based on label type
let (est_w, est_h) = match candidate.label_type {
LabelType::Country => (candidate.name.len() as f64 * 12.0 + 20.0, 24.0),
LabelType::City => (candidate.name.len() as f64 * 8.0 + 10.0, 16.0),
LabelType::Street => (candidate.name.len() as f64 * 6.0 + 8.0, 12.0),
LabelType::Poi => (candidate.name.len() as f64 * 6.5 + 10.0, 14.0),
};
// Centered label
let rect_x = candidate.x - est_w / 2.0;
let rect_y = candidate.y - est_h / 2.0;
// Check collision
let mut collision = false;
for (px, py, pw, ph) in &placed_rects {
let padding = if candidate.label_type == LabelType::Street { 12.0 } else { 20.0 };
if rect_x < px + pw + padding &&
rect_x + est_w + padding > *px &&
rect_y < py + ph + padding &&
rect_y + est_h + padding > *py {
collision = true;
break;
}
}
if !collision {
placed_rects.push((rect_x, rect_y, est_w, est_h));
let div = document.create_element("div").unwrap();
let class_name = match candidate.label_type {
LabelType::Country => "label label-country".to_string(),
LabelType::City => "label label-city".to_string(),
LabelType::Street => "label label-street".to_string(),
LabelType::Poi => format!("label label-poi label-poi-{}", candidate.category),
};
div.set_class_name(&class_name);
div.set_text_content(Some(&candidate.name));
let div_html: web_sys::HtmlElement = div.dyn_into().unwrap();
let style = div_html.style();
style.set_property("left", &format!("{}px", candidate.x)).unwrap();
style.set_property("top", &format!("{}px", candidate.y)).unwrap();
let transform = match candidate.label_type {
LabelType::Poi => "translate(-50%, 10px)".to_string(),
LabelType::Street if candidate.rotation.abs() > 0.5 => format!("translate(-50%, -50%) rotate({}deg)", candidate.rotation),
_ => "translate(-50%, -50%)".to_string(),
};
style.set_property("transform", &transform).unwrap();
container.append_child(&div_html).unwrap();
}
}
}
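
The placement loop above is a greedy, priority-ordered pass using padded axis-aligned rectangle overlap tests. A standalone sketch of that overlap rule, not part of this commit; rects_overlap is a hypothetical helper and rectangles are (x, y, w, h):

fn rects_overlap(a: (f64, f64, f64, f64), b: (f64, f64, f64, f64), padding: f64) -> bool {
    // Overlap on both axes, with `padding` extra spacing kept between labels
    a.0 < b.0 + b.2 + padding
        && a.0 + a.2 + padding > b.0
        && a.1 < b.1 + b.3 + padding
        && a.1 + a.3 + padding > b.1
}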

File diff suppressed because it is too large


@@ -0,0 +1,95 @@
//! Building render pipeline
use super::common::Vertex;
pub fn create_building_pipeline(
device: &wgpu::Device,
format: &wgpu::TextureFormat,
bind_group_layout: &wgpu::BindGroupLayout
) -> wgpu::RenderPipeline {
let shader = device.create_shader_module(wgpu::ShaderModuleDescriptor {
label: None,
source: wgpu::ShaderSource::Wgsl(std::borrow::Cow::Borrowed(r#"
struct CameraUniform {
params: vec4<f32>,
theme: vec4<f32>,
};
@group(0) @binding(0)
var<uniform> camera: CameraUniform;
struct VertexInput {
@location(0) position: vec2<f32>,
};
struct VertexOutput {
@builtin(position) clip_position: vec4<f32>,
};
@vertex
fn vs_main(
model: VertexInput,
) -> VertexOutput {
var out: VertexOutput;
let world_pos = model.position;
let x = world_pos.x * camera.params.x + camera.params.z;
let y = world_pos.y * camera.params.y + camera.params.w;
// Globe Effect: Spherize
// let r2 = x*x + y*y;
// let w = 1.0 + r2 * 0.5;
out.clip_position = vec4<f32>(x, y, 0.0, 1.0);
return out;
}
@fragment
fn fs_main(in: VertexOutput) -> @location(0) vec4<f32> {
// Buildings: Light: #d9d9d9 (0.85), Dark: #333333 (0.2)
let is_dark = camera.theme.x;
let color = mix(vec3<f32>(0.85, 0.85, 0.85), vec3<f32>(0.2, 0.2, 0.2), is_dark);
return vec4<f32>(color, 1.0);
}
"#)),
});
let pipeline_layout = device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor {
label: Some("Building Pipeline Layout"),
bind_group_layouts: &[bind_group_layout],
push_constant_ranges: &[],
});
device.create_render_pipeline(&wgpu::RenderPipelineDescriptor {
label: None,
layout: Some(&pipeline_layout),
vertex: wgpu::VertexState {
module: &shader,
entry_point: "vs_main",
buffers: &[
Vertex::desc(),
],
},
fragment: Some(wgpu::FragmentState {
module: &shader,
entry_point: "fs_main",
targets: &[Some(wgpu::ColorTargetState {
format: *format,
blend: Some(wgpu::BlendState::REPLACE),
write_mask: wgpu::ColorWrites::ALL,
})],
}),
primitive: wgpu::PrimitiveState {
topology: wgpu::PrimitiveTopology::TriangleList,
strip_index_format: None,
front_face: wgpu::FrontFace::Ccw,
cull_mode: None,
unclipped_depth: false,
polygon_mode: wgpu::PolygonMode::Fill,
conservative: false,
},
depth_stencil: None,
multisample: wgpu::MultisampleState::default(),
multiview: None,
})
}


@@ -0,0 +1,71 @@
//! Common pipeline utilities and vertex types
/// GPU vertex with 2D position
#[repr(C)]
#[derive(Copy, Clone, Debug, bytemuck::Pod, bytemuck::Zeroable)]
pub struct Vertex {
pub position: [f32; 2],
}
impl Vertex {
pub fn desc() -> wgpu::VertexBufferLayout<'static> {
wgpu::VertexBufferLayout {
array_stride: std::mem::size_of::<Vertex>() as wgpu::BufferAddress,
step_mode: wgpu::VertexStepMode::Vertex,
attributes: &[
wgpu::VertexAttribute {
offset: 0,
shader_location: 0,
format: wgpu::VertexFormat::Float32x2,
}
]
}
}
}
/// Create a simple render pipeline with standard configuration
pub fn create_simple_pipeline(
device: &wgpu::Device,
format: &wgpu::TextureFormat,
bind_group_layout: &wgpu::BindGroupLayout,
shader: &wgpu::ShaderModule,
label: &str,
topology: wgpu::PrimitiveTopology,
) -> wgpu::RenderPipeline {
let pipeline_layout = device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor {
label: Some(label),
bind_group_layouts: &[bind_group_layout],
push_constant_ranges: &[],
});
device.create_render_pipeline(&wgpu::RenderPipelineDescriptor {
label: Some(label),
layout: Some(&pipeline_layout),
vertex: wgpu::VertexState {
module: shader,
entry_point: "vs_main",
buffers: &[Vertex::desc()],
},
fragment: Some(wgpu::FragmentState {
module: shader,
entry_point: "fs_main",
targets: &[Some(wgpu::ColorTargetState {
format: *format,
blend: Some(wgpu::BlendState::REPLACE),
write_mask: wgpu::ColorWrites::ALL,
})],
}),
primitive: wgpu::PrimitiveState {
topology,
strip_index_format: None,
front_face: wgpu::FrontFace::Ccw,
cull_mode: None,
unclipped_depth: false,
polygon_mode: wgpu::PolygonMode::Fill,
conservative: false,
},
depth_stencil: None,
multisample: wgpu::MultisampleState::default(),
multiview: None,
})
}


@@ -0,0 +1,126 @@
//! Landuse render pipelines (green, residential, sand)
use super::common::create_simple_pipeline;
pub fn create_landuse_green_pipeline(
device: &wgpu::Device,
format: &wgpu::TextureFormat,
bind_group_layout: &wgpu::BindGroupLayout
) -> wgpu::RenderPipeline {
let shader = device.create_shader_module(wgpu::ShaderModuleDescriptor {
label: None,
source: wgpu::ShaderSource::Wgsl(std::borrow::Cow::Borrowed(r#"
struct CameraUniform {
params: vec4<f32>,
theme: vec4<f32>,
};
@group(0) @binding(0)
var<uniform> camera: CameraUniform;
struct VertexInput {
@location(0) position: vec2<f32>,
};
struct VertexOutput {
@builtin(position) clip_position: vec4<f32>,
};
@vertex
fn vs_main(model: VertexInput) -> VertexOutput {
var out: VertexOutput;
let world_pos = model.position;
let x = world_pos.x * camera.params.x + camera.params.z;
let y = world_pos.y * camera.params.y + camera.params.w;
out.clip_position = vec4<f32>(x, y, 0.0, 1.0);
return out;
}
@fragment
fn fs_main(in: VertexOutput) -> @location(0) vec4<f32> {
let is_dark = camera.theme.x;
// Green: Light #cdebb0, Dark #2d4a2d
let color = mix(vec3<f32>(0.80, 0.92, 0.69), vec3<f32>(0.18, 0.29, 0.18), is_dark);
return vec4<f32>(color, 1.0);
}
"#)),
});
create_simple_pipeline(device, format, bind_group_layout, &shader, "Green Landuse Pipeline", wgpu::PrimitiveTopology::TriangleList)
}
pub fn create_landuse_residential_pipeline(
device: &wgpu::Device,
format: &wgpu::TextureFormat,
bind_group_layout: &wgpu::BindGroupLayout
) -> wgpu::RenderPipeline {
let shader = device.create_shader_module(wgpu::ShaderModuleDescriptor {
label: None,
source: wgpu::ShaderSource::Wgsl(std::borrow::Cow::Borrowed(r#"
struct CameraUniform {
params: vec4<f32>,
theme: vec4<f32>,
};
@group(0) @binding(0)
var<uniform> camera: CameraUniform;
struct VertexInput {
@location(0) position: vec2<f32>,
};
struct VertexOutput {
@builtin(position) clip_position: vec4<f32>,
};
@vertex
fn vs_main(model: VertexInput) -> VertexOutput {
var out: VertexOutput;
let world_pos = model.position;
let x = world_pos.x * camera.params.x + camera.params.z;
let y = world_pos.y * camera.params.y + camera.params.w;
out.clip_position = vec4<f32>(x, y, 0.0, 1.0);
return out;
}
@fragment
fn fs_main(in: VertexOutput) -> @location(0) vec4<f32> {
let is_dark = camera.theme.x;
// Light: #e0dfdf, Dark: #1a1a1a (very dark grey for residential areas)
let color = mix(vec3<f32>(0.88, 0.87, 0.87), vec3<f32>(0.1, 0.1, 0.1), is_dark);
return vec4<f32>(color, 1.0);
}
"#)),
});
create_simple_pipeline(device, format, bind_group_layout, &shader, "Residential Landuse Pipeline", wgpu::PrimitiveTopology::TriangleList)
}
pub fn create_sand_pipeline(
device: &wgpu::Device,
format: &wgpu::TextureFormat,
bind_group_layout: &wgpu::BindGroupLayout
) -> wgpu::RenderPipeline {
let shader = device.create_shader_module(wgpu::ShaderModuleDescriptor {
label: None,
source: wgpu::ShaderSource::Wgsl(std::borrow::Cow::Borrowed(r#"
struct CameraUniform {
params: vec4<f32>,
theme: vec4<f32>,
};
@group(0) @binding(0)
var<uniform> camera: CameraUniform;
struct VertexInput {
@location(0) position: vec2<f32>,
};
struct VertexOutput {
@builtin(position) clip_position: vec4<f32>,
};
@vertex
fn vs_main(model: VertexInput) -> VertexOutput {
var out: VertexOutput;
let world_pos = model.position;
let x = world_pos.x * camera.params.x + camera.params.z;
let y = world_pos.y * camera.params.y + camera.params.w;
out.clip_position = vec4<f32>(x, y, 0.0, 1.0);
return out;
}
@fragment
fn fs_main(in: VertexOutput) -> @location(0) vec4<f32> {
let is_dark = camera.theme.x;
// Sand: #e6d5ac (Light), Dark Sand: #5c5545 (Dark)
let color = mix(vec3<f32>(0.90, 0.83, 0.67), vec3<f32>(0.36, 0.33, 0.27), is_dark);
return vec4<f32>(color, 1.0);
}
"#)),
});
create_simple_pipeline(device, format, bind_group_layout, &shader, "Sand Pipeline", wgpu::PrimitiveTopology::TriangleList)
}


@@ -0,0 +1,21 @@
//! Render pipeline modules for different map features
pub mod common;
pub mod building;
pub mod water;
pub mod roads;
pub mod landuse;
pub mod railway;
pub use common::{Vertex, create_simple_pipeline};
pub use building::create_building_pipeline;
pub use water::{create_water_pipeline, create_water_line_pipeline};
pub use roads::{
create_road_motorway_pipeline,
create_road_primary_pipeline,
create_road_secondary_pipeline,
create_road_residential_pipeline,
create_road_mesh,
};
pub use landuse::{create_landuse_green_pipeline, create_landuse_residential_pipeline, create_sand_pipeline};
pub use railway::create_railway_pipeline;


@@ -0,0 +1,95 @@
//! Railway render pipeline
use super::common::Vertex;
pub fn create_railway_pipeline(
device: &wgpu::Device,
format: &wgpu::TextureFormat,
bind_group_layout: &wgpu::BindGroupLayout
) -> wgpu::RenderPipeline {
let shader = device.create_shader_module(wgpu::ShaderModuleDescriptor {
label: None,
source: wgpu::ShaderSource::Wgsl(std::borrow::Cow::Borrowed(r#"
struct CameraUniform {
params: vec4<f32>,
theme: vec4<f32>,
};
@group(0) @binding(0)
var<uniform> camera: CameraUniform;
struct VertexInput {
@location(0) position: vec2<f32>,
};
struct VertexOutput {
@builtin(position) clip_position: vec4<f32>,
};
@vertex
fn vs_main(
model: VertexInput,
) -> VertexOutput {
var out: VertexOutput;
let world_pos = model.position;
let x = world_pos.x * camera.params.x + camera.params.z;
let y = world_pos.y * camera.params.y + camera.params.w;
// Globe Effect: Spherize
// let r2 = x*x + y*y;
// let w = 1.0 + r2 * 0.5;
out.clip_position = vec4<f32>(x, y, 0.0, 1.0);
return out;
}
@fragment
fn fs_main(in: VertexOutput) -> @location(0) vec4<f32> {
let is_dark = camera.theme.x;
// Light: #808080 (grey), Dark: #5a5a5a (darker grey)
let color = mix(vec3<f32>(0.5, 0.5, 0.5), vec3<f32>(0.35, 0.35, 0.35), is_dark);
return vec4<f32>(color, 1.0);
}
"#)),
});
let pipeline_layout = device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor {
label: Some("Railway Pipeline Layout"),
bind_group_layouts: &[bind_group_layout],
push_constant_ranges: &[],
});
device.create_render_pipeline(&wgpu::RenderPipelineDescriptor {
label: None,
layout: Some(&pipeline_layout),
vertex: wgpu::VertexState {
module: &shader,
entry_point: "vs_main",
buffers: &[
Vertex::desc(),
],
},
fragment: Some(wgpu::FragmentState {
module: &shader,
entry_point: "fs_main",
targets: &[Some(wgpu::ColorTargetState {
format: *format,
blend: Some(wgpu::BlendState::REPLACE),
write_mask: wgpu::ColorWrites::ALL,
})],
}),
primitive: wgpu::PrimitiveState {
topology: wgpu::PrimitiveTopology::LineList,
strip_index_format: None,
front_face: wgpu::FrontFace::Ccw,
cull_mode: None,
unclipped_depth: false,
polygon_mode: wgpu::PolygonMode::Fill,
conservative: false,
},
depth_stencil: None,
multisample: wgpu::MultisampleState::default(),
multiview: None,
})
}


@@ -0,0 +1,214 @@
//! Road render pipelines (motorway, primary, secondary, residential)
use super::common::{Vertex, create_simple_pipeline};
use crate::geo::project;
/// Create road mesh geometry (thick lines as triangles)
#[allow(dead_code)]
pub fn create_road_mesh(points: &[[f64; 2]], width: f32) -> Vec<Vertex> {
let mut vertices = Vec::new();
if points.len() < 2 { return vertices; }
for i in 0..points.len() - 1 {
let p1 = points[i];
let p2 = points[i+1];
// Convert to projected coordinates (0..1)
let (x1, y1) = project(p1[0], p1[1]);
let (x2, y2) = project(p2[0], p2[1]);
let dx = x2 - x1;
let dy = y2 - y1;
let len = (dx * dx + dy * dy).sqrt();
// Skip invalid segments:
// 1. Very short segments that would create degenerate geometry
// 2. Segments where width > length (creates giant rectangles instead of roads)
if len < 0.00001 || len < width * 2.0 { continue; }
// Normal vector scaled by width
let nx = -dy / len * width;
let ny = dx / len * width;
// 4 corners
let v1 = Vertex { position: [x1 + nx, y1 + ny] };
let v2 = Vertex { position: [x1 - nx, y1 - ny] };
let v3 = Vertex { position: [x2 + nx, y2 + ny] };
let v4 = Vertex { position: [x2 - nx, y2 - ny] };
// Triangle 1
vertices.push(v1);
vertices.push(v2);
vertices.push(v3);
// Triangle 2
vertices.push(v2);
vertices.push(v4);
vertices.push(v3);
}
vertices
}
pub fn create_road_motorway_pipeline(
device: &wgpu::Device,
format: &wgpu::TextureFormat,
bind_group_layout: &wgpu::BindGroupLayout
) -> wgpu::RenderPipeline {
let shader = device.create_shader_module(wgpu::ShaderModuleDescriptor {
label: None,
source: wgpu::ShaderSource::Wgsl(std::borrow::Cow::Borrowed(r#"
struct CameraUniform {
params: vec4<f32>,
theme: vec4<f32>,
};
@group(0) @binding(0)
var<uniform> camera: CameraUniform;
struct VertexInput {
@location(0) position: vec2<f32>,
};
struct VertexOutput {
@builtin(position) clip_position: vec4<f32>,
};
@vertex
fn vs_main(model: VertexInput) -> VertexOutput {
var out: VertexOutput;
let world_pos = model.position;
let x = world_pos.x * camera.params.x + camera.params.z;
let y = world_pos.y * camera.params.y + camera.params.w;
out.clip_position = vec4<f32>(x, y, 0.0, 1.0);
return out;
}
@fragment
fn fs_main(in: VertexOutput) -> @location(0) vec4<f32> {
let is_dark = camera.theme.x;
// Light: #e990a0, Dark: #d97080 (slightly darker/richer)
let color = mix(vec3<f32>(0.91, 0.56, 0.63), vec3<f32>(0.85, 0.44, 0.50), is_dark);
return vec4<f32>(color, 1.0);
}
"#)),
});
create_simple_pipeline(device, format, bind_group_layout, &shader, "Motorway Pipeline", wgpu::PrimitiveTopology::LineList)
}
pub fn create_road_primary_pipeline(
device: &wgpu::Device,
format: &wgpu::TextureFormat,
bind_group_layout: &wgpu::BindGroupLayout
) -> wgpu::RenderPipeline {
let shader = device.create_shader_module(wgpu::ShaderModuleDescriptor {
label: None,
source: wgpu::ShaderSource::Wgsl(std::borrow::Cow::Borrowed(r#"
struct CameraUniform {
params: vec4<f32>,
theme: vec4<f32>,
};
@group(0) @binding(0)
var<uniform> camera: CameraUniform;
struct VertexInput {
@location(0) position: vec2<f32>,
};
struct VertexOutput {
@builtin(position) clip_position: vec4<f32>,
};
@vertex
fn vs_main(model: VertexInput) -> VertexOutput {
var out: VertexOutput;
let world_pos = model.position;
let x = world_pos.x * camera.params.x + camera.params.z;
let y = world_pos.y * camera.params.y + camera.params.w;
out.clip_position = vec4<f32>(x, y, 0.0, 1.0);
return out;
}
@fragment
fn fs_main(in: VertexOutput) -> @location(0) vec4<f32> {
let is_dark = camera.theme.x;
// Light: #fdbf6f, Dark: #e09f3f
let color = mix(vec3<f32>(0.99, 0.75, 0.44), vec3<f32>(0.88, 0.62, 0.25), is_dark);
return vec4<f32>(color, 1.0);
}
"#)),
});
create_simple_pipeline(device, format, bind_group_layout, &shader, "Primary Road Pipeline", wgpu::PrimitiveTopology::LineList)
}
pub fn create_road_secondary_pipeline(
device: &wgpu::Device,
format: &wgpu::TextureFormat,
bind_group_layout: &wgpu::BindGroupLayout
) -> wgpu::RenderPipeline {
let shader = device.create_shader_module(wgpu::ShaderModuleDescriptor {
label: None,
source: wgpu::ShaderSource::Wgsl(std::borrow::Cow::Borrowed(r#"
struct CameraUniform {
params: vec4<f32>,
theme: vec4<f32>,
};
@group(0) @binding(0)
var<uniform> camera: CameraUniform;
struct VertexInput {
@location(0) position: vec2<f32>,
};
struct VertexOutput {
@builtin(position) clip_position: vec4<f32>,
};
@vertex
fn vs_main(model: VertexInput) -> VertexOutput {
var out: VertexOutput;
let world_pos = model.position;
let x = world_pos.x * camera.params.x + camera.params.z;
let y = world_pos.y * camera.params.y + camera.params.w;
out.clip_position = vec4<f32>(x, y, 0.0, 1.0);
return out;
}
@fragment
fn fs_main(in: VertexOutput) -> @location(0) vec4<f32> {
let is_dark = camera.theme.x;
// Light: White, Dark: #444444
let color = mix(vec3<f32>(1.0, 1.0, 1.0), vec3<f32>(0.27, 0.27, 0.27), is_dark);
return vec4<f32>(color, 1.0);
}
"#)),
});
create_simple_pipeline(device, format, bind_group_layout, &shader, "Secondary Road Pipeline", wgpu::PrimitiveTopology::LineList)
}
pub fn create_road_residential_pipeline(
device: &wgpu::Device,
format: &wgpu::TextureFormat,
bind_group_layout: &wgpu::BindGroupLayout
) -> wgpu::RenderPipeline {
let shader = device.create_shader_module(wgpu::ShaderModuleDescriptor {
label: None,
source: wgpu::ShaderSource::Wgsl(std::borrow::Cow::Borrowed(r#"
struct CameraUniform {
params: vec4<f32>,
theme: vec4<f32>,
};
@group(0) @binding(0)
var<uniform> camera: CameraUniform;
struct VertexInput {
@location(0) position: vec2<f32>,
};
struct VertexOutput {
@builtin(position) clip_position: vec4<f32>,
};
@vertex
fn vs_main(model: VertexInput) -> VertexOutput {
var out: VertexOutput;
let world_pos = model.position;
let x = world_pos.x * camera.params.x + camera.params.z;
let y = world_pos.y * camera.params.y + camera.params.w;
out.clip_position = vec4<f32>(x, y, 0.0, 1.0);
return out;
}
@fragment
fn fs_main(in: VertexOutput) -> @location(0) vec4<f32> {
let is_dark = camera.theme.x;
// Light: White, Dark: #333333
let color = mix(vec3<f32>(1.0, 1.0, 1.0), vec3<f32>(0.2, 0.2, 0.2), is_dark);
return vec4<f32>(color, 1.0);
}
"#)),
});
create_simple_pipeline(device, format, bind_group_layout, &shader, "Residential Road Pipeline", wgpu::PrimitiveTopology::LineList)
}
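
create_road_mesh above is currently unused (the road pipelines draw LineList primitives), but a usage sketch, not part of this commit, would look like the following. The width of 1e-7 is an arbitrary illustration value in projected [0, 1] world units, roughly a few metres at ground scale.

let polyline: Vec<[f64; 2]> = vec![[48.8566, 2.3522], [48.8570, 2.3530], [48.8575, 2.3541]];
let vertices = create_road_mesh(&polyline, 1e-7);
// Each kept segment contributes two triangles (6 vertices); segments shorter than twice
// the width are skipped, so vertices.len() can be less than (polyline.len() - 1) * 6.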


@@ -0,0 +1,146 @@
//! Water render pipelines
use super::common::{Vertex, create_simple_pipeline};
pub fn create_water_pipeline(
device: &wgpu::Device,
format: &wgpu::TextureFormat,
bind_group_layout: &wgpu::BindGroupLayout
) -> wgpu::RenderPipeline {
let shader = device.create_shader_module(wgpu::ShaderModuleDescriptor {
label: None,
source: wgpu::ShaderSource::Wgsl(std::borrow::Cow::Borrowed(r#"
struct CameraUniform {
params: vec4<f32>,
theme: vec4<f32>,
};
@group(0) @binding(0)
var<uniform> camera: CameraUniform;
struct VertexInput {
@location(0) position: vec2<f32>,
};
struct VertexOutput {
@builtin(position) clip_position: vec4<f32>,
};
@vertex
fn vs_main(
model: VertexInput,
) -> VertexOutput {
var out: VertexOutput;
let world_pos = model.position;
let x = world_pos.x * camera.params.x + camera.params.z;
let y = world_pos.y * camera.params.y + camera.params.w;
// Globe Effect: Spherize
// let r2 = x*x + y*y;
// let w = 1.0 + r2 * 0.5;
out.clip_position = vec4<f32>(x, y, 0.0, 1.0);
return out;
}
@fragment
fn fs_main(in: VertexOutput) -> @location(0) vec4<f32> {
// Water: Light: #9ecaff, Dark: #1a2639
let is_dark = camera.theme.x;
let color = mix(vec3<f32>(0.62, 0.79, 1.0), vec3<f32>(0.1, 0.15, 0.22), is_dark);
return vec4<f32>(color, 1.0);
}
"#)),
});
let render_pipeline_layout = device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor {
label: Some("Water Pipeline Layout"),
bind_group_layouts: &[bind_group_layout],
push_constant_ranges: &[],
});
device.create_render_pipeline(&wgpu::RenderPipelineDescriptor {
label: Some("Water Pipeline"),
layout: Some(&render_pipeline_layout),
vertex: wgpu::VertexState {
module: &shader,
entry_point: "vs_main",
buffers: &[
wgpu::VertexBufferLayout {
array_stride: std::mem::size_of::<Vertex>() as wgpu::BufferAddress,
step_mode: wgpu::VertexStepMode::Vertex,
attributes: &[
wgpu::VertexAttribute {
offset: 0,
shader_location: 0,
format: wgpu::VertexFormat::Float32x2,
}
],
}
],
},
fragment: Some(wgpu::FragmentState {
module: &shader,
entry_point: "fs_main",
targets: &[Some(wgpu::ColorTargetState {
format: *format,
blend: Some(wgpu::BlendState::REPLACE),
write_mask: wgpu::ColorWrites::ALL,
})],
}),
primitive: wgpu::PrimitiveState {
topology: wgpu::PrimitiveTopology::TriangleList,
strip_index_format: None,
front_face: wgpu::FrontFace::Ccw,
cull_mode: None,
unclipped_depth: false,
polygon_mode: wgpu::PolygonMode::Fill,
conservative: false,
},
depth_stencil: None,
multisample: wgpu::MultisampleState::default(),
multiview: None,
})
}
pub fn create_water_line_pipeline(
device: &wgpu::Device,
format: &wgpu::TextureFormat,
bind_group_layout: &wgpu::BindGroupLayout
) -> wgpu::RenderPipeline {
let shader = device.create_shader_module(wgpu::ShaderModuleDescriptor {
label: None,
source: wgpu::ShaderSource::Wgsl(std::borrow::Cow::Borrowed(r#"
struct CameraUniform {
params: vec4<f32>,
theme: vec4<f32>,
};
@group(0) @binding(0)
var<uniform> camera: CameraUniform;
struct VertexInput {
@location(0) position: vec2<f32>,
};
struct VertexOutput {
@builtin(position) clip_position: vec4<f32>,
};
@vertex
fn vs_main(model: VertexInput) -> VertexOutput {
var out: VertexOutput;
let world_pos = model.position;
let x = world_pos.x * camera.params.x + camera.params.z;
let y = world_pos.y * camera.params.y + camera.params.w;
out.clip_position = vec4<f32>(x, y, 0.0, 1.0);
return out;
}
@fragment
fn fs_main(in: VertexOutput) -> @location(0) vec4<f32> {
let is_dark = camera.theme.x;
// Light: #a8d1f5 (similar to the water fill), Dark: #4a6fa5
let color = mix(vec3<f32>(0.66, 0.82, 0.96), vec3<f32>(0.29, 0.44, 0.65), is_dark);
return vec4<f32>(color, 1.0);
}
"#)),
});
create_simple_pipeline(device, format, bind_group_layout, &shader, "Water Line Pipeline", wgpu::PrimitiveTopology::LineList)
}

48
frontend/src/state.rs Normal file

@@ -0,0 +1,48 @@
//! Application state management
use std::collections::{HashMap, HashSet};
use std::sync::Arc;
use crate::types::{MapNode, MapWay, TileBuffers};
use crate::geo::KalmanFilter;
/// Global application state
pub struct AppState {
pub nodes: HashMap<(i32, i32, i32), Vec<MapNode>>,
pub ways: HashMap<(i32, i32, i32), Vec<MapWay>>,
pub buildings: HashMap<(i32, i32, i32), Vec<MapWay>>,
pub landuse: HashMap<(i32, i32, i32), Vec<MapWay>>,
pub water: HashMap<(i32, i32, i32), Vec<MapWay>>,
pub railways: HashMap<(i32, i32, i32), Vec<MapWay>>,
pub buffers: HashMap<(i32, i32, i32), Arc<TileBuffers>>,
pub loaded_tiles: HashSet<(i32, i32, i32)>,
pub pending_tiles: HashSet<(i32, i32, i32)>,
pub user_location: Option<(f64, f64)>,
pub kalman_filter: Option<KalmanFilter>,
pub watch_id: Option<i32>,
}
impl AppState {
pub fn new() -> Self {
Self {
nodes: HashMap::new(),
ways: HashMap::new(),
buildings: HashMap::new(),
landuse: HashMap::new(),
water: HashMap::new(),
railways: HashMap::new(),
buffers: HashMap::new(),
loaded_tiles: HashSet::new(),
pending_tiles: HashSet::new(),
user_location: None,
kalman_filter: None,
watch_id: None,
}
}
}
impl Default for AppState {
fn default() -> Self {
Self::new()
}
}

102
frontend/src/tiles.rs Normal file

@@ -0,0 +1,102 @@
//! Tile visibility and data fetching utilities
use wasm_bindgen::JsCast;
use crate::camera::Camera;
/// Fetch tile data with caching
pub async fn fetch_cached(url: &str) -> Option<Vec<u8>> {
let window = web_sys::window()?;
let caches = window.caches().ok()?;
let cache_name = "map-data-v5-sand";
let cache = wasm_bindgen_futures::JsFuture::from(caches.open(cache_name)).await.ok()?;
let cache: web_sys::Cache = cache.dyn_into().ok()?;
let request = web_sys::Request::new_with_str(url).ok()?;
let match_promise = cache.match_with_request(&request);
let match_val = wasm_bindgen_futures::JsFuture::from(match_promise).await.ok()?;
if !match_val.is_undefined() {
let response: web_sys::Response = match_val.dyn_into().ok()?;
let buffer_promise = response.array_buffer().ok()?;
let buffer = wasm_bindgen_futures::JsFuture::from(buffer_promise).await.ok()?;
let array = js_sys::Uint8Array::new(&buffer);
return Some(array.to_vec());
}
// Network fetch
let response_val = wasm_bindgen_futures::JsFuture::from(window.fetch_with_request(&request)).await.ok()?;
let response: web_sys::Response = response_val.dyn_into().ok()?;
// Clone response for cache
let response_clone = response.clone().ok()?;
let put_promise = cache.put_with_request(&request, &response_clone);
wasm_bindgen_futures::JsFuture::from(put_promise).await.ok()?;
let buffer_promise = response.array_buffer().ok()?;
let buffer = wasm_bindgen_futures::JsFuture::from(buffer_promise).await.ok()?;
let array = js_sys::Uint8Array::new(&buffer);
Some(array.to_vec())
}
/// Get visible tiles based on current camera position
pub fn get_visible_tiles(camera: &Camera) -> Vec<(i32, i32, i32)> {
// Select tile zoom level based on camera zoom
// Zoom 2/4: World view
// Zoom 6:   Country view
// Zoom 9:   Region view
// Zoom 12:  City view
// Zoom 14:  Street view
let z = if camera.zoom < 100.0 {
2
} else if camera.zoom < 500.0 {
4
} else if camera.zoom < 2000.0 {
6
} else if camera.zoom < 5000.0 {
9
} else if camera.zoom < 10000.0 {
12
} else {
14
};
let n = 2.0f64.powi(z);
let half_width = 1.0 * camera.aspect / camera.zoom;
let half_height = 1.0 / camera.zoom;
let min_x = (camera.x - half_width).max(0.0) as f64;
let max_x = (camera.x + half_width).min(1.0) as f64;
let min_y = (camera.y - half_height).max(0.0) as f64;
let max_y = (camera.y + half_height).min(1.0) as f64;
let min_tile_x = (min_x * n).floor() as i32;
let max_tile_x = (max_x * n).floor() as i32;
let min_tile_y = (min_y * n).floor() as i32;
let max_tile_y = (max_y * n).floor() as i32;
let mut tiles = Vec::new();
for x in min_tile_x..=max_tile_x {
for y in min_tile_y..=max_tile_y {
tiles.push((z, x, y));
}
}
tiles
}
/// Get parent tile for the tile retention hierarchy
pub fn get_parent_tile(z: i32, x: i32, y: i32) -> Option<(i32, i32, i32)> {
// Hierarchy: 14 -> 12 -> 9 -> 6 -> 4 -> 2
let parent_z = match z {
14 => 12,
12 => 9,
9 => 6,
6 => 4,
4 => 2,
_ => return None,
};
// Calculate scale difference
let diff = z - parent_z;
let factor = 2i32.pow(diff as u32);
Some((parent_z, x / factor, y / factor))
}
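
A sketch, not part of this commit, of walking the retention hierarchy defined by get_parent_tile; the tile coordinates here are made up for illustration:

let mut tile = (14, 14_000, 6_500);
while let Some(parent) = get_parent_tile(tile.0, tile.1, tile.2) {
    tile = parent; // 14 -> 12 -> 9 -> 6 -> 4 -> 2
}
assert_eq!(tile.0, 2); // zoom 2 has no parent, so the walk stops there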

68
frontend/src/types.rs Normal file

@@ -0,0 +1,68 @@
//! Data types for map features and tile data
use serde::Deserialize;
use std::collections::HashMap;
/// A map node (point feature with tags)
#[allow(dead_code)]
#[derive(Deserialize, Debug, Clone)]
pub struct MapNode {
pub id: i64,
pub lat: f64,
pub lon: f64,
pub tags: HashMap<String, String>,
}
/// A map way (line/polygon feature with tags and geometry)
#[allow(dead_code)]
#[derive(Deserialize, Debug, Clone)]
pub struct MapWay {
pub id: i64,
pub tags: HashMap<String, String>,
pub points: Vec<u8>,
}
/// Combined tile data from the backend
#[allow(dead_code)]
#[derive(Deserialize, Debug, Clone)]
pub struct TileData {
pub nodes: Vec<MapNode>,
pub ways: Vec<MapWay>,
pub buildings: Vec<MapWay>,
pub landuse: Vec<MapWay>,
pub water: Vec<MapWay>,
pub railways: Vec<MapWay>,
}
/// GPU buffers for a single tile's geometry
#[allow(dead_code)]
pub struct TileBuffers {
// Road Buffers
pub road_motorway_vertex_buffer: wgpu::Buffer,
pub road_motorway_vertex_count: u32,
pub road_primary_vertex_buffer: wgpu::Buffer,
pub road_primary_vertex_count: u32,
pub road_secondary_vertex_buffer: wgpu::Buffer,
pub road_secondary_vertex_count: u32,
pub road_residential_vertex_buffer: wgpu::Buffer,
pub road_residential_vertex_count: u32,
pub building_vertex_buffer: wgpu::Buffer,
pub building_index_count: u32,
// Landuse Buffers
pub landuse_green_vertex_buffer: wgpu::Buffer,
pub landuse_green_index_count: u32,
pub landuse_residential_vertex_buffer: wgpu::Buffer,
pub landuse_residential_index_count: u32,
pub landuse_sand_vertex_buffer: wgpu::Buffer,
pub landuse_sand_index_count: u32,
pub water_vertex_buffer: wgpu::Buffer,
pub water_index_count: u32,
pub railway_vertex_buffer: wgpu::Buffer,
pub railway_vertex_count: u32,
pub water_line_vertex_buffer: wgpu::Buffer,
pub water_line_vertex_count: u32,
}
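
MapWay::points stores geometry as consecutive 8-byte records, each a little-endian f32 latitude followed by a little-endian f32 longitude (this is how labels.rs decodes it). A decoding sketch, not part of this commit; decode_points is a hypothetical helper:

fn decode_points(points: &[u8]) -> Vec<[f64; 2]> {
    points
        .chunks_exact(8)
        .map(|chunk| {
            let lat = f32::from_le_bytes(chunk[0..4].try_into().unwrap()) as f64;
            let lon = f32::from_le_bytes(chunk[4..8].try_into().unwrap()) as f64;
            [lat, lon]
        })
        .collect()
}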