//! Frontend for the maps application
//!
//! This crate provides a WebAssembly-based map renderer using wgpu.

// Module declarations
pub mod types;
pub mod geo;
pub mod labels;
pub mod pipelines;

// New architecture
pub mod domain;
pub mod repositories;
pub mod services;

// Re-exports/Imports
use std::sync::{Arc, Mutex};
use std::collections::HashSet;

use wasm_bindgen::prelude::*;
use wasm_bindgen::JsCast;
use winit::{
    event::{Event, WindowEvent},
    event_loop::EventLoop,
    window::WindowBuilder,
    platform::web::WindowExtWebSys,
};
use wgpu::util::DeviceExt;

use crate::domain::camera::Camera;
use crate::domain::state::{AppState, InputState};
use crate::services::tile_service::TileService;
use crate::services::camera_service::CameraService;
use crate::services::render_service::RenderService;
use crate::services::transit_service::TransitService;
use crate::repositories::http_client::HttpClient;
use crate::geo::project;
use crate::labels::update_labels;
use crate::types::TileData;

#[wasm_bindgen(start)]
pub async fn run() {
    std::panic::set_hook(Box::new(console_error_panic_hook::hook));
    let _ = console_log::init_with_level(log::Level::Warn);

    let event_loop = EventLoop::new().unwrap();
    let window = Arc::new(WindowBuilder::new().build(&event_loop).unwrap());

    // Canvas setup
    let win = web_sys::window().unwrap();
    let window_doc = win.document().unwrap();
    let body = window_doc.body().unwrap();
    if let Some(canvas) = window.canvas() {
        body.append_child(&canvas).unwrap();
    }

    // Initialize WGPU
    let instance = wgpu::Instance::default();
    let surface = instance.create_surface(window.clone()).unwrap();
    let adapter = instance.request_adapter(&wgpu::RequestAdapterOptions {
        power_preference: wgpu::PowerPreference::default(),
        compatible_surface: Some(&surface),
        force_fallback_adapter: false,
    }).await.unwrap();
    let (device, queue) = adapter.request_device(
        &wgpu::DeviceDescriptor {
            label: None,
            required_features: wgpu::Features::empty(),
            required_limits: wgpu::Limits::downlevel_webgl2_defaults(),
        },
        None,
    ).await.unwrap();

    // Initial Config
    let dpr = win.device_pixel_ratio();
    let inner_width = win.inner_width().unwrap().as_f64().unwrap();
    let inner_height = win.inner_height().unwrap().as_f64().unwrap();
    let width = (inner_width * dpr) as u32;
    let height = (inner_height * dpr) as u32;
    let max_dim = device.limits().max_texture_dimension_2d.min(2048);
    let width = width.max(1).min(max_dim);
    let height = height.max(1).min(max_dim);
    if let Some(canvas) = window.canvas() {
        canvas.set_width(width);
        canvas.set_height(height);
    }
    let mut config = surface.get_default_config(&adapter, width, height).unwrap();
    surface.configure(&device, &config);

    // MSAA
    let mut msaa_texture = device.create_texture(&wgpu::TextureDescriptor {
        label: Some("Multisampled Texture"),
        size: wgpu::Extent3d { width, height, depth_or_array_layers: 1 },
        mip_level_count: 1,
        sample_count: 4,
        dimension: wgpu::TextureDimension::D2,
        format: config.format,
        usage: wgpu::TextureUsages::RENDER_ATTACHMENT,
        view_formats: &[],
    });
    let mut msaa_view = msaa_texture.create_view(&wgpu::TextureViewDescriptor::default());

    // Domain State - Initial view centered on Munich (lat 48.1351, lon 11.5820)
    let camera = Arc::new(Mutex::new(Camera {
        x: 0.5322,     // Munich longitude in Mercator
        y: 0.3195,     // Munich latitude in Mercator
        zoom: 4000.0,  // Good city-level zoom
        aspect: width as f32 / height as f32,
    }));
    let mut input = InputState::new();
    let state = Arc::new(Mutex::new(AppState::new()));
    TransitService::set_global_state(state.clone());
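    // The initial position above depends on `geo::project` mapping (lat, lon) into the
    // unit-square world space the camera uses. A minimal sketch of a standard Web
    // Mercator normalization, kept here only for orientation; the real implementation
    // lives in `crate::geo` and may normalize differently:
    //
    //     fn project(lat: f64, lon: f64) -> (f32, f32) {
    //         let x = (lon + 180.0) / 360.0;
    //         let lat_rad = lat.to_radians();
    //         let y = (1.0 - (lat_rad.tan() + 1.0 / lat_rad.cos()).ln() / std::f64::consts::PI) / 2.0;
    //         (x as f32, y as f32)
    //     }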
    // Label camera tracking
    let mut last_label_camera: (f32, f32, f32) = (0.0, 0.0, 0.0); // (x, y, zoom), currently unused

    // Camera Buffer
    let is_dark_init = win.document()
        .and_then(|d| d.document_element())
        .map(|e| e.get_attribute("data-theme").unwrap_or_default() == "dark")
        .unwrap_or(false);
    let camera_uniform = camera.lock().unwrap().to_uniform(is_dark_init, false);
    let camera_buffer = device.create_buffer_init(
        &wgpu::util::BufferInitDescriptor {
            label: Some("Camera Buffer"),
            contents: bytemuck::cast_slice(&[camera_uniform]),
            usage: wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST,
        }
    );
    let camera_bind_group_layout = device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {
        entries: &[
            wgpu::BindGroupLayoutEntry {
                binding: 0,
                visibility: wgpu::ShaderStages::VERTEX | wgpu::ShaderStages::FRAGMENT,
                ty: wgpu::BindingType::Buffer {
                    ty: wgpu::BufferBindingType::Uniform,
                    has_dynamic_offset: false,
                    min_binding_size: None,
                },
                count: None,
            }
        ],
        label: Some("camera_bind_group_layout"),
    });
    let camera_bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {
        layout: &camera_bind_group_layout,
        entries: &[
            wgpu::BindGroupEntry {
                binding: 0,
                resource: camera_buffer.as_entire_binding(),
            }
        ],
        label: Some("camera_bind_group"),
    });

    // Render Service (Pipelines)
    let render_service = RenderService::new(&device, &config.format, &camera_bind_group_layout);

    // UI Bindings (Zoom slider, buttons)
    {
        let camera_clone = camera.clone();
        let window_clone = window.clone();
        if let Some(slider) = window_doc.get_element_by_id("zoom-slider")
            .and_then(|e| e.dyn_into::<web_sys::HtmlInputElement>().ok())
        {
            let closure = wasm_bindgen::closure::Closure::<dyn FnMut(web_sys::Event)>::new(move |event: web_sys::Event| {
                let input = event.target().unwrap().dyn_into::<web_sys::HtmlInputElement>().unwrap();
                let val = input.value().parse::<f32>().unwrap_or(50.0);
                let new_zoom = CameraService::slider_to_zoom(val);
                camera_clone.lock().unwrap().zoom = new_zoom;
                window_clone.request_redraw();
            });
            slider.add_event_listener_with_callback("input", closure.as_ref().unchecked_ref()).unwrap();
            closure.forget();
        }

        let camera_clone = camera.clone();
        let window_clone = window.clone();
        if let Some(btn) = window_doc.get_element_by_id("btn-zoom-in")
            .and_then(|e| e.dyn_into::<web_sys::HtmlElement>().ok())
        {
            let closure = wasm_bindgen::closure::Closure::<dyn FnMut()>::new(move || {
                let mut cam = camera_clone.lock().unwrap();
                let current_slider = CameraService::zoom_to_slider(cam.zoom);
                let new_slider = (current_slider + 5.0).min(100.0);
                cam.zoom = CameraService::slider_to_zoom(new_slider);
                window_clone.request_redraw();
            });
            btn.set_onclick(Some(closure.as_ref().unchecked_ref()));
            closure.forget();
        }

        let camera_clone = camera.clone();
        let window_clone = window.clone();
        if let Some(btn) = window_doc.get_element_by_id("btn-zoom-out")
            .and_then(|e| e.dyn_into::<web_sys::HtmlElement>().ok())
        {
            let closure = wasm_bindgen::closure::Closure::<dyn FnMut()>::new(move || {
                let mut cam = camera_clone.lock().unwrap();
                let current_slider = CameraService::zoom_to_slider(cam.zoom);
                let new_slider = (current_slider - 5.0).max(0.0);
                cam.zoom = CameraService::slider_to_zoom(new_slider);
                window_clone.request_redraw();
            });
            btn.set_onclick(Some(closure.as_ref().unchecked_ref()));
            closure.forget();
        }
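        // The three handlers above assume `CameraService::slider_to_zoom` and
        // `zoom_to_slider` are inverses that map the 0..=100 slider range onto the
        // camera's zoom scale. A plausible exponential pairing, shown purely as a
        // hypothetical sketch of that contract (the real mapping is defined in
        // `services::camera_service`):
        //
        //     fn slider_to_zoom(v: f32) -> f32 { 2.0_f32.powf(v / 5.0) }
        //     fn zoom_to_slider(zoom: f32) -> f32 { zoom.log2() * 5.0 }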
        // Transit button
        let state_clone = state.clone();
        let window_clone = window.clone();
        if let Some(btn) = window_doc.get_element_by_id("btn-transport")
            .and_then(|e| e.dyn_into::<web_sys::HtmlElement>().ok())
        {
            let closure = wasm_bindgen::closure::Closure::<dyn FnMut()>::new(move || {
                let mut s = state_clone.lock().unwrap();
                s.show_transit = !s.show_transit;
                let new_state = s.show_transit;
                drop(s);
                web_sys::console::log_1(&format!("Transit mode: {}", new_state).into());
                window_clone.request_redraw();
            });
            btn.set_onclick(Some(closure.as_ref().unchecked_ref()));
            closure.forget();
        }

        // Location button: the geolocation logic lives in this closure (rather than a
        // dedicated service) to avoid a larger refactor for now, but it reads and writes
        // its state through AppState.
        let state_clone = state.clone();
        let camera_clone = camera.clone();
        let window_clone = window.clone();
        if let Some(btn) = window_doc.get_element_by_id("btn-location")
            .and_then(|e| e.dyn_into::<web_sys::HtmlElement>().ok())
        {
            let closure = wasm_bindgen::closure::Closure::<dyn FnMut()>::new(move || {
                let window = web_sys::window().unwrap();
                let navigator = window.navigator();
                let geolocation = navigator.geolocation().unwrap();

                // Toggle off: stop watching and clear the stored location.
                let mut state_guard = state_clone.lock().unwrap();
                if let Some(id) = state_guard.watch_id {
                    geolocation.clear_watch(id);
                    state_guard.watch_id = None;
                    state_guard.kalman_filter = None;
                    state_guard.user_location = None;
                    window_clone.request_redraw();
                    return;
                }
                drop(state_guard);

                let camera_clone2 = camera_clone.clone();
                let window_clone2 = window_clone.clone();
                let state_clone2 = state_clone.clone();
                let success_callback = wasm_bindgen::closure::Closure::<dyn FnMut(web_sys::Position)>::new(move |position: web_sys::Position| {
                    let coords = position.coords();
                    let mut state_guard = state_clone2.lock().unwrap();
                    state_guard.user_location = Some((coords.latitude(), coords.longitude()));
                    drop(state_guard);

                    let (x, y) = project(coords.latitude(), coords.longitude());
                    let mut cam = camera_clone2.lock().unwrap();
                    cam.x = x;
                    cam.y = y;
                    // Zoom to street level (8000.0 is good for navigation)
                    cam.zoom = 8000.0;
                    drop(cam);

                    web_sys::console::log_1(&format!("Location updated: lat={}, lon={}", coords.latitude(), coords.longitude()).into());
                    window_clone2.request_redraw();
                });
                let error_callback = wasm_bindgen::closure::Closure::<dyn FnMut(web_sys::PositionError)>::new(move |error: web_sys::PositionError| {
                    web_sys::console::error_1(&format!("Geolocation error: {:?}", error.message()).into());
                });

                let options = web_sys::PositionOptions::new();
                let watch_id = geolocation.watch_position_with_error_callback_and_options(
                    success_callback.as_ref().unchecked_ref(),
                    Some(error_callback.as_ref().unchecked_ref()),
                    &options
                ).unwrap();
                state_clone.lock().unwrap().watch_id = Some(watch_id);

                success_callback.forget();
                error_callback.forget();
            });
            btn.set_onclick(Some(closure.as_ref().unchecked_ref()));
            closure.forget();
        }
    }
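    // Note: every closure registered above is `.forget()`-ed, which intentionally leaks it
    // so the JavaScript side can keep invoking the callback for the lifetime of the page.
    // This is the usual wasm-bindgen pattern for long-lived event listeners; if a handler
    // ever needs to be detached, keep the `Closure` alive in application state instead.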
    // Event Loop
    event_loop.run(move |event, elwt| {
        elwt.set_control_flow(winit::event_loop::ControlFlow::Wait);
        match event {
            Event::WindowEvent { event, .. } => match event {
                WindowEvent::Resized(_) => {
                    let win = web_sys::window().unwrap();
                    let dpr = win.device_pixel_ratio();
                    let inner_width = win.inner_width().unwrap().as_f64().unwrap();
                    let inner_height = win.inner_height().unwrap().as_f64().unwrap();
                    let max_dim = device.limits().max_texture_dimension_2d.min(2048);
                    let width = (inner_width * dpr).max(1.0).min(max_dim as f64) as u32;
                    let height = (inner_height * dpr).max(1.0).min(max_dim as f64) as u32;

                    if let Some(canvas) = window.canvas() {
                        canvas.set_width(width);
                        canvas.set_height(height);
                    }
                    config.width = width;
                    config.height = height;
                    surface.configure(&device, &config);

                    // Recreate MSAA texture for the new size
                    msaa_texture = device.create_texture(&wgpu::TextureDescriptor {
                        label: Some("Multisampled Texture"),
                        size: wgpu::Extent3d { width, height, depth_or_array_layers: 1 },
                        mip_level_count: 1,
                        sample_count: 4,
                        dimension: wgpu::TextureDimension::D2,
                        format: config.format,
                        usage: wgpu::TextureUsages::RENDER_ATTACHMENT,
                        view_formats: &[],
                    });
                    msaa_view = msaa_texture.create_view(&wgpu::TextureViewDescriptor::default());

                    CameraService::handle_resize(&camera, width, height);
                    window.request_redraw();
                }
                WindowEvent::MouseInput { state: button_state, button, .. } => {
                    CameraService::handle_mouse_input(&mut input, button_state, button);
                }
                WindowEvent::CursorMoved { position, .. } => {
                    CameraService::handle_cursor_move(&camera, &mut input, position.x, position.y, config.height as f32);
                    window.request_redraw();
                }
                WindowEvent::MouseWheel { delta, .. } => {
                    CameraService::handle_wheel(&camera, delta);
                    window.request_redraw();
                }
                WindowEvent::RedrawRequested => {
                    let camera_guard = camera.lock().unwrap();
                    let visible_tiles = TileService::get_visible_tiles(&camera_guard);
                    drop(camera_guard);

                    // Tile Fetching orchestration
                    let mut needs_fetch = Vec::new();
                    let mut tiles_to_render_set = HashSet::new();
                    {
                        let mut state_guard = state.lock().unwrap();
                        for &tile in &visible_tiles {
                            tiles_to_render_set.insert(tile);
                            let mut current_tile = tile;
                            while let Some(parent) = TileService::get_parent_tile(current_tile.0, current_tile.1, current_tile.2) {
                                tiles_to_render_set.insert(parent);
                                current_tile = parent;
                            }
                            if !state_guard.loaded_tiles.contains(&tile) && !state_guard.pending_tiles.contains(&tile) {
                                state_guard.pending_tiles.insert(tile);
                                needs_fetch.push(tile);
                            }
                        }
                    }
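                    // `TileService::get_parent_tile` is assumed to walk up the usual
                    // slippy-map pyramid, where the parent of (z, x, y) is one zoom level
                    // up with halved coordinates. Hypothetical sketch of that contract
                    // (the real implementation lives in `services::tile_service`):
                    //
                    //     fn get_parent_tile(z: i32, x: i32, y: i32) -> Option<(i32, i32, i32)> {
                    //         if z <= 0 { None } else { Some((z - 1, x / 2, y / 2)) }
                    //     }
                    //
                    // Keeping the ancestor chain in `tiles_to_render_set` lets coarser tiles
                    // stand in while finer ones are still loading.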
                    // Process buffers: create GPU buffers for any loaded tiles that do not
                    // have them yet. State must be locked here because the buffers are built
                    // from the tile geometry stored in it.
                    {
                        let mut state_guard = state.lock().unwrap();
                        let tiles_to_process: Vec<(i32, i32, i32)> = state_guard.loaded_tiles.iter()
                            .filter(|tile| !state_guard.buffers.contains_key(*tile))
                            .cloned()
                            .collect();
                        for tile in tiles_to_process {
                            // Buffer creation is delegated to RenderService::create_tile_buffers,
                            // which reads the tile geometry from state and stores the buffers back.
                            RenderService::create_tile_buffers(&device, &mut state_guard, tile);
                        }
                    }

                    // Fetching
                    for (z, x, y) in needs_fetch {
                        let state_clone = state.clone();
                        let window_clone = window.clone();
                        wasm_bindgen_futures::spawn_local(async move {
                            let url = format!("/api/tiles/{}/{}/{}/all", z, x, y);
                            let tile_data = if let Some(bytes) = HttpClient::fetch_cached(&url).await {
                                bincode::deserialize::<TileData>(&bytes).ok()
                            } else {
                                None
                            };

                            if let Some(data) = tile_data {
                                // Pre-compute labels from tile data (expensive, but only once per tile)
                                let labels = crate::labels::extract_labels(&data);

                                let mut guard = state_clone.lock().unwrap();
                                guard.nodes.insert((z, x, y), data.nodes);
                                guard.ways.insert((z, x, y), data.ways);
                                guard.buildings.insert((z, x, y), data.buildings);
                                guard.landuse.insert((z, x, y), data.landuse);
                                guard.water.insert((z, x, y), data.water);
                                guard.railways.insert((z, x, y), data.railways);
                                guard.tile_labels.insert((z, x, y), labels);
                                guard.loaded_tiles.insert((z, x, y));
                                guard.pending_tiles.remove(&(z, x, y));
                                window_clone.request_redraw();
                            } else {
                                let mut guard = state_clone.lock().unwrap();
                                guard.pending_tiles.remove(&(z, x, y));
                            }
                        });
                    }

                    // Collect Buffers
                    let mut tiles_to_render_vec: Vec<(i32, i32, i32)> = tiles_to_render_set.into_iter().collect();
                    tiles_to_render_vec.sort_by_key(|(z, _, _)| *z);
                    let mut tiles_to_render = Vec::new();
                    {
                        let state_guard = state.lock().unwrap();
                        for tile in &tiles_to_render_vec {
                            if let Some(buffers) = state_guard.buffers.get(tile) {
                                tiles_to_render.push(buffers.clone());
                            }
                        }
                    }

                    // Render
                    let is_dark = web_sys::window()
                        .and_then(|w| w.document())
                        .and_then(|d| d.document_element())
                        .map(|e| e.get_attribute("data-theme").unwrap_or_default() == "dark")
                        .unwrap_or(false);
                    let show_transit = state.lock().unwrap().show_transit;
                    let camera_uniform_data = camera.lock().unwrap().to_uniform(is_dark, show_transit);
                    queue.write_buffer(&camera_buffer, 0, bytemuck::cast_slice(&[camera_uniform_data]));

                    let frame = surface.get_current_texture().unwrap();
                    let view = frame.texture.create_view(&wgpu::TextureViewDescriptor::default());
                    let mut encoder = device.create_command_encoder(&wgpu::CommandEncoderDescriptor { label: None });
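                    // The pass below draws into the 4x MSAA target and resolves into the
                    // swapchain view; `StoreOp::Discard` is deliberate, since the multisampled
                    // contents are not needed once the resolve has run. Layers are drawn
                    // back-to-front in a fixed order: landuse, water, roads (residential ->
                    // secondary -> primary -> motorway), buildings, then railways on top.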
                    {
                        let mut rpass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor {
                            label: None,
                            color_attachments: &[Some(wgpu::RenderPassColorAttachment {
                                view: &msaa_view,
                                resolve_target: Some(&view),
                                ops: wgpu::Operations {
                                    load: wgpu::LoadOp::Clear(wgpu::Color {
                                        r: if is_dark { 0.05 } else { 0.961 }, // #F5F4F0 cream
                                        g: if is_dark { 0.05 } else { 0.957 },
                                        b: if is_dark { 0.05 } else { 0.941 },
                                        a: 1.0,
                                    }),
                                    store: wgpu::StoreOp::Discard,
                                },
                            })],
                            depth_stencil_attachment: None,
                            timestamp_writes: None,
                            occlusion_query_set: None,
                        });

                        rpass.set_pipeline(&render_service.landuse_residential_pipeline);
                        rpass.set_bind_group(0, &camera_bind_group, &[]);
                        for buffers in &tiles_to_render {
                            if buffers.landuse_residential_index_count > 0 {
                                rpass.set_vertex_buffer(0, buffers.landuse_residential_vertex_buffer.slice(..));
                                rpass.draw(0..buffers.landuse_residential_index_count, 0..1);
                            }
                        }

                        rpass.set_pipeline(&render_service.landuse_green_pipeline);
                        rpass.set_bind_group(0, &camera_bind_group, &[]);
                        for buffers in &tiles_to_render {
                            if buffers.landuse_green_index_count > 0 {
                                rpass.set_vertex_buffer(0, buffers.landuse_green_vertex_buffer.slice(..));
                                rpass.draw(0..buffers.landuse_green_index_count, 0..1);
                            }
                        }

                        rpass.set_pipeline(&render_service.water_pipeline);
                        rpass.set_bind_group(0, &camera_bind_group, &[]);
                        for buffers in &tiles_to_render {
                            if buffers.water_index_count > 0 {
                                rpass.set_bind_group(1, &buffers.tile_bind_group, &[]);
                                rpass.set_vertex_buffer(0, buffers.water_vertex_buffer.slice(..));
                                rpass.draw(0..buffers.water_index_count, 0..1);
                            }
                        }

                        rpass.set_pipeline(&render_service.water_line_pipeline);
                        rpass.set_bind_group(0, &camera_bind_group, &[]);
                        for buffers in &tiles_to_render {
                            if buffers.water_line_vertex_count > 0 {
                                rpass.set_vertex_buffer(0, buffers.water_line_vertex_buffer.slice(..));
                                rpass.draw(0..buffers.water_line_vertex_count, 0..1);
                            }
                        }

                        rpass.set_pipeline(&render_service.sand_pipeline);
                        rpass.set_bind_group(0, &camera_bind_group, &[]);
                        for buffers in &tiles_to_render {
                            if buffers.landuse_sand_index_count > 0 {
                                rpass.set_vertex_buffer(0, buffers.landuse_sand_vertex_buffer.slice(..));
                                rpass.draw(0..buffers.landuse_sand_index_count, 0..1);
                            }
                        }

                        // Roads (fill only for now, based on the extracted render service logic)
                        rpass.set_pipeline(&render_service.residential_fill);
                        rpass.set_bind_group(0, &camera_bind_group, &[]);
                        for buffers in &tiles_to_render {
                            if buffers.road_residential_vertex_count > 0 {
                                rpass.set_vertex_buffer(0, buffers.road_residential_vertex_buffer.slice(..));
                                rpass.draw(0..buffers.road_residential_vertex_count, 0..1);
                            }
                        }

                        rpass.set_pipeline(&render_service.secondary_fill);
                        rpass.set_bind_group(0, &camera_bind_group, &[]);
                        for buffers in &tiles_to_render {
                            if buffers.road_secondary_vertex_count > 0 {
                                rpass.set_vertex_buffer(0, buffers.road_secondary_vertex_buffer.slice(..));
                                rpass.draw(0..buffers.road_secondary_vertex_count, 0..1);
                            }
                        }

                        rpass.set_pipeline(&render_service.primary_fill);
                        rpass.set_bind_group(0, &camera_bind_group, &[]);
                        for buffers in &tiles_to_render {
                            if buffers.road_primary_vertex_count > 0 {
                                rpass.set_vertex_buffer(0, buffers.road_primary_vertex_buffer.slice(..));
                                rpass.draw(0..buffers.road_primary_vertex_count, 0..1);
                            }
                        }

                        rpass.set_pipeline(&render_service.motorway_fill);
                        rpass.set_bind_group(0, &camera_bind_group, &[]);
                        for buffers in &tiles_to_render {
                            if buffers.road_motorway_vertex_count > 0 {
                                rpass.set_vertex_buffer(0, buffers.road_motorway_vertex_buffer.slice(..));
                                rpass.draw(0..buffers.road_motorway_vertex_count, 0..1);
                            }
                        }

                        // Buildings (rendered before railways so transit lines appear on top)
                        rpass.set_pipeline(&render_service.building_pipeline);
                        rpass.set_bind_group(0, &camera_bind_group, &[]);
                        for buffers in &tiles_to_render {
                            if buffers.building_index_count > 0 {
                                rpass.set_vertex_buffer(0, buffers.building_vertex_buffer.slice(..));
                                rpass.draw(0..buffers.building_index_count, 0..1);
                            }
                        }

                        // Railways (rendered LAST so they appear on top of roads and buildings)
                        rpass.set_pipeline(&render_service.railway_pipeline);
                        rpass.set_bind_group(0, &camera_bind_group, &[]);
                        for buffers in &tiles_to_render {
                            if buffers.railway_vertex_count > 0 {
                                rpass.set_vertex_buffer(0, buffers.railway_vertex_buffer.slice(..));
                                rpass.draw(0..buffers.railway_vertex_count, 0..1);
                            }
                        }
                    }

                    queue.submit(Some(encoder.finish()));
                    frame.present();

                    // Extract minimal data to avoid RefCell conflicts in winit.
                    // We hold the lock for update_labels to avoid cloning the massive nodes map.
                    let state_guard = state.lock().unwrap();
                    let camera_guard = camera.lock().unwrap();

                    // Helper struct to represent camera for update_labels
                    let temp_camera = Camera {
                        x: camera_guard.x,
                        y: camera_guard.y,
                        zoom: camera_guard.zoom,
                        aspect: camera_guard.aspect,
                    };

                    // Update labels every frame for smooth dragging.
                    // Performance is maintained through reduced label limits (20/100/300/500).
                    update_labels(
                        &web_sys::window().unwrap(),
                        &temp_camera,
                        &state_guard,
                        config.width as f64,
                        config.height as f64,
                        window.scale_factor(),
                    );

                    let user_location = state_guard.user_location;
                    let show_transit = state_guard.show_transit;
                    drop(camera_guard);
                    drop(state_guard);
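                    // The marker math below treats the camera uniform's `params` as
                    // [scale_x, scale_y, offset_x, offset_y]: a projected world coordinate
                    // maps to clip space as ndc = world * scale + offset, and then to CSS
                    // pixels via
                    //
                    //     css_x = (ndc_x + 1.0) * 0.5 * client_width
                    //     css_y = (1.0 - ndc_y) * 0.5 * client_height
                    //
                    // (the y flip accounts for NDC y pointing up while CSS y grows downward).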
                    // Update user location indicator (blue dot)
                    if let Some((lat, lon)) = user_location {
                        let window = web_sys::window().unwrap();
                        let document = window.document().unwrap();
                        if let Some(marker) = document.get_element_by_id("user-location") {
                            let (x, y) = project(lat, lon);
                            let is_dark = document.document_element()
                                .map(|e| e.get_attribute("data-theme").unwrap_or_default() == "dark")
                                .unwrap_or(false);
                            let uniforms = temp_camera.to_uniform(is_dark, show_transit);
                            let cx = x * uniforms.params[0] + uniforms.params[2];
                            let cy = y * uniforms.params[1] + uniforms.params[3];

                            // Convert NDC to CSS pixels
                            let client_width = window.inner_width().ok().and_then(|v| v.as_f64()).unwrap_or(config.width as f64);
                            let client_height = window.inner_height().ok().and_then(|v| v.as_f64()).unwrap_or(config.height as f64);
                            let css_x = (cx as f64 + 1.0) * 0.5 * client_width;
                            let css_y = (1.0 - cy as f64) * 0.5 * client_height;

                            // Update marker position and visibility
                            let _ = marker.set_attribute("style", &format!(
                                "display: block; left: {}px; top: {}px; transform: translate(-50%, -50%);",
                                css_x, css_y
                            ));
                        }
                    } else {
                        // Hide marker if no location
                        let window = web_sys::window().unwrap();
                        let document = window.document().unwrap();
                        if let Some(marker) = document.get_element_by_id("user-location") {
                            let _ = marker.set_attribute("style", "display: none;");
                        }
                    }
                }
                _ => {}
            },
            _ => {}
        }
    }).unwrap();
}