diff --git a/Cargo.lock b/Cargo.lock
index 0ccfe0330..ccb48d7ad 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -119,7 +119,7 @@ dependencies = [
  "futures-core",
  "futures-util",
  "mio",
- "socket2",
+ "socket2 0.5.5",
  "tokio",
  "tracing",
 ]
@@ -180,7 +180,7 @@ dependencies = [
  "serde_json",
  "serde_urlencoded",
  "smallvec",
- "socket2",
+ "socket2 0.5.5",
  "time",
  "url",
 ]
@@ -1457,6 +1457,17 @@ dependencies = [
  "itoa",
 ]
 
+[[package]]
+name = "http-body"
+version = "0.4.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1"
+dependencies = [
+ "bytes",
+ "http",
+ "pin-project-lite",
+]
+
 [[package]]
 name = "httparse"
 version = "1.8.0"
@@ -1475,6 +1486,44 @@ version = "2.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4"
 
+[[package]]
+name = "hyper"
+version = "0.14.27"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ffb1cfd654a8219eaef89881fdb3bb3b1cdc5fa75ded05d6933b2b382e395468"
+dependencies = [
+ "bytes",
+ "futures-channel",
+ "futures-core",
+ "futures-util",
+ "h2",
+ "http",
+ "http-body",
+ "httparse",
+ "httpdate",
+ "itoa",
+ "pin-project-lite",
+ "socket2 0.4.10",
+ "tokio",
+ "tower-service",
+ "tracing",
+ "want",
+]
+
+[[package]]
+name = "hyper-rustls"
+version = "0.24.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590"
+dependencies = [
+ "futures-util",
+ "http",
+ "hyper",
+ "rustls",
+ "tokio",
+ "tokio-rustls",
+]
+
 [[package]]
 name = "idna"
 version = "0.4.0"
@@ -1547,6 +1596,12 @@ dependencies = [
  "yaml-rust",
 ]
 
+[[package]]
+name = "ipnet"
+version = "2.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8f518f335dce6725a761382244631d86cf0ccb2863413590b31338feb467f9c3"
+
 [[package]]
 name = "is-terminal"
 version = "0.4.9"
@@ -1761,6 +1816,7 @@ dependencies = [
  "postgres",
  "postgres-protocol",
  "regex",
+ "reqwest",
  "rustls",
  "rustls-native-certs",
  "rustls-pemfile",
@@ -1774,6 +1830,7 @@ dependencies = [
  "tilejson",
  "tokio",
  "tokio-postgres-rustls",
+ "url",
 ]
 
 [[package]]
@@ -2208,6 +2265,7 @@ dependencies = [
  "bytes",
  "fmmap",
  "hilbert_2d",
+ "reqwest",
  "serde",
  "serde_json",
  "thiserror",
@@ -2549,6 +2607,46 @@ version = "1.9.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "c707298afce11da2efef2f600116fa93ffa7a032b5d7b628aa17711ec81383ca"
 
+[[package]]
+name = "reqwest"
+version = "0.11.22"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "046cd98826c46c2ac8ddecae268eb5c2e58628688a5fc7a2643704a73faba95b"
+dependencies = [
+ "base64",
+ "bytes",
+ "encoding_rs",
+ "futures-core",
+ "futures-util",
+ "h2",
+ "http",
+ "http-body",
+ "hyper",
+ "hyper-rustls",
+ "ipnet",
+ "js-sys",
+ "log",
+ "mime",
+ "once_cell",
+ "percent-encoding",
+ "pin-project-lite",
+ "rustls",
+ "rustls-pemfile",
+ "serde",
+ "serde_json",
+ "serde_urlencoded",
+ "system-configuration",
+ "tokio",
+ "tokio-rustls",
+ "tower-service",
+ "url",
+ "wasm-bindgen",
+ "wasm-bindgen-futures",
+ "web-sys",
+ "webpki-roots",
+ "winreg",
+]
+
 [[package]]
 name = "resvg"
 version = "0.35.0"
@@ -3018,6 +3116,16 @@ version = "1.11.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "942b4a808e05215192e39f4ab80813e599068285906cc91aa64f923db842bd5a"
 
+[[package]]
+name = "socket2"
+version = "0.4.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9f7916fc008ca5542385b89a3d3ce689953c143e9304a9bf8beec1de48994c0d"
+dependencies = [
+ "libc",
+ "winapi",
+]
+
 [[package]]
 name = "socket2"
 version = "0.5.5"
@@ -3384,6 +3492,27 @@ dependencies = [
  "unicode-ident",
 ]
 
+[[package]]
+name = "system-configuration"
+version = "0.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7"
+dependencies = [
+ "bitflags 1.3.2",
+ "core-foundation",
+ "system-configuration-sys",
+]
+
+[[package]]
+name = "system-configuration-sys"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a75fb188eb626b924683e3b95e3a48e63551fcfb51949de2f06a9d91dbee93c9"
+dependencies = [
+ "core-foundation-sys",
+ "libc",
+]
+
 [[package]]
 name = "tap"
 version = "1.0.1"
@@ -3537,7 +3666,7 @@ dependencies = [
  "parking_lot",
  "pin-project-lite",
  "signal-hook-registry",
- "socket2",
+ "socket2 0.5.5",
  "tokio-macros",
  "windows-sys 0.48.0",
 ]
@@ -3573,7 +3702,7 @@ dependencies = [
  "postgres-protocol",
  "postgres-types",
  "rand",
- "socket2",
+ "socket2 0.5.5",
  "tokio",
  "tokio-util",
  "whoami",
@@ -3637,6 +3766,12 @@ dependencies = [
  "serde",
 ]
 
+[[package]]
+name = "tower-service"
+version = "0.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52"
+
 [[package]]
 name = "tracing"
 version = "0.1.40"
@@ -3678,6 +3813,12 @@ dependencies = [
  "serde_json",
 ]
 
+[[package]]
+name = "try-lock"
+version = "0.2.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed"
+
 [[package]]
 name = "ttf-parser"
 version = "0.18.1"
@@ -3901,6 +4042,15 @@ dependencies = [
  "winapi-util",
 ]
 
+[[package]]
+name = "want"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e"
+dependencies = [
+ "try-lock",
+]
+
 [[package]]
 name = "wasi"
 version = "0.11.0+wasi-snapshot-preview1"
@@ -3932,6 +4082,18 @@ dependencies = [
  "wasm-bindgen-shared",
 ]
 
+[[package]]
+name = "wasm-bindgen-futures"
+version = "0.4.38"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9afec9963e3d0994cac82455b2b3502b81a7f40f9a0d32181f7528d9f4b43e02"
+dependencies = [
+ "cfg-if",
+ "js-sys",
+ "wasm-bindgen",
+ "web-sys",
+]
+
 [[package]]
 name = "wasm-bindgen-macro"
 version = "0.2.88"
@@ -3971,6 +4133,12 @@ dependencies = [
  "wasm-bindgen",
 ]
 
+[[package]]
+name = "webpki-roots"
+version = "0.25.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "14247bb57be4f377dfb94c72830b8ce8fc6beac03cf4bf7b9732eadd414123fc"
+
 [[package]]
 name = "weezl"
 version = "0.1.7"
@@ -4162,6 +4330,16 @@ version = "0.48.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"
 
+[[package]]
+name = "winreg"
+version = "0.50.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1"
+dependencies = [
+ "cfg-if",
+ "windows-sys 0.48.0",
+]
+
 [[package]]
 name = "wyz"
 version = "0.5.1"
diff --git a/Cargo.toml b/Cargo.toml
index 8bbd2d30b..28f822bd9 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -37,12 +37,13 @@ martin-tile-utils = { path = "./martin-tile-utils", version = "0.1.0" }
 mbtiles = { path = "./mbtiles", version = "0.7.0", default-features = false }
 num_cpus = "1"
 pbf_font_tools = { version = "2.5.0", features = ["freetype"] }
-pmtiles = { version = "0.3", features = ["mmap-async-tokio", "tilejson"] }
+pmtiles = { version = "0.3", features = ["http-async", "mmap-async-tokio", "tilejson"] }
 postgis = "0.9"
 postgres = { version = "0.19", features = ["with-time-0_3", "with-uuid-1", "with-serde_json-1"] }
 postgres-protocol = "0.6"
 pretty_assertions = "1"
 regex = "1"
+reqwest = { version = "0.11", default-features = false, features = ["rustls-tls-native-roots"] }
 rstest = "0.18"
 rustls = { version = "0.21", features = ["dangerous_configuration"] }
 rustls-native-certs = "0.6"
@@ -59,6 +60,7 @@ thiserror = "1"
 tilejson = "0.3"
 tokio = { version = "1.33.0", features = ["macros"] }
 tokio-postgres-rustls = "0.10"
+url = "2.4"
 
 [profile.dev.package]
 # See https://github.com/launchbadge/sqlx#compile-time-verification
@@ -67,6 +69,6 @@ sqlx-macros.opt-level = 3
 insta.opt-level = 3
 similar.opt-level = 3
 
-#[patch.crates-io]
+[patch.crates-io]
 #sqlite-hashes = { path = "/home/nyurik/dev/rust/sqlite-hashes" }
 #pmtiles = { path = "../pmtiles-rs" }
diff --git a/docker-compose.yml b/docker-compose.yml
index 04754d296..353f4f177 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -12,6 +12,14 @@ services:
     depends_on:
       - db
 
+  fileserver:
+    image: nginx:alpine
+    restart: unless-stopped
+    ports:
+      - "5412:80"
+    volumes:
+      - ./tests/fixtures/pmtiles2:/usr/share/nginx/html
+
   db-is-ready:
     # This should match the version of postgres used in the CI workflow
     image: postgis/postgis:14-3.3-alpine
diff --git a/justfile b/justfile
index c17c6de58..7238c9ef7 100644
--- a/justfile
+++ b/justfile
@@ -50,16 +50,16 @@ clean-test:
     rm -rf tests/output
 
 # Start a test database
-start: (docker-up "db") docker-is-ready
+start: start-pmtiles-server (docker-up "db") docker-is-ready
 
 # Start an ssl-enabled test database
-start-ssl: (docker-up "db-ssl") docker-is-ready
+start-ssl: start-pmtiles-server (docker-up "db-ssl") docker-is-ready
 
 # Start an ssl-enabled test database that requires a client certificate
-start-ssl-cert: (docker-up "db-ssl-cert") docker-is-ready
+start-ssl-cert: start-pmtiles-server (docker-up "db-ssl-cert") docker-is-ready
 
 # Start a legacy test database
-start-legacy: (docker-up "db-legacy") docker-is-ready
+start-legacy: start-pmtiles-server (docker-up "db-legacy") docker-is-ready
 
 # Start a specific test database, e.g. db or db-legacy
 [private]
@@ -84,6 +84,10 @@ restart:
 stop:
     docker-compose down
 
+# Start test nginx server for testing HTTP pmtiles
+start-pmtiles-server:
+    docker-compose up -d fileserver
+
 # Run benchmark tests
 bench: start
     cargo bench
diff --git a/martin/Cargo.toml b/martin/Cargo.toml
index 16998806a..c99b9782b 100644
--- a/martin/Cargo.toml
+++ b/martin/Cargo.toml
@@ -76,6 +76,7 @@ postgis.workspace = true
 postgres-protocol.workspace = true
 postgres.workspace = true
 regex.workspace = true
+reqwest.workspace = true
 rustls-native-certs.workspace = true
 rustls-pemfile.workspace = true
 rustls.workspace = true
@@ -89,6 +90,7 @@ thiserror.workspace = true
 tilejson.workspace = true
 tokio = { workspace = true, features = ["io-std"] }
 tokio-postgres-rustls.workspace = true
+url.workspace = true
 
 [dev-dependencies]
 cargo-husky.workspace = true
diff --git a/martin/src/args/root.rs b/martin/src/args/root.rs
index fe15d2494..2ba4a28d4 100644
--- a/martin/src/args/root.rs
+++ b/martin/src/args/root.rs
@@ -2,6 +2,7 @@ use std::path::PathBuf;
 
 use clap::Parser;
 use log::warn;
+use url::Url;
 
 use crate::args::connections::Arguments;
 use crate::args::environment::Env;
@@ -73,11 +74,11 @@ impl Args {
         }
 
         if !cli_strings.is_empty() {
-            config.pmtiles = parse_file_args(&mut cli_strings, "pmtiles");
+            config.pmtiles = parse_file_args(&mut cli_strings, "pmtiles", true);
         }
 
         if !cli_strings.is_empty() {
-            config.mbtiles = parse_file_args(&mut cli_strings, "mbtiles");
+            config.mbtiles = parse_file_args(&mut cli_strings, "mbtiles", false);
         }
 
         if !self.meta.sprite.is_empty() {
@@ -92,10 +93,29 @@ impl Args {
     }
 }
 
-pub fn parse_file_args(cli_strings: &mut Arguments, extension: &str) -> FileConfigEnum {
-    let paths = cli_strings.process(|v| match PathBuf::try_from(v) {
+fn is_url(s: &str, extension: &str) -> bool {
+    if s.starts_with("http") {
+        if let Ok(url) = Url::parse(s) {
+            if url.scheme() == "http" || url.scheme() == "https" {
+                if let Some(ext) = url.path().rsplit('.').next() {
+                    return ext == extension;
+                }
+            }
+        }
+    }
+    false
+}
+
+pub fn parse_file_args(
+    cli_strings: &mut Arguments,
+    extension: &str,
+    allow_url: bool,
+) -> FileConfigEnum {
+    let paths = cli_strings.process(|s| match PathBuf::try_from(s) {
         Ok(v) => {
-            if v.is_dir() {
+            if allow_url && is_url(s, extension) {
+                Take(v)
+            } else if v.is_dir() {
                 Share(v)
             } else if v.is_file() && v.extension().map_or(false, |e| e == extension) {
                 Take(v)
diff --git a/martin/src/config.rs b/martin/src/config.rs
index e165b8bca..05afe4cbd 100644
--- a/martin/src/config.rs
+++ b/martin/src/config.rs
@@ -9,11 +9,11 @@ use futures::future::try_join_all;
 use serde::{Deserialize, Serialize};
 use subst::VariableMap;
 
-use crate::file_config::{resolve_files, FileConfigEnum};
+use crate::file_config::{resolve_files, resolve_files_urls, FileConfigEnum};
 use crate::fonts::FontSources;
 use crate::mbtiles::MbtSource;
 use crate::pg::PgConfig;
-use crate::pmtiles::PmtSource;
+use crate::pmtiles::{PmtFileSource, PmtHttpSource};
 use crate::source::{TileInfoSources, TileSources};
 use crate::sprites::SpriteSources;
 use crate::srv::SrvConfig;
@@ -90,7 +90,8 @@ impl Config {
     }
 
     async fn resolve_tile_sources(&mut self, idr: IdResolver) -> Result<TileInfoSources> {
-        let new_pmt_src = &mut PmtSource::new_box;
+        let new_pmt_src = &mut PmtFileSource::new_box;
+        let new_pmt_url_src = &mut PmtHttpSource::new_url_box;
         let new_mbt_src = &mut MbtSource::new_box;
         let mut sources: Vec<Pin<Box<dyn Future<Output = Result<TileInfoSources>>>>> = Vec::new();
 
@@ -99,12 +100,14 @@ impl Config {
         }
 
         if !self.pmtiles.is_empty() {
-            let val = resolve_files(&mut self.pmtiles, idr.clone(), "pmtiles", new_pmt_src);
+            let cfg = &mut self.pmtiles;
+            let val = resolve_files_urls(cfg, idr.clone(), "pmtiles", new_pmt_src, new_pmt_url_src);
             sources.push(Box::pin(val));
         }
 
         if !self.mbtiles.is_empty() {
-            let val = resolve_files(&mut self.mbtiles, idr.clone(), "mbtiles", new_mbt_src);
+            let cfg = &mut self.mbtiles;
+            let val = resolve_files(cfg, idr.clone(), "mbtiles", new_mbt_src);
             sources.push(Box::pin(val));
         }
 
diff --git a/martin/src/file_config.rs b/martin/src/file_config.rs
index 50a57e458..71346e140 100644
--- a/martin/src/file_config.rs
+++ b/martin/src/file_config.rs
@@ -1,14 +1,17 @@
 use std::collections::{HashMap, HashSet};
 use std::future::Future;
 use std::mem;
-use std::path::PathBuf;
+use std::path::{Path, PathBuf};
 
 use futures::TryFutureExt;
 use log::{info, warn};
 use serde::{Deserialize, Serialize};
+use url::Url;
 
 use crate::config::{copy_unrecognized_config, UnrecognizedValues};
-use crate::file_config::FileError::{InvalidFilePath, InvalidSourceFilePath, IoError};
+use crate::file_config::FileError::{
+    InvalidFilePath, InvalidSourceFilePath, InvalidSourceUrl, IoError,
+};
 use crate::source::{Source, TileInfoSources};
 use crate::utils::{sorted_opt_map, Error, IdResolver, OptOneMany};
 use crate::OptOneMany::{Many, One};
@@ -21,6 +24,9 @@ pub enum FileError {
     #[error("Source path is not a file: {}", .0.display())]
     InvalidFilePath(PathBuf),
 
+    #[error("Error {0} while parsing URL {1}")]
+    InvalidSourceUrl(url::ParseError, String),
+
     #[error("Source {0} uses bad file {}", .1.display())]
     InvalidSourceFilePath(String, PathBuf),
 
@@ -29,6 +35,9 @@
 
     #[error(r#"Unable to aquire connection to file: {0}"#)]
     AquireConnError(String),
+
+    #[error(r#"PMTiles error {0} processing {1}"#)]
+    PmtError(pmtiles::error::Error, String),
 }
 
 #[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize)]
@@ -167,6 +176,10 @@ pub struct FileConfigSource {
     pub path: PathBuf,
 }
 
+async fn dummy_resolver(_id: String, _url: Url) -> Result<Box<dyn Source>, FileError> {
+    unreachable!()
+}
+
 pub async fn resolve_files<Fut>(
     config: &mut FileConfigEnum,
     idr: IdResolver,
     extension: &str,
@@ -176,19 +189,39 @@
 where
     Fut: Future<Output = Result<Box<dyn Source>, FileError>>,
 {
-    resolve_int(config, idr, extension, new_source)
+    let dummy = &mut dummy_resolver;
+    resolve_int(config, idr, extension, false, new_source, dummy)
         .map_err(crate::Error::from)
         .await
 }
 
-async fn resolve_int<Fut>(
+pub async fn resolve_files_urls<Fut1, Fut2>(
     config: &mut FileConfigEnum,
     idr: IdResolver,
     extension: &str,
-    new_source: &mut impl FnMut(String, PathBuf) -> Fut,
+    new_source: &mut impl FnMut(String, PathBuf) -> Fut1,
+    new_url_source: &mut impl FnMut(String, Url) -> Fut2,
+) -> Result<TileInfoSources>
+where
+    Fut1: Future<Output = Result<Box<dyn Source>, FileError>>,
+    Fut2: Future<Output = Result<Box<dyn Source>, FileError>>,
+{
+    resolve_int(config, idr, extension, true, new_source, new_url_source)
+        .map_err(crate::Error::from)
+        .await
+}
+
+async fn resolve_int<Fut1, Fut2>(
+    config: &mut FileConfigEnum,
+    idr: IdResolver,
+    extension: &str,
+    parse_urls: bool,
+    new_source: &mut impl FnMut(String, PathBuf) -> Fut1,
+    new_url_source: &mut impl FnMut(String, Url) -> Fut2,
+) -> Result<TileInfoSources, FileError>
 where
-    Fut: Future<Output = Result<Box<dyn Source>, FileError>>,
+    Fut1: Future<Output = Result<Box<dyn Source>, FileError>>,
+    Fut2: Future<Output = Result<Box<dyn Source>, FileError>>,
 {
     let Some(cfg) = config.extract_file_config() else {
         return Ok(TileInfoSources::default());
     };
@@ -201,67 +234,76 @@
 
     if let Some(sources) = cfg.sources {
         for (id, source) in sources {
-            let can = source.abs_path()?;
-            if !can.is_file() {
-                // todo: maybe warn instead?
-                return Err(InvalidSourceFilePath(id.to_string(), can));
-            }
-
-            let dup = !files.insert(can.clone());
-            let dup = if dup { "duplicate " } else { "" };
-            let id = idr.resolve(&id, can.to_string_lossy().to_string());
-            info!("Configured {dup}source {id} from {}", can.display());
-            configs.insert(id.clone(), source.clone());
+            if let Some(url) = parse_url(parse_urls, source.get_path())? {
+                let dup = !files.insert(source.get_path().clone());
+                let dup = if dup { "duplicate " } else { "" };
+                let id = idr.resolve(&id, url.to_string());
+                configs.insert(id.clone(), source);
+                results.push(new_url_source(id.clone(), url.clone()).await?);
+                info!("Configured {dup}source {id} from {}", sanitize_url(&url));
+            } else {
+                let can = source.abs_path()?;
+                if !can.is_file() {
+                    // todo: maybe warn instead?
+                    return Err(InvalidSourceFilePath(id.to_string(), can));
+                }
 
-            let path = match source {
-                FileConfigSrc::Obj(pmt) => pmt.path,
-                FileConfigSrc::Path(path) => path,
-            };
-            results.push(new_source(id, path).await?);
+                let dup = !files.insert(can.clone());
+                let dup = if dup { "duplicate " } else { "" };
+                let id = idr.resolve(&id, can.to_string_lossy().to_string());
+                info!("Configured {dup}source {id} from {}", can.display());
+                configs.insert(id.clone(), source.clone());
+                results.push(new_source(id, source.into_path()).await?);
+            }
         }
     }
 
     for path in cfg.paths {
-        let is_dir = path.is_dir();
-        let dir_files = if is_dir {
-            // directories will be kept in the config just in case there are new files
-            directories.push(path.clone());
-            path.read_dir()
-                .map_err(|e| IoError(e, path.clone()))?
-                .filter_map(Result::ok)
-                .filter(|f| {
-                    f.path().extension().filter(|e| *e == extension).is_some() && f.path().is_file()
+        if let Some(url) = parse_url(parse_urls, &path)? {
+            let id = url
+                .path_segments()
+                .and_then(Iterator::last)
+                .and_then(|s| {
+                    // Strip extension and trailing dot, or keep the original string
+                    s.strip_suffix(extension)
+                        .and_then(|s| s.strip_suffix('.'))
+                        .or(Some(s))
                 })
-                .map(|f| f.path())
-                .collect()
-        } else if path.is_file() {
-            vec![path]
+                .unwrap_or("pmt_web_source");
+
+            let id = idr.resolve(id, url.to_string());
+            configs.insert(id.clone(), FileConfigSrc::Path(path));
+            results.push(new_url_source(id.clone(), url.clone()).await?);
+            info!("Configured source {id} from URL {}", sanitize_url(&url));
         } else {
-            return Err(InvalidFilePath(path.canonicalize().unwrap_or(path)));
-        };
-        for path in dir_files {
-            let can = path.canonicalize().map_err(|e| IoError(e, path.clone()))?;
-            if files.contains(&can) {
-                if !is_dir {
-                    warn!("Ignoring duplicate MBTiles path: {}", can.display());
+            let is_dir = path.is_dir();
+            let dir_files = if is_dir {
+                // directories will be kept in the config just in case there are new files
+                directories.push(path.clone());
+                dir_to_paths(&path, extension)?
+            } else if path.is_file() {
+                vec![path]
+            } else {
+                return Err(InvalidFilePath(path.canonicalize().unwrap_or(path)));
+            };
+            for path in dir_files {
+                let can = path.canonicalize().map_err(|e| IoError(e, path.clone()))?;
+                if files.contains(&can) {
+                    if !is_dir {
+                        warn!("Ignoring duplicate MBTiles path: {}", can.display());
+                    }
+                    continue;
                 }
-            let id = path.file_stem().map_or_else(
-                || "_unknown".to_string(),
-                |s| s.to_string_lossy().to_string(),
-            );
-            let source = FileConfigSrc::Path(path);
-            let id = idr.resolve(&id, can.to_string_lossy().to_string());
-            info!("Configured source {id} from {}", can.display());
-            files.insert(can);
-            configs.insert(id.clone(), source.clone());
-
-            let path = match source {
-                FileConfigSrc::Obj(pmt) => pmt.path,
-                FileConfigSrc::Path(path) => path,
-            };
-            results.push(new_source(id, path).await?);
+                let id = path.file_stem().map_or_else(
+                    || "_unknown".to_string(),
+                    |s| s.to_string_lossy().to_string(),
+                );
+                let id = idr.resolve(&id, can.to_string_lossy().to_string());
+                info!("Configured source {id} from {}", can.display());
+                files.insert(can);
+                configs.insert(id.clone(), FileConfigSrc::Path(path.clone()));
+                results.push(new_source(id, path).await?);
+            }
         }
     }
 
@@ -270,6 +312,41 @@
     Ok(results)
 }
 
+fn dir_to_paths(path: &Path, extension: &str) -> Result<Vec<PathBuf>, FileError> {
+    Ok(path
+        .read_dir()
+        .map_err(|e| IoError(e, path.to_path_buf()))?
+        .filter_map(Result::ok)
+        .filter(|f| {
+            f.path().extension().filter(|e| *e == extension).is_some() && f.path().is_file()
+        })
+        .map(|f| f.path())
+        .collect())
+}
+
+fn sanitize_url(url: &Url) -> String {
+    let mut result = format!("{}://", url.scheme());
+    if let Some(host) = url.host_str() {
+        result.push_str(host);
+    }
+    if let Some(port) = url.port() {
+        result.push(':');
+        result.push_str(&port.to_string());
+    }
+    result.push_str(url.path());
+    result
+}
+
+fn parse_url(is_enabled: bool, path: &Path) -> Result<Option<Url>, FileError> {
+    if !is_enabled {
+        return Ok(None);
+    }
+    path.to_str()
+        .filter(|v| v.starts_with("http://") || v.starts_with("https://"))
+        .map(|v| Url::parse(v).map_err(|e| InvalidSourceUrl(e, v.to_string())))
+        .transpose()
+}
+
 #[cfg(test)]
 mod tests {
     use std::collections::HashMap;
@@ -285,10 +362,14 @@
             paths:
               - /dir-path
              - /path/to/file2.ext
+              - http://example.com/file.ext
            sources:
                pm-src1: /tmp/file.ext
                pm-src2:
                  path: /tmp/file.ext
+                pm-src3: https://example.com/file3.ext
+                pm-src4:
+                  path: https://example.com/file4.ext
         "})
         .unwrap();
         let res = cfg.finalize("").unwrap();
@@ -302,6 +383,7 @@
             vec![
                 PathBuf::from("/dir-path"),
                 PathBuf::from("/path/to/file2.ext"),
+                PathBuf::from("http://example.com/file.ext"),
             ]
         );
         assert_eq!(
@@ -316,7 +398,17 @@
                     FileConfigSrc::Obj(FileConfigSource {
                         path: PathBuf::from("/tmp/file.ext"),
                     })
-                )
+                ),
+                (
+                    "pm-src3".to_string(),
+                    FileConfigSrc::Path(PathBuf::from("https://example.com/file3.ext"))
+                ),
+                (
+                    "pm-src4".to_string(),
+                    FileConfigSrc::Obj(FileConfigSource {
+                        path: PathBuf::from("https://example.com/file4.ext"),
+                    })
+                ),
             ]))
         );
     }
diff --git a/martin/src/pmtiles/file_pmtiles.rs b/martin/src/pmtiles/file_pmtiles.rs
new file mode 100644
index 000000000..d27fbd01b
--- /dev/null
+++ b/martin/src/pmtiles/file_pmtiles.rs
@@ -0,0 +1,167 @@
+use std::fmt::{Debug, Formatter};
+use std::io;
+use std::path::PathBuf;
+use std::sync::Arc;
+
+use async_trait::async_trait;
+use log::{trace, warn};
+use martin_tile_utils::{Encoding, Format, TileInfo};
+use pmtiles::async_reader::AsyncPmTilesReader;
+use pmtiles::mmap::MmapBackend;
+use pmtiles::{Compression, TileType};
+use tilejson::TileJSON;
+
+use crate::file_config::FileError;
+use crate::file_config::FileError::{InvalidMetadata, IoError};
+use crate::source::{Source, Tile, UrlQuery};
+use crate::{Error, Xyz};
+
+#[derive(Clone)]
+pub struct PmtFileSource {
+    id: String,
+    path: PathBuf,
+    pmtiles: Arc<AsyncPmTilesReader<MmapBackend>>,
+    tilejson: TileJSON,
+    tile_info: TileInfo,
+}
+
+impl Debug for PmtFileSource {
+    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+        write!(
+            f,
+            "PmtFileSource {{ id: {}, path: {:?} }}",
+            self.id, self.path
+        )
+    }
+}
+
+impl PmtFileSource {
+    pub async fn new_box(id: String, path: PathBuf) -> Result<Box<dyn Source>, FileError> {
+        Ok(Box::new(PmtFileSource::new(id, path).await?))
+    }
+
+    async fn new(id: String, path: PathBuf) -> Result<Self, FileError> {
+        let backend = MmapBackend::try_from(path.as_path())
+            .await
+            .map_err(|e| {
+                io::Error::new(
+                    io::ErrorKind::Other,
+                    format!("{e:?}: Cannot open file {}", path.display()),
+                )
+            })
+            .map_err(|e| IoError(e, path.clone()))?;
+
+        let reader = AsyncPmTilesReader::try_from_source(backend).await;
+        let reader = reader
+            .map_err(|e| {
+                io::Error::new(
+                    io::ErrorKind::Other,
+                    format!("{e:?}: Cannot open file {}", path.display()),
+                )
+            })
+            .map_err(|e| IoError(e, path.clone()))?;
+
+        Self::new_int(id, path, reader).await
+    }
+}
+
+impl PmtFileSource {
+    async fn new_int(
+        id: String,
+        path: PathBuf,
+        reader: AsyncPmTilesReader<MmapBackend>,
+    ) -> Result<Self, FileError> {
+        let hdr = &reader.header;
+
+        if hdr.tile_type != TileType::Mvt && hdr.tile_compression != Compression::None {
+            return Err(InvalidMetadata(
+                format!(
+                    "Format {:?} and compression {:?} are not yet supported",
+                    hdr.tile_type, hdr.tile_compression
+                ),
+                path,
+            ));
+        }
+
+        let format = match hdr.tile_type {
+            TileType::Mvt => TileInfo::new(
+                Format::Mvt,
+                match hdr.tile_compression {
+                    Compression::None => Encoding::Uncompressed,
+                    Compression::Unknown => {
+                        warn!(
+                            "MVT tiles have unknown compression in file {}",
+                            path.display()
+                        );
+                        Encoding::Uncompressed
+                    }
+                    Compression::Gzip => Encoding::Gzip,
+                    Compression::Brotli => Encoding::Brotli,
+                    Compression::Zstd => Encoding::Zstd,
+                },
+            ),
+            // All these assume uncompressed data (validated above)
+            TileType::Png => Format::Png.into(),
+            TileType::Jpeg => Format::Jpeg.into(),
+            TileType::Webp => Format::Webp.into(),
+            TileType::Unknown => {
+                return Err(InvalidMetadata(
+                    "Unknown tile type".to_string(),
+                    path.clone(),
+                ))
+            }
+        };
+
+        let tilejson = reader.parse_tilejson(Vec::new()).await.unwrap_or_else(|e| {
+            warn!("{e:?}: Unable to parse metadata for {}", path.display());
+            hdr.get_tilejson(Vec::new())
+        });
+
+        Ok(Self {
+            id,
+            path,
+            pmtiles: Arc::new(reader),
+            tilejson,
+            tile_info: format,
+        })
+    }
+}
+
+#[async_trait]
+impl Source for PmtFileSource {
+    fn get_id(&self) -> &str {
+        &self.id
+    }
+
+    fn get_tilejson(&self) -> &TileJSON {
+        &self.tilejson
+    }
+
+    fn get_tile_info(&self) -> TileInfo {
+        self.tile_info
+    }
+
+    fn clone_source(&self) -> Box<dyn Source> {
+        Box::new(self.clone())
+    }
+
+    async fn get_tile(&self, xyz: &Xyz, _url_query: &Option<UrlQuery>) -> Result<Tile> {
+        // TODO: optimize to return Bytes
+        if let Some(t) = self
+            .pmtiles
+            .get_tile(xyz.z, u64::from(xyz.x), u64::from(xyz.y))
+            .await
+        {
+            Ok(t.data.to_vec())
+        } else {
+            trace!(
+                "Couldn't find tile data in {}/{}/{} of {}",
+                xyz.z,
+                xyz.x,
+                xyz.y,
+                &self.id
+            );
+            Ok(Vec::new())
+        }
+    }
+}
diff --git a/martin/src/pmtiles/http_pmtiles.rs b/martin/src/pmtiles/http_pmtiles.rs
new file mode 100644
index 000000000..5b9e2e0e5
--- /dev/null
+++ b/martin/src/pmtiles/http_pmtiles.rs
@@ -0,0 +1,147 @@
+use std::fmt::{Debug, Formatter};
+use std::sync::Arc;
+
+use async_trait::async_trait;
+use log::{trace, warn};
+use martin_tile_utils::{Encoding, Format, TileInfo};
+use pmtiles::async_reader::AsyncPmTilesReader;
+use pmtiles::http::HttpBackend;
+use pmtiles::{Compression, TileType};
+use reqwest::Client;
+use tilejson::TileJSON;
+use url::Url;
+
+use crate::file_config::FileError;
+use crate::file_config::FileError::InvalidMetadata;
+use crate::source::{Source, Tile, UrlQuery};
+use crate::{Error, Xyz};
+
+#[derive(Clone)]
+pub struct PmtHttpSource {
+    id: String,
+    url: Url,
+    pmtiles: Arc<AsyncPmTilesReader<HttpBackend>>,
+    tilejson: TileJSON,
+    tile_info: TileInfo,
+}
+
+impl Debug for PmtHttpSource {
+    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+        write!(
+            f,
+            "PmtHttpSource {{ id: {}, path: {:?} }}",
+            self.id, self.url
+        )
+    }
+}
+
+impl PmtHttpSource {
+    pub async fn new_url_box(id: String, url: Url) -> Result<Box<dyn Source>, FileError> {
+        let client = Client::new();
+        Ok(Box::new(PmtHttpSource::new_url(client, id, url).await?))
+    }
+
+    async fn new_url(client: Client, id: String, url: Url) -> Result<Self, FileError> {
+        let reader = AsyncPmTilesReader::new_with_url(client, url.clone()).await;
+        let reader = reader.map_err(|e| FileError::PmtError(e, url.to_string()))?;
+        Self::new_int(id, url, reader).await
+    }
+}
+
+impl PmtHttpSource {
+    async fn new_int(
+        id: String,
+        url: Url,
+        reader: AsyncPmTilesReader<HttpBackend>,
+    ) -> Result<Self, FileError> {
+        let hdr = &reader.header;
+
+        if hdr.tile_type != TileType::Mvt && hdr.tile_compression != Compression::None {
+            return Err(InvalidMetadata(
+                format!(
+                    "Format {:?} and compression {:?} are not yet supported",
+                    hdr.tile_type, hdr.tile_compression
+                ),
+                url.to_string().into(),
+            ));
+        }
+
+        let format = match hdr.tile_type {
+            TileType::Mvt => TileInfo::new(
+                Format::Mvt,
+                match hdr.tile_compression {
+                    Compression::None => Encoding::Uncompressed,
+                    Compression::Unknown => {
+                        warn!("MVT tiles have unknown compression in file {url}");
+                        Encoding::Uncompressed
+                    }
+                    Compression::Gzip => Encoding::Gzip,
+                    Compression::Brotli => Encoding::Brotli,
+                    Compression::Zstd => Encoding::Zstd,
+                },
+            ),
+            // All these assume uncompressed data (validated above)
+            TileType::Png => Format::Png.into(),
+            TileType::Jpeg => Format::Jpeg.into(),
+            TileType::Webp => Format::Webp.into(),
+            TileType::Unknown => {
+                return Err(InvalidMetadata(
+                    "Unknown tile type".to_string(),
+                    url.to_string().into(),
+                ))
+            }
+        };
+
+        let tilejson = reader.parse_tilejson(Vec::new()).await.unwrap_or_else(|e| {
+            warn!("{e:?}: Unable to parse metadata for {url}");
+            hdr.get_tilejson(Vec::new())
+        });
+
+        Ok(Self {
+            id,
+            url,
+            pmtiles: Arc::new(reader),
+            tilejson,
+            tile_info: format,
+        })
+    }
+}
+
+#[async_trait]
+impl Source for PmtHttpSource {
+    fn get_id(&self) -> &str {
+        &self.id
+    }
+
+    fn get_tilejson(&self) -> &TileJSON {
+        &self.tilejson
+    }
+
+    fn get_tile_info(&self) -> TileInfo {
+        self.tile_info
+    }
+
+    fn clone_source(&self) -> Box<dyn Source> {
+        Box::new(self.clone())
+    }
+
+    async fn get_tile(&self, xyz: &Xyz, _url_query: &Option<UrlQuery>) -> Result<Tile> {
+        // TODO: optimize to return Bytes
+        if let Some(t) = self
+            .pmtiles
+            .get_tile(xyz.z, u64::from(xyz.x), u64::from(xyz.y))
+            .await
+        {
+            Ok(t.data.to_vec())
+        } else {
+            trace!(
+                "Couldn't find tile data in {}/{}/{} of {}",
+                xyz.z,
+                xyz.x,
+                xyz.y,
+                &self.id
+            );
+            Ok(Vec::new())
+        }
+    }
+}
diff --git a/martin/src/pmtiles/mod.rs b/martin/src/pmtiles/mod.rs
index 0b678c928..747faa158 100644
--- a/martin/src/pmtiles/mod.rs
+++ b/martin/src/pmtiles/mod.rs
@@ -1,151 +1,4 @@
-use std::fmt::{Debug, Formatter};
-use std::io;
-use std::path::PathBuf;
-use std::sync::Arc;
-
-use async_trait::async_trait;
-use log::{trace, warn};
-use martin_tile_utils::{Encoding, Format, TileInfo};
-use pmtiles::async_reader::AsyncPmTilesReader;
-use pmtiles::mmap::MmapBackend;
-use pmtiles::{Compression, TileType};
-use tilejson::TileJSON;
-
-use crate::file_config::FileError;
-use crate::file_config::FileError::{InvalidMetadata, IoError};
-use crate::source::{Source, Tile, UrlQuery};
-use crate::{Error, Xyz};
-
-#[derive(Clone)]
-pub struct PmtSource {
-    id: String,
-    path: PathBuf,
-    pmtiles: Arc<AsyncPmTilesReader<MmapBackend>>,
-    tilejson: TileJSON,
-    tile_info: TileInfo,
-}
-
-impl Debug for PmtSource {
-    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
-        write!(f, "PmtSource {{ id: {}, path: {:?} }}", self.id, self.path)
-    }
-}
-
-impl PmtSource {
-    pub async fn new_box(id: String, path: PathBuf) -> Result<Box<dyn Source>, FileError> {
-        Ok(Box::new(PmtSource::new(id, path).await?))
-    }
-
-    async fn new(id: String, path: PathBuf) -> Result<Self, FileError> {
-        let backend = MmapBackend::try_from(path.as_path())
-            .await
-            .map_err(|e| {
-                io::Error::new(
-                    io::ErrorKind::Other,
-                    format!("{e:?}: Cannot open file {}", path.display()),
-                )
-            })
-            .map_err(|e| IoError(e, path.clone()))?;
-
-        let reader = AsyncPmTilesReader::try_from_source(backend).await;
-        let reader = reader
-            .map_err(|e| {
-                io::Error::new(
-                    io::ErrorKind::Other,
-                    format!("{e:?}: Cannot open file {}", path.display()),
-                )
-            })
-            .map_err(|e| IoError(e, path.clone()))?;
-        let hdr = &reader.header;
-
-        if hdr.tile_type != TileType::Mvt && hdr.tile_compression != Compression::None {
-            return Err(InvalidMetadata(
-                format!(
-                    "Format {:?} and compression {:?} are not yet supported",
-                    hdr.tile_type, hdr.tile_compression
-                ),
-                path,
-            ));
-        }
-
-        let format = match hdr.tile_type {
-            TileType::Mvt => TileInfo::new(
-                Format::Mvt,
-                match hdr.tile_compression {
-                    Compression::None => Encoding::Uncompressed,
-                    Compression::Unknown => {
-                        warn!(
-                            "MVT tiles have unknown compression in file {}",
-                            path.display()
-                        );
-                        Encoding::Uncompressed
-                    }
-                    Compression::Gzip => Encoding::Gzip,
-                    Compression::Brotli => Encoding::Brotli,
-                    Compression::Zstd => Encoding::Zstd,
-                },
-            ),
-            TileType::Png => Format::Png.into(),
-            TileType::Jpeg => Format::Jpeg.into(),
-            TileType::Webp => Format::Webp.into(),
-            TileType::Unknown => {
-                return Err(InvalidMetadata(
-                    "Unknown tile type".to_string(),
-                    path.clone(),
-                ))
-            }
-        };
-
-        let tilejson = reader.parse_tilejson(Vec::new()).await.unwrap_or_else(|e| {
-            warn!("{e:?}: Unable to parse metadata for {}", path.display());
-            hdr.get_tilejson(Vec::new())
-        });
-
-        Ok(Self {
-            id,
-            path,
-            pmtiles: Arc::new(reader),
-            tilejson,
-            tile_info: format,
-        })
-    }
-}
-
-#[async_trait]
-impl Source for PmtSource {
-    fn get_id(&self) -> &str {
-        &self.id
-    }
-
-    fn get_tilejson(&self) -> &TileJSON {
-        &self.tilejson
-    }
-
-    fn get_tile_info(&self) -> TileInfo {
-        self.tile_info
-    }
-
-    fn clone_source(&self) -> Box<dyn Source> {
-        Box::new(self.clone())
-    }
-
-    async fn get_tile(&self, xyz: &Xyz, _url_query: &Option<UrlQuery>) -> Result<Tile> {
-        // TODO: optimize to return Bytes
-        if let Some(t) = self
-            .pmtiles
-            .get_tile(xyz.z, u64::from(xyz.x), u64::from(xyz.y))
-            .await
-        {
-            Ok(t.data.to_vec())
-        } else {
-            trace!(
-                "Couldn't find tile data in {}/{}/{} of {}",
-                xyz.z,
-                xyz.x,
-                xyz.y,
-                &self.id
-            );
-            Ok(Vec::new())
-        }
-    }
-}
+mod file_pmtiles;
+mod http_pmtiles;
+pub use file_pmtiles::PmtFileSource;
+pub use http_pmtiles::PmtHttpSource;
diff --git a/tests/config.yaml b/tests/config.yaml
index 06ed8f98b..0ca249dcf 100644
--- a/tests/config.yaml
+++ b/tests/config.yaml
@@ -163,7 +163,7 @@ postgres:
 pmtiles:
   sources:
     pmt: tests/fixtures/pmtiles/stamen_toner__raster_CC-BY+ODbL_z3.pmtiles
-    pmt2: tests/fixtures/pmtiles2/webp2.pmtiles
+    pmt2: http://localhost:5412/webp2.pmtiles
 
 sprites:
   paths: tests/fixtures/sprites/src1
diff --git a/tests/expected/generated_config.yaml b/tests/expected/generated_config.yaml
index c2317e9ce..20acee04f 100644
--- a/tests/expected/generated_config.yaml
+++ b/tests/expected/generated_config.yaml
@@ -191,16 +191,14 @@ pmtiles:
   paths:
     - tests/fixtures/mbtiles
     - tests/fixtures/pmtiles
-    - tests/fixtures/pmtiles2
   sources:
     png: tests/fixtures/pmtiles/png.pmtiles
     stamen_toner__raster_CC-BY-ODbL_z3: tests/fixtures/pmtiles/stamen_toner__raster_CC-BY+ODbL_z3.pmtiles
-    webp2: tests/fixtures/pmtiles2/webp2.pmtiles
+    webp2: http://localhost:5412/webp2.pmtiles
 mbtiles:
   paths:
     - tests/fixtures/mbtiles
     - tests/fixtures/pmtiles
-    - tests/fixtures/pmtiles2
   sources:
     geography-class-jpg: tests/fixtures/mbtiles/geography-class-jpg.mbtiles
     geography-class-jpg-diff: tests/fixtures/mbtiles/geography-class-jpg-diff.mbtiles
diff --git a/tests/expected/given_config.yaml b/tests/expected/given_config.yaml
index 3de20ebcb..d0f1d88ae 100644
--- a/tests/expected/given_config.yaml
+++ b/tests/expected/given_config.yaml
@@ -160,7 +160,7 @@ postgres:
 pmtiles:
   sources:
     pmt: tests/fixtures/pmtiles/stamen_toner__raster_CC-BY+ODbL_z3.pmtiles
-    pmt2: tests/fixtures/pmtiles2/webp2.pmtiles
+    pmt2: http://localhost:5412/webp2.pmtiles
 sprites:
   paths: tests/fixtures/sprites/src1
   sources:
diff --git a/tests/test.sh b/tests/test.sh
index 1423385b2..cd78532f3 100755
--- a/tests/test.sh
+++ b/tests/test.sh
@@ -3,6 +3,7 @@ set -euo pipefail
 
 # TODO: use --fail-with-body to get the response body on failure
 CURL=${CURL:-curl --silent --show-error --fail --compressed}
+STATICS_URL="${STATICS_URL:-http://localhost:5412}"
 DATABASE_URL="${DATABASE_URL:-postgres://postgres@localhost/db}"
 MARTIN_BUILD="${MARTIN_BUILD:-cargo build}"
 MARTIN_PORT="${MARTIN_PORT:-3111}"
@@ -169,13 +170,17 @@ if [[ "$MBTILES_BUILD" != "-" ]]; then
 fi
 
 
+echo "------------------------------------------------------------------------------------------------------------------------"
+echo "Check HTTP server is running"
+$CURL --head "$STATICS_URL/webp2.pmtiles"
+
 echo "------------------------------------------------------------------------------------------------------------------------"
 echo "Test auto configured Martin"
 
 TEST_OUT_DIR="$(dirname "$0")/output/auto"
 mkdir -p "$TEST_OUT_DIR"
 
-ARG=(--default-srid 900913 --auto-bounds calc --save-config "$(dirname "$0")/output/generated_config.yaml" tests/fixtures/mbtiles tests/fixtures/pmtiles tests/fixtures/pmtiles2 --sprite tests/fixtures/sprites/src1 --font tests/fixtures/fonts/overpass-mono-regular.ttf --font tests/fixtures/fonts)
+ARG=(--default-srid 900913 --auto-bounds calc --save-config "$(dirname "$0")/output/generated_config.yaml" tests/fixtures/mbtiles tests/fixtures/pmtiles "$STATICS_URL/webp2.pmtiles" --sprite tests/fixtures/sprites/src1 --font tests/fixtures/fonts/overpass-mono-regular.ttf --font tests/fixtures/fonts)
 set -x
 $MARTIN_BIN "${ARG[@]}" 2>&1 | tee "${LOG_DIR}/test_log_1.txt" &
 MARTIN_PROC_ID=`jobs -p | tail -n 1`