author     Daniel Schadt <kingdread@gmx.de>   2023-01-18 20:46:26 +0100
committer  Daniel Schadt <kingdread@gmx.de>   2023-01-18 20:46:26 +0100
commit     a6e3e58877307d7c9113fb229d46ada5f165eadc (patch)
tree       35a1d5d3a8e00369ea958ec02df688430d78a995 /src
parent     ac3afadba547b4b9a4063da567acd6d2f4f74554 (diff)
download   hittekaart-a6e3e58877307d7c9113fb229d46ada5f165eadc.tar.gz
           hittekaart-a6e3e58877307d7c9113fb229d46ada5f165eadc.tar.bz2
           hittekaart-a6e3e58877307d7c9113fb229d46ada5f165eadc.zip
add some more docstrings
Diffstat (limited to 'src')
-rw-r--r--  src/gpx.rs       30
-rw-r--r--  src/layer.rs     28
-rw-r--r--  src/lib.rs        8
-rw-r--r--  src/renderer.rs  23
-rw-r--r--  src/storage.rs   44
5 files changed, 125 insertions(+), 8 deletions(-)
diff --git a/src/gpx.rs b/src/gpx.rs
index 9dd7725..fb9e00e 100644
--- a/src/gpx.rs
+++ b/src/gpx.rs
@@ -3,6 +3,9 @@
//! We *could* use the [gpx](https://github.com/georust/gpx) crate, but we don't care about much
//! other than the coordinates of the tracks. By implementing the little functionality ourselves,
//! we can use a fast XML parser ([roxmltree](https://github.com/RazrFalcon/roxmltree)).
+//!
+//! Note that we throw away all information that we don't care about. Since we need only the
+//! coordinates of a track, we simply use a `Vec<Coordinates>` to represent a track.
use std::{
f64::consts::PI,
ffi::OsStr,
@@ -15,6 +18,7 @@ use color_eyre::eyre::{eyre, Result};
use flate2::bufread::GzDecoder;
use roxmltree::{Document, Node, NodeType};
+/// World coordinates.
#[derive(Debug, Copy, Clone, PartialEq)]
pub struct Coordinates {
longitude: f64,
@@ -24,13 +28,17 @@ pub struct Coordinates {
impl Coordinates {
/// Calculates the [Web Mercator
/// projection](https://en.wikipedia.org/wiki/Web_Mercator_projection) of the coordinates.
- /// Returns the `(x, y)` coordinates.
+ ///
+ /// Returns the `(x, y)` projection, where both are in the range `[0, 256 * 2^zoom)`.
pub fn web_mercator(self, zoom: u32) -> (u64, u64) {
+ const WIDTH: f64 = super::layer::TILE_WIDTH as f64;
+ const HEIGHT: f64 = super::layer::TILE_HEIGHT as f64;
+
let lambda = self.longitude.to_radians();
let phi = self.latitude.to_radians();
- let x = 2u64.pow(zoom) as f64 / (2.0 * PI) * 256.0 * (lambda + PI);
+ let x = 2u64.pow(zoom) as f64 / (2.0 * PI) * WIDTH * (lambda + PI);
let y =
- 2u64.pow(zoom) as f64 / (2.0 * PI) * 256.0 * (PI - (PI / 4.0 + phi / 2.0).tan().ln());
+ 2u64.pow(zoom) as f64 / (2.0 * PI) * HEIGHT * (PI - (PI / 4.0 + phi / 2.0).tan().ln());
(x.floor() as u64, y.floor() as u64)
}
}
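
As a quick sanity check of the projection above: at zoom 0 the whole world fits into a single 256 × 256 tile, so (0° N, 0° E) should project near the centre at roughly `(128, 128)`. A minimal test sketch (not part of this patch), assuming it lives inside `src/gpx.rs` where the private `Coordinates` fields are accessible:

```rust
#[cfg(test)]
mod tests {
    use super::Coordinates;

    #[test]
    fn null_island_is_centered_at_zoom_zero() {
        let origin = Coordinates {
            longitude: 0.0,
            latitude: 0.0,
        };
        let (x, y) = origin.web_mercator(0);
        // 2^0 / (2π) · 256 · π = 128 on both axes, modulo floating point rounding.
        assert!((127..=128).contains(&x));
        assert!((127..=128).contains(&y));
    }
}
```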
@@ -47,6 +55,7 @@ fn is_track_point(node: &Node) -> bool {
node.node_type() == NodeType::Element && node.tag_name().name() == "trkpt"
}
+/// Extracts a track from the given string.
pub fn extract_from_str(input: &str) -> Result<Vec<Coordinates>> {
let mut result = Vec::new();
let document = Document::parse(input)?;
@@ -71,14 +80,26 @@ pub fn extract_from_str(input: &str) -> Result<Vec<Coordinates>> {
Ok(result)
}
+/// Compression format of the data.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum Compression {
+ /// Indicates that no compression is applied, and the file is plain GPX.
None,
+ /// Indicates that the file is gzip compressed.
Gzip,
+ /// Indicates that the file is brotli compressed.
Brotli,
}
impl Compression {
+ /// Suggests a [`Compression`] from the given path name.
+ ///
+ /// This will suggest [`Compression::Brotli`] for files ending in `.br`, [`Compression::Gzip`]
+ /// for files ending with `.gz` or `.gzip`, and [`Compression::None`] for files ending with
+ /// `.gpx`.
+ ///
+ /// If the file does not end with any of the aforementioned extensions, `None` is returned
+ /// instead.
pub fn suggest_from_path<P: AsRef<Path>>(path: P) -> Option<Compression> {
let Some(ext) = path.as_ref().extension() else { return None };
if OsStr::new("br") == ext {
@@ -93,6 +114,9 @@ impl Compression {
}
}
+/// Extracts the relevant GPX data from the given file.
+///
+/// Note that the content must be valid UTF-8, as that is what our parser expects.
pub fn extract_from_file<P: AsRef<Path>>(
path: P,
compression: Compression,
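
A rough usage sketch for the functions documented above. Assumptions: `extract_from_file` returns `Result<Vec<Coordinates>>` like `extract_from_str` (its return type is cut off in this diff), and the input path is hypothetical:

```rust
use hittekaart::gpx::{self, Compression};

fn load_track() -> color_eyre::Result<()> {
    // Hypothetical input file; the `.br` extension makes suggest_from_path pick Brotli.
    let path = "tracks/ride.gpx.br";
    let compression = Compression::suggest_from_path(path).unwrap_or(Compression::None);
    let track = gpx::extract_from_file(path, compression)?;
    println!("track has {} points", track.len());
    Ok(())
}
```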
diff --git a/src/layer.rs b/src/layer.rs
index 2726c81..dec2419 100644
--- a/src/layer.rs
+++ b/src/layer.rs
@@ -18,12 +18,17 @@ use image::{
use num_traits::Zero;
use rayon::iter::{IntoParallelIterator, ParallelIterator};
+/// Height of a single tile.
pub const TILE_HEIGHT: u64 = 256;
+/// Width of a single tile.
pub const TILE_WIDTH: u64 = 256;
type TileIndex = (u64, u64);
/// Main "lazy image buffer" struct.
+///
+/// This lazily allocates a new tile (of size [`TILE_WIDTH`] × [`TILE_HEIGHT`]) for each mutable
+/// pixel access. Each tile is pre-filled with the given default pixel.
#[derive(Debug, Clone)]
pub struct TileLayer<P: Pixel> {
tiles: FnvHashMap<TileIndex, ImageBuffer<P, Vec<P::Subpixel>>>,
@@ -31,6 +36,9 @@ pub struct TileLayer<P: Pixel> {
}
impl<P: Pixel> TileLayer<P> {
+ /// Construct a new lazy buffer with the given default (background) pixel.
+ ///
+ /// Note that this does not yet allocate any image tiles.
pub fn from_pixel(pixel: P) -> Self {
TileLayer {
tiles: Default::default(),
@@ -38,19 +46,25 @@ impl<P: Pixel> TileLayer<P> {
}
}
+ /// Iterates over all tiles, together with their indices.
pub fn enumerate_tiles(
&self,
) -> impl Iterator<Item = (u64, u64, &ImageBuffer<P, Vec<P::Subpixel>>)> {
self.tiles.iter().map(|((x, y), t)| (*x, *y, t))
}
+ /// Returns a mutable reference to the given tile.
+ ///
+ /// This allocates a new tile if the requested tile does not yet exist.
pub fn tile_mut(&mut self, tile_x: u64, tile_y: u64) -> &mut ImageBuffer<P, Vec<P::Subpixel>> {
self.tiles.entry((tile_x, tile_y)).or_insert_with(|| {
ImageBuffer::from_pixel(TILE_WIDTH as u32, TILE_HEIGHT as u32, self.default_pixel)
})
}
- /// Enumerate all pixels that are explicitely set in this layer.
+ /// Enumerate all pixels that are allocated.
+ ///
+ /// This provides access to the pixel and its coordinates.
pub fn enumerate_pixels(&self) -> impl Iterator<Item = (u64, u64, &P)> {
self.tiles.iter().flat_map(|((tx, ty), tile)| {
tile.enumerate_pixels().map(move |(x, y, p)| {
@@ -63,10 +77,12 @@ impl<P: Pixel> TileLayer<P> {
})
}
+ /// Iterate over all pixels that are allocated.
pub fn pixels(&self) -> impl Iterator<Item = &P> {
self.enumerate_pixels().map(|x| x.2)
}
+ /// Returns the number of allocated tiles.
pub fn tile_count(&self) -> usize {
self.tiles.len()
}
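
To make the lazy-allocation behaviour described above concrete, here is a small sketch using only the public API (assuming the crate is used as `hittekaart`, as exposed by lib.rs):

```rust
use hittekaart::layer::TileLayer;
use image::Luma;

fn demo_lazy_allocation() {
    // A grayscale layer with a black default pixel; no tiles are allocated yet.
    let mut layer: TileLayer<Luma<u8>> = TileLayer::from_pixel(Luma([0u8]));
    assert_eq!(layer.tile_count(), 0);

    // Requesting a tile mutably allocates it on demand, pre-filled with the default pixel.
    let tile = layer.tile_mut(3, 7);
    *tile.get_pixel_mut(10, 20) = Luma([255u8]);
    assert_eq!(layer.tile_count(), 1);
}
```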
@@ -78,8 +94,8 @@ impl<P: Pixel> TileLayer<P> {
///
/// The top-left pixel of `source` is copied to `(x, y)`.
///
- /// This method is more efficient than repeatedly calling [`get_pixel_mut`], as it groups
- /// pixels by tile and only does one tile lookup.
+ /// This method is more efficient than copying pixels one by one, as it groups them by tile and
+ /// only does a single tile lookup per group.
pub fn blit_nonzero(&mut self, x: u64, y: u64, source: &ImageBuffer<P, Vec<P::Subpixel>>) {
let zero = zero_pixel::<P>();
let source_width = u64::from(source.width());
@@ -114,6 +130,7 @@ where
P: Pixel + Send,
P::Subpixel: Send,
{
+ /// Turns this lazy tile layer into a parallel iterator over its tiles.
pub fn into_parallel_tiles(
self,
) -> impl ParallelIterator<Item = (u64, u64, ImageBuffer<P, Vec<P::Subpixel>>)> {
@@ -121,11 +138,15 @@ where
}
}
+/// Saves the given image buffer to the given path.
pub fn compress_png<P: AsRef<Path>>(image: &RgbaImage, path: P) -> Result<()> {
let outstream = BufWriter::new(File::create(path)?);
compress_png_stream(image, outstream)
}
+/// Saves the given image buffer to the given stream.
+///
+/// Note that this uses the best compression available.
pub fn compress_png_stream<W: Write>(image: &RgbaImage, outstream: W) -> Result<()> {
let encoder =
PngEncoder::new_with_quality(outstream, CompressionType::Best, FilterType::Adaptive);
@@ -135,6 +156,7 @@ pub fn compress_png_stream<W: Write>(image: &RgbaImage, outstream: W) -> Result<
Ok(())
}
+/// Encodes the given image buffer and returns its data as a vector.
pub fn compress_png_as_bytes(image: &RgbaImage) -> Result<Vec<u8>> {
let mut buffer = Vec::new();
compress_png_stream(image, &mut buffer)?;
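
The three `compress_png*` helpers cover the usual targets: a file, an arbitrary writer, and an in-memory buffer. A hedged sketch of how they might be called (assuming `Result` here is `color_eyre::Result`, as elsewhere in the crate):

```rust
use hittekaart::layer;
use image::RgbaImage;

fn save_blank_tile() -> color_eyre::Result<()> {
    // A blank 256×256 RGBA tile.
    let tile = RgbaImage::new(256, 256);

    // Write the PNG straight to disk ...
    layer::compress_png(&tile, "tile.png")?;

    // ... or keep the encoded bytes in memory, e.g. for the SQLite storage backend.
    let bytes = layer::compress_png_as_bytes(&tile)?;
    assert!(!bytes.is_empty());
    Ok(())
}
```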
diff --git a/src/lib.rs b/src/lib.rs
index 670678f..d6a6a46 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1,4 +1,10 @@
-//! This is a stub library for `hittekaart` to expose the functions for benchmarking.
+//! `hittekaart` is a program to generate heatmaps from GPX tracks.
+//!
+//! Note that this crate is not meant to be used as a general-purpose library. Instead, use the
+//! command line program that is included in this crate.
+//!
+//! This library therefore contains an API that is tailored to the use case of the `hittekaart`
+//! binary.
pub mod gpx;
pub mod layer;
pub mod renderer;
diff --git a/src/renderer.rs b/src/renderer.rs
index 74a321c..d7aefe4 100644
--- a/src/renderer.rs
+++ b/src/renderer.rs
@@ -1,3 +1,11 @@
+//! Actual rendering functions for heatmaps.
+//!
+//! We begin the rendering by using [`render_heatcounter`] to turn a list of GPX tracks into a
+//! [`HeatCounter`], which is basically a grayscale heatmap, where each pixel represents the number
+//! of tracks that goes through this pixel.
+//!
+//! We then render the colored heatmap tiles using [`lazy_colorization`], which provides us with
+//! colorful PNG data.
use std::thread;
use color_eyre::{eyre::Result, Report};
@@ -10,13 +18,18 @@ use super::{
layer::{self, TileLayer},
};
+/// Represents a fully rendered tile.
#[derive(Debug, Clone)]
pub struct RenderedTile {
+ /// The `x` coordinate of the tile.
pub x: u64,
+ /// The `y` coordinate of the tile.
pub y: u64,
+ /// The encoded (PNG) image data, ready to be saved to disk.
pub data: Vec<u8>,
}
+/// Type for the intermediate heat counters.
pub type HeatCounter = TileLayer<Luma<u8>>;
fn render_circle<P: Pixel>(layer: &mut TileLayer<P>, center: (u64, u64), radius: u64, pixel: P) {
@@ -138,7 +151,11 @@ fn colorize_tile(tile: &ImageBuffer<Luma<u8>, Vec<u8>>, max: u32) -> RgbaImage {
/// Lazily colorizes a [`HeatCounter`] by colorizing it tile-by-tile and saving a tile before
/// rendering the next one.
///
-/// This has a way lower memory usage than [`colorize_heatcounter`].
+/// This function calls the given callback with each rendered tile; the callback is responsible
+/// for saving it. If the callback returns an `Err(...)`, the error is passed through.
+///
+/// Note that this function internally uses `rayon` for parallelization. If you want to limit the
+/// number of threads used, set up the global [`rayon::ThreadPool`] first.
pub fn lazy_colorization<F: FnMut(RenderedTile) -> Result<()> + Send>(
layer: HeatCounter,
mut save_callback: F,
@@ -177,6 +194,10 @@ pub fn lazy_colorization<F: FnMut(RenderedTile) -> Result<()> + Send>(
}
/// Renders the heat counter for the given zoom level and track points.
+///
+/// The given callback will be called when a track has been rendered and merged into the
+/// accumulator, to allow for UI feedback. The passed parameter is the number of tracks that have
+/// been rendered since the last call.
pub fn render_heatcounter<F: Fn(usize) + Send + Sync>(
zoom: u32,
tracks: &[Vec<Coordinates>],
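
Putting the module docs together, the rendering pipeline roughly looks like the sketch below. The diff cuts off the full parameter lists of `render_heatcounter` and `lazy_colorization`, so the exact signatures and return types are assumptions here:

```rust
use hittekaart::{gpx::Coordinates, renderer, storage::Storage};

fn render_zoom_level<S: Storage + Send>(
    zoom: u32,
    tracks: &[Vec<Coordinates>],
    storage: &mut S,
) -> color_eyre::Result<()> {
    // Step 1: accumulate per-pixel track counts into the grayscale HeatCounter.
    let heatcounter = renderer::render_heatcounter(zoom, tracks, |done| {
        println!("{done} more track(s) rendered");
    });

    // Step 2: colorize tile by tile; the callback is responsible for persisting each tile.
    renderer::lazy_colorization(heatcounter, |tile| {
        storage.store(zoom, tile.x, tile.y, &tile.data)
    })?;
    Ok(())
}
```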
diff --git a/src/storage.rs b/src/storage.rs
index 51a418e..9e6b270 100644
--- a/src/storage.rs
+++ b/src/storage.rs
@@ -1,3 +1,9 @@
+//! Abstractions over different storage backends.
+//!
+//! The main trait to use here is [`Storage`], which provides the necessary interface to store
+//! tiles. Usually you want to work with a `dyn Storage` and pick a concrete implementation
+//! (either [`Folder`] or [`Sqlite`]) depending on the command line flags or similar.
use color_eyre::{
eyre::{bail, WrapErr},
Result,
@@ -9,19 +15,40 @@ use std::{
path::{Path, PathBuf},
};
+/// The trait that provides the interface for storing tiles.
pub trait Storage {
+ /// Prepare the storage.
+ ///
+ /// This can be used to e.g. ensure the directory exists, or to create the database.
fn prepare(&mut self) -> Result<()>;
+ /// Prepare for a given zoom level.
+ ///
+ /// This function is called once per zoom, and can be used e.g. to set up the inner folder for
+ /// the level. This avoids unnecessary syscalls that would occur if the setup were done in
+ /// [`Storage::store`] instead.
fn prepare_zoom(&mut self, zoom: u32) -> Result<()>;
+ /// Store the given data for the tile.
fn store(&mut self, zoom: u32, x: u64, y: u64, data: &[u8]) -> Result<()>;
+ /// Finish the storing operation.
+ ///
+ /// This can flush any buffers, commit database changes, and so on.
fn finish(&mut self) -> Result<()>;
}
+/// Folder-based storage.
+///
+/// This stores the tiles according to the [slippy map
+/// tilenames](https://wiki.openstreetmap.org/wiki/Slippy_map_tilenames).
#[derive(Debug)]
pub struct Folder {
base_dir: PathBuf,
}
impl Folder {
+ /// Create a new folder based storage.
+ ///
+ /// The given directory is the "root" directory, so a tile would be saved as
+ /// `base_dir/{zoom}/{x}/{y}.png`.
pub fn new(base_dir: PathBuf) -> Self {
Folder { base_dir }
}
@@ -70,12 +97,29 @@ impl Storage for Folder {
}
}
+/// SQLite based storage.
+///
+/// This stores tiles in a SQLite database. The database will have a single table:
+///
+/// ```sql
+/// CREATE TABLE tiles (
+/// zoom INTEGER,
+/// x INTEGER,
+/// y INTEGER,
+/// data BLOB,
+/// PRIMARY KEY (zoom, x, y)
+/// );
+/// ```
#[derive(Debug)]
pub struct Sqlite {
connection: Connection,
}
impl Sqlite {
+ /// Create a new SQLite backed tile store.
+ ///
+ /// The database will be saved at the given location. Note that the database must not yet
+ /// exist.
pub fn connect<P: AsRef<Path>>(file: P) -> Result<Self> {
let path = file.as_ref();
if fs::metadata(path).is_ok() {
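
Finally, a sketch of selecting a storage backend at runtime, as the module documentation above suggests. `use_sqlite` and `max_zoom` stand in for command line flags and are hypothetical:

```rust
use hittekaart::storage::{Folder, Sqlite, Storage};

fn open_storage(use_sqlite: bool) -> color_eyre::Result<Box<dyn Storage>> {
    let storage: Box<dyn Storage> = if use_sqlite {
        // Fails if the database file already exists, per the docstring above.
        Box::new(Sqlite::connect("tiles.sqlite")?)
    } else {
        // Tiles end up as tiles/{zoom}/{x}/{y}.png.
        Box::new(Folder::new("tiles".into()))
    };
    Ok(storage)
}

fn store_all(storage: &mut dyn Storage, max_zoom: u32) -> color_eyre::Result<()> {
    storage.prepare()?;
    for zoom in 0..=max_zoom {
        storage.prepare_zoom(zoom)?;
        // ... render the zoom level and call storage.store(zoom, x, y, &data) per tile ...
    }
    storage.finish()
}
```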