// cargo-deps: hsl, image, csv = "1.0.0-beta.4", serde, serde_derive
extern crate csv;
extern crate hsl;
extern crate image;
extern crate serde;
#[macro_use]
extern crate serde_derive;

use std::collections::BTreeSet;
use std::io;

#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct Tile {
    tile_x: usize,
    tile_y: usize,
    create_time: u64,
    modify_count: u64,
    modify_time: u64,
    access_count: u64,
    access_time: u64,
}

// Torus size (in tiles).
const TORUS_SZ: u32 = 512;
// Tile width/height (in pixels).
const TILE_W: u32 = 8;
const TILE_H: u32 = 5;

fn main() {
    // Read the `torus.csv` data (from stdin) into a 2D array of `(access, modify)` counts.
    // Also track the distinct values we see in `BTreeSet`s (for ordering).
    let mut tiles = [[(0, 0); TORUS_SZ as usize]; TORUS_SZ as usize];
    let mut ord_a = BTreeSet::new();
    let mut ord_m = BTreeSet::new();
    for result in csv::Reader::from_reader(io::stdin()).deserialize() {
        let Tile { access_count: a, modify_count: m, tile_x: x, tile_y: y, .. } = result.unwrap();
        // Ignore tile (0, 0); its values are too large.
        if (x, y) != (0, 0) {
            ord_a.insert(a);
            ord_m.insert(m);
        }
        // Handle old migrated values.
        if a == 0 && m > 0 {
            tiles[y][x] = (1, 0);
        } else {
            tiles[y][x] = (a, m);
        }
    }

    // Chunk the values by splitting them at the largest gaps.
    #[derive(PartialEq, Eq, PartialOrd, Ord)]
    struct Chunk {
        start: u64,
    }
    fn chunks(ord: BTreeSet<u64>, chunks: usize) -> Vec<Chunk> {
        let mut gaps = BTreeSet::new();
        let mut prev = 0;
        let mut run = 0;
        for &x in &ord {
            let gap = x - prev;
            if gap > 100 {
                // Favor gaps with longer runs before them.
                gaps.insert((gap + run * 2, x));
                run = 0;
            } else {
                run += 1;
            }
            prev = x;
        }
        let mut chunks: Vec<_> = gaps.iter()
            .rev()
            .take(chunks)
            .map(|&(_, start)| Chunk { start })
            .collect();
        chunks.sort();
        chunks
    }
    let chunks_a = chunks(ord_a, 15);
    let chunks_m = chunks(ord_m, 15);

    // Compose the heatmap image in-memory from the 2D array.
    let mut heatmap = image::ImageBuffer::new(TORUS_SZ * TILE_W, TORUS_SZ * TILE_H);
    let red = hsl::HSL::from_rgb(&[255, 0, 0]).h;
    // let green = hsl::HSL::from_rgb(&[0, 255, 0]).h;
    // let blue = hsl::HSL::from_rgb(&[0, 0, 255]).h;
    let yellow = hsl::HSL::from_rgb(&[255, 255, 0]).h;
    for y in 0..TORUS_SZ {
        for x in 0..TORUS_SZ {
            // Normalize a value to the [0, 1] interval based on chunks.
            fn chunk_normalize(chunks: &[Chunk], x: u64) -> f64 {
                if x == 0 {
                    0.0
                } else {
                    // Find the chunk x is in.
                    let i = chunks.iter().position(|c| c.start > x);
                    let mut v = i.unwrap_or(chunks.len()) as f64;
                    // Add the intra-chunk linear offset, measured from the
                    // previous chunk's start (or 0 for the first chunk).
                    if let Some(i) = i {
                        let start = if i > 0 { chunks[i - 1].start } else { 0 };
                        v += (x - start) as f64 / (chunks[i].start - start) as f64;
                    }
                    // Normalize so all the chunks fit in [0, 1].
                    (v / chunks.len() as f64).min(1.0)
                }
            }

            // Get and normalize the values (the 0.2 exponent lifts small
            // values, so faint activity stays visible).
            let (a, m) = tiles[y as usize][x as usize];
            let a = chunk_normalize(&chunks_a, a).powf(0.2);
            let m = chunk_normalize(&chunks_m, m).powf(0.2);

            // access => luminosity
            let l = (a + m).min(1.0) * 0.7;
            // modify => saturation + hue (grey -> red -> yellow)
            let h = red * (1.0 - m) + yellow * m;
            let s = m;
            let (r, g, b) = hsl::HSL { h, s, l }.to_rgb();
            let rgb = image::Rgb([r, g, b]);

            // Shift coordinates so tile (0, 0) ends up near the center of the
            // image (the torus wraps around).
            let coord = |x, dx, px| ((x * 2 + TORUS_SZ + 1) * px / 2 + dx) % (TORUS_SZ * px);
            for dy in 0..TILE_H {
                let y = coord(y, dy, TILE_H);
                for dx in 0..TILE_W {
                    let x = coord(x, dx, TILE_W);
                    heatmap.put_pixel(x, y, rgb);
                }
            }
        }
    }

    // Save the heatmap image.
    heatmap.save("heatmap.png").unwrap();
}
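
// Usage sketch (an assumption, not part of the original source): the
// `cargo-deps:` header above is the convention used by the `cargo-script`
// tool, so a run would look roughly like the line below. The file name
// `heatmap.rs` is hypothetical; the CSV column names follow from the
// camelCase-renamed `Tile` struct.
//
//     cargo script heatmap.rs < torus.csv
//
//     # expected CSV header:
//     # tileX,tileY,createTime,modifyCount,modifyTime,accessCount,accessTime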