path: root/ascii-town-heatmap.rs
// cargo-deps: hsl, image, csv = "1.0.0-beta.4", serde, serde_derive
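//
// Usage sketch (assumes the `cargo script` runner, which understands the
// `cargo-deps` comment above; adjust to however this file is normally run):
//
//     cargo script ascii-town-heatmap.rs < torus.csv
//
// The script reads the per-tile stats CSV on stdin and writes `heatmap.png`
// to the current directory.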

extern crate csv;
extern crate hsl;
extern crate image;
extern crate serde;
#[macro_use]
extern crate serde_derive;

use std::collections::BTreeSet;
use std::io;

#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct Tile {
    tile_x: usize,
    tile_y: usize,
    create_time: u64,
    modify_count: u64,
    modify_time: u64,
    access_count: u64,
    access_time: u64,
}
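
// With `rename_all = "camelCase"`, the CSV is expected to carry headers such as
// `tileX,tileY,createTime,modifyCount,modifyTime,accessCount,accessTime`;
// column order shouldn't matter, since the csv crate matches fields by header name.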

// Torus size (in tiles).
const TORUS_SZ: u32 = 512;

// Tile width/height (in pixels).
const TILE_W: u32 = 8;
const TILE_H: u32 = 5;
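// The output image is therefore TORUS_SZ * TILE_W = 4096 pixels wide and
// TORUS_SZ * TILE_H = 2560 pixels tall.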

fn main() {
    // Read `torus.csv` (fed in on stdin) into a 2D array of `(access, modify)` counts.
    // Also track the distinct values we see in `BTreeSet`s (which keep them ordered).
    let mut tiles = [[(0, 0); TORUS_SZ as usize]; TORUS_SZ as usize];
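    // (A 512 x 512 array of `(u64, u64)` tuples is about 4 MiB, held on the stack.)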
    let mut ord_a = BTreeSet::new();
    let mut ord_m = BTreeSet::new();
    for result in csv::Reader::from_reader(io::stdin()).deserialize() {
        let Tile {
            access_count: a,
            modify_count: m,
            tile_x: x,
            tile_y: y,
            ..
        } = result.unwrap();

        // Ignore tile (0, 0): its values are disproportionately large.
        if (x, y) != (0, 0) {
            ord_a.insert(a);
            ord_m.insert(m);
        }

        // Handle old migrated values: modifications with no recorded accesses
        // count as a single access and no modifications.
        if a == 0 && m > 0 {
            tiles[y][x] = (1, 0);
        } else {
            tiles[y][x] = (a, m);
        }
    }

    // Chunk the values by splitting them at the largest gaps.
    #[derive(PartialEq, Eq, PartialOrd, Ord)]
    struct Chunk {
        start: u64,
    }
    fn chunks(ord: BTreeSet<u64>, chunks: usize) -> Vec<Chunk> {
        let mut gaps = BTreeSet::new();
        let mut prev = 0;
        let mut run = 0;
        for &x in &ord {
            let gap = x - prev;
            if gap > 100 {
                // Favor gaps with longer runs before them.
                gaps.insert((gap + run * 2, x));
                run = 0;
            } else {
                run += 1;
            }
            prev = x;
        }
        let mut chunks: Vec<_> = gaps.iter()
            .rev()
            .take(chunks)
            .map(|&(_, start)| Chunk { start })
            .collect();
        chunks.sort();
        chunks
    }
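    // For example (made-up data): with ord = {1, 2, 3, 500, 501, 10_000} and
    // chunks = 2, the gaps before 500 and 10_000 are the largest, so the
    // returned chunk starts are [500, 10_000].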
    let chunks_a = chunks(ord_a, 15);
    let chunks_m = chunks(ord_m, 15);

    // Compose the heatmap image in-memory from the 2D array.
    let mut heatmap = image::ImageBuffer::new(TORUS_SZ * TILE_W, TORUS_SZ * TILE_H);
    let red = hsl::HSL::from_rgb(&[255, 0, 0]).h;
    // let green = hsl::HSL::from_rgb(&[0, 255, 0]).h;
    // let blue = hsl::HSL::from_rgb(&[0, 0, 255]).h;
    let yellow = hsl::HSL::from_rgb(&[255, 255, 0]).h;
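    // (In HSL, red is hue 0 and yellow hue 60, so `h` below sweeps from red
    // towards yellow as `m` goes from 0 to 1.)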
    for y in 0..TORUS_SZ {
        for x in 0..TORUS_SZ {
            // Normalize a value to the [0, 1] interval based on chunks.
            fn chunk_normalize(chunks: &[Chunk], x: u64) -> f64 {
                if x == 0 {
                    0.0
                } else {
                    // Find the chunk x is in.
                    let i = chunks.iter().position(|c| c.start > x);
                    let mut v = i.unwrap_or(chunks.len()) as f64;

                    // Add the intra-chunk linear offset.
                    if let Some(i) = i {
                        // Interpolate from the previous chunk's start (0 for the first chunk).
                        let start = if i > 0 {
                            chunks[i - 1].start
                        } else {
                            0
                        };
                        v += (x - start) as f64 / (chunks[i].start - start) as f64;
                    }

                    // Normalize so all the chunks fit in [0, 1].
                    (v / chunks.len() as f64).min(1.0)
                }
            }
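            // E.g. with chunk starts [10, 100, 1000] (made-up numbers), a value
            // of 500 lands in the third chunk: v = 2 + (500 - 100) / (1000 - 100),
            // which normalizes to roughly 0.81.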
            // Get and normalize the values.
            let (a, m) = tiles[y as usize][x as usize];
            let a = chunk_normalize(&chunks_a, a).powf(0.2);
            let m = chunk_normalize(&chunks_m, m).powf(0.2);

            // access (plus modify) => luminosity
            let l = (a + m).min(1.0) * 0.7;
            // modify => saturation + hue (grey -> red -> yellow)
            let h = red * (1.0 - m) + yellow * m;
            let s = m;

            let (r, g, b) = hsl::HSL { h, s, l }.to_rgb();
            let rgb = image::Rgb([r, g, b]);

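            // `coord` recentres the torus so that tile (0, 0) lands roughly in
            // the middle of the image (shifted by half the torus plus about half
            // a tile), wrapping pixel coordinates around the torus edges.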
            let coord = |x, dx, px| ((x * 2 + TORUS_SZ + 1) * px / 2 + dx) % (TORUS_SZ * px);
            for dy in 0..TILE_H {
                let y = coord(y, dy, TILE_H);
                for dx in 0..TILE_W {
                    let x = coord(x, dx, TILE_W);
                    heatmap.put_pixel(x, y, rgb);
                }
            }
        }
    }

    // Save the heatmap image.
    heatmap.save("heatmap.png").unwrap();
}