//! Checks that the text between tags stays unchanged, emitting a warning otherwise,
//! which allows asserting that code in different places across the codebase is in sync.
//!
//! This works by hashing the text between the tags and storing the hash in tidy.
//!
//! Usage:
//!
//! some.rs:
//! // tidy-ticket-foo
//! const FOO: usize = 42;
//! // tidy-ticket-foo
//!
//! some.sh:
//! # tidy-ticket-foo
//! export FOO=42
//! # tidy-ticket-foo
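//!
//! Both spans are then registered in `TIDY_WATCH_LIST` below under the same tag.
//! A hypothetical registration sketch (the hash strings here are placeholders,
//! not real digests):
//!
//! ("some.rs", "<md5 of the span in some.rs>", "tidy-ticket-foo"),
//! ("some.sh", "<md5 of the span in some.sh>", "tidy-ticket-foo"),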
use md5::{Digest, Md5};
use std::fs;
use std::path::Path;

#[cfg(test)]
mod tests;

/// Returns the hash of the source text between two occurrences of the tag,
/// ignoring the lines on which the tag itself appears.
///
/// Expectations:
/// - the tag does not span multiple lines
/// - the source has at least two occurrences of the tag (occurrences beyond the second are ignored)
fn span_hash(source: &str, tag: &str, bad: &mut bool) -> Result<String, ()> {
    let start_idx = match source.find(tag) {
        Some(idx) => idx,
        None => return Err(tidy_error!(bad, "opening tag {} should exist in provided text", tag)),
    };
    let end_idx = {
        let end = match source[start_idx + tag.len()..].find(tag) {
            // index from the start of `source`
            Some(idx) => start_idx + tag.len() + idx,
            None => return Err(tidy_error!(bad, "closing tag {} should exist in provided text", tag)),
        };
        // The line containing the closing tag can have other text before the tag;
        // ignore it by backing up to the previous line ending.
        //
        // FIXME: what if the line ending is \r\n? In that case the \r will be hashed too.
        let offset = source[start_idx..end].rfind('\n').unwrap();
        start_idx + offset
    };

    let mut hasher = Md5::new();

    source[start_idx..end_idx]
        .lines()
        // skip the first line, which contains the opening tag
        .skip(1)
        // hash the remaining lines, ignoring trailing whitespace
        .for_each(|line| {
            let trimmed = line.trim_end();
            hasher.update(trimmed);
        });
    Ok(format!("{:x}", hasher.finalize()))
}
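
// A minimal illustrative sketch (separate from the real suite in `tests`): two spans
// that contain the same code between the same tag should hash identically, even when
// the surrounding tag comments and trailing whitespace differ.
#[cfg(test)]
mod span_hash_example {
    use super::span_hash;

    #[test]
    fn equivalent_spans_hash_equal() {
        let mut bad = false;
        let rust_src = "// tidy-ticket-foo\nconst FOO: usize = 42;\n// tidy-ticket-foo\n";
        let shell_src = "# tidy-ticket-foo\nconst FOO: usize = 42;   \n# tidy-ticket-foo\n";
        assert_eq!(
            span_hash(rust_src, "tidy-ticket-foo", &mut bad).unwrap(),
            span_hash(shell_src, "tidy-ticket-foo", &mut bad).unwrap(),
        );
    }
}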

fn check_entry(entry: &ListEntry<'_>, bad: &mut bool, root_path: &Path) {
    let file = fs::read_to_string(root_path.join(Path::new(entry.0))).unwrap();
    let actual_hash = span_hash(&file, entry.2, bad).unwrap();
    if actual_hash != entry.1 {
        // Write the general description of the watcher error only once.
        // This doesn't work if there were previous errors of other kinds.
        if !*bad {
            tidy_error!(
                bad,
                "Mismatched hashes for tidy watcher found.\n\
                Check src/tools/tidy/src/watcher.rs, find the tag/hash in the TIDY_WATCH_LIST \
                and verify that the sources for the given group of tags are in sync. \
                If they are, update the hash."
            )
        }
        tidy_error!(
            bad,
            "hash mismatch for tag `{}` in path `{}`:\n actual: `{}`, expected: `{}`\n",
            entry.2,
            entry.0,
            actual_hash,
            entry.1
        );
    }
}

/// (path, hash, tag)
type ListEntry<'a> = (&'a str, &'a str, &'a str);

/// List of tags to watch, along with paths and hashes
#[rustfmt::skip]
const TIDY_WATCH_LIST: &[ListEntry<'_>] = &[
    ("src/tools/opt-dist/src/environment/windows.rs", "dcad53f163a2775164b5d2faaa70b653", "tidy-ticket-perf-commit"),
    ("src/ci/docker/host-x86_64/dist-x86_64-linux/Dockerfile", "76c8d9783e38e25a461355f82fcd7955", "tidy-ticket-perf-commit"),
];

pub fn check(root_path: &Path, bad: &mut bool) {
    for entry in TIDY_WATCH_LIST {
        check_entry(entry, bad, root_path);
    }
}