diff --git a/.travis.yml b/.travis.yml
index 8f11202..f9d66a0 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -7,6 +7,9 @@ sudo: false
 script:
   - cargo build --verbose
   - cargo test --verbose
+  - cargo package
+  - cd target/package/unicode-normalization-*
+  - RUSTFLAGS="--cfg minimal_tests" cargo test --verbose
 notifications:
   email:
     on_success: never
diff --git a/Cargo.toml b/Cargo.toml
index e4727c7..9c82408 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -18,7 +18,7 @@
 Decomposition and Recomposition, as described in
 Unicode Standard Annex #15.
 """
-exclude = [ "target/*", "Cargo.lock", "scripts/tmp", "*.txt", "src/normalization_tests.rs", "src/test.rs" ]
+exclude = [ "target/*", "Cargo.lock", "scripts/tmp", "*.txt", "src/normalization_tests.rs" ]
 
 [dependencies]
 smallvec = "0.6"
\ No newline at end of file
diff --git a/README.md b/README.md
index 591849d..d8d4fb3 100644
--- a/README.md
+++ b/README.md
@@ -33,3 +33,26 @@ to your `Cargo.toml`:
 [dependencies]
 unicode-normalization = "0.1.8"
 ```
+
+## Linux Vendors / Downstream
+As is, tests won't work on the published crate, as important
+corpus data required for fully testing functionality otherwise
+bloats the size of the crate.
+
+Tests aren't hugely meaningful without this, but there are two
+workarounds:
+
+```bash
+RUSTFLAGS="--cfg minimal_tests" cargo test
+```
+
+This will make the crate compile, and some arbitrary set of lower
+quality tests pass.
+
+```bash
+python scripts/unicode.py
+cp ./normalization_tests.rs src/
+```
+
+This will generate the full corpus required for tests to work,
+without needing to pass any special flags.
diff --git a/src/lib.rs b/src/lib.rs
index 7a4b4e0..9aebcbb 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -75,7 +75,7 @@ mod tables;
 #[cfg(test)]
 mod test;
 
-#[cfg(test)]
+#[cfg(all(test,not(minimal_tests)))]
 mod normalization_tests;
 
 /// Methods for composing and decomposing characters.
diff --git a/src/stream_safe.rs b/src/stream_safe.rs
index 38bb42c..25a7ae1 100644
--- a/src/stream_safe.rs
+++ b/src/stream_safe.rs
@@ -111,6 +111,7 @@ mod tests {
         classify_nonstarters,
     };
     use std::char;
+    #[cfg(not(minimal_tests))]
     use normalization_tests::NORMALIZATION_TESTS;
     use normalize::decompose_compatible;
     use lookups::canonical_combining_class;
@@ -119,6 +120,7 @@ mod tests {
         StreamSafe::new(s.chars()).collect()
     }
 
+    #[cfg(not(minimal_tests))]
     #[test]
     fn test_normalization_tests_unaffected() {
         for test in NORMALIZATION_TESTS {
diff --git a/src/test.rs b/src/test.rs
index 4c7d2eb..c0864ce 100644
--- a/src/test.rs
+++ b/src/test.rs
@@ -95,6 +95,7 @@ fn test_nfkc() {
     t!("a\u{300}\u{305}\u{315}\u{5ae}b", "\u{e0}\u{5ae}\u{305}\u{315}b");
 }
 
+#[cfg(not(minimal_tests))]
 #[test]
 fn test_official() {
     use normalization_tests::NORMALIZATION_TESTS;
@@ -158,7 +159,7 @@ fn test_official() {
         }
     }
 }
-
+#[cfg(not(minimal_tests))]
 #[test]
 fn test_quick_check() {
     use normalization_tests::NORMALIZATION_TESTS;