diff --git a/CHANGELOG.md b/CHANGELOG.md index 71d375a7ff..9b4defa74a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,38 @@ # Changelog +## 0.16.0 (unreleased) + +### Breaking + +- Switch to pulldown-cmark anchor system rather than ours, some (very niche) edge cases are not supported anymore, you can +also specify classes on headers now +- Now outputs empty taxonomies instead of ignoring them +- Unify all pages sorting variable names in templates to `lower`/`higher` in order to make it easy to re-use templates and it +was becoming hard to come up with names to be honest + +### Other +- Fix markup for fenced code with linenos +- Make `ignored_content` work with nested paths and directories +- `zola serve/build` can now run from anywhere in a zola directory +- Add XML support to `load_data` +- Add YAML support to `load_data` +- `skip_prefixes` is now checked before parsing external link URLs +- Add `render` attribute to taxonomies configuration in `config.toml`, for when you don't want to render +any pages related to that taxonomy +- Serialize `transparent` field from front-matter of sections +- Use Zola Tera instance for markdown filter: this means you have access to the same Tera functions as in shortcodes +- Ignore sections with `render=false` when looking for path collisions +- Add support for backlinks +- Add a warning mode for internal/external link checking in case you don't want zola to stop the build on invalid links +- Always follow symlinks when loading the site/assets +- Add `rel="alternate"` to Atom post links +- Fix taxonomy `current_path` +- Fix feed location for taxonomies not in the default language +- Add `title_bytes` sorting method +- Add `insert_anchor = "heading"`, which allows users to use the entire heading as a link +- Apply orientation transformation based on EXIF data +- Fix generated homepages not having their `translations` filled properly + ## 0.15.3 (2022-01-23) - Fix shortcodes not being rendered in code blocks diff --git 
a/Cargo.lock b/Cargo.lock index f595977aef..ea8f3bd95a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -14,6 +14,17 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aae1277d39aeec15cb388266ecc24b11c80469deae6067e17a1a7aa9e5c1f234" +[[package]] +name = "ahash" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47" +dependencies = [ + "getrandom 0.2.7", + "once_cell", + "version_check", +] + [[package]] name = "aho-corasick" version = "0.7.18" @@ -25,28 +36,17 @@ dependencies = [ [[package]] name = "ammonia" -version = "3.1.3" +version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b74b175af97d1aecc1add0878b1cbfcbf3bd4c22d7713eeb6d597da23e29bc0d" +checksum = "d5ed2509ee88cc023cccee37a6fab35826830fe8b748b3869790e7720c2c4a74" dependencies = [ "html5ever", - "lazy_static", "maplit", - "markup5ever_rcdom", - "matches", + "once_cell", "tendril", "url", ] -[[package]] -name = "ansi_term" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2" -dependencies = [ - "winapi 0.3.9", -] - [[package]] name = "any_ascii" version = "0.1.7" @@ -55,9 +55,9 @@ checksum = "70033777eb8b5124a81a1889416543dddef2de240019b674c81285a2635a7e1e" [[package]] name = "anyhow" -version = "1.0.53" +version = "1.0.58" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94a45b455c14666b85fc40a019e8ab9eb75e3a124e05494f5397122bc9eb06e0" +checksum = "bb07d2053ccdbe10e2af2995a2f116c1330396493dc1269f6a91d0ae82e19704" [[package]] name = "arrayvec" @@ -76,9 +76,9 @@ checksum = "23b62fc65de8e4e7f52534fb52b0f3ed04746ae267519eef2a83941e8085068b" [[package]] name = "assert-json-diff" -version = "2.0.1" +version = "2.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "50f1c3703dd33532d7f0ca049168930e9099ecac238e23cf932f3a69c42f06da" +checksum = "47e4f2b81832e72834d7518d8487a0396a28cc408186a2e8854c0f98011faf12" dependencies = [ "serde", "serde_json", @@ -97,9 +97,9 @@ dependencies = [ [[package]] name = "autocfg" -version = "1.0.1" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a" +checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" [[package]] name = "base64" @@ -116,12 +116,30 @@ dependencies = [ "serde", ] +[[package]] +name = "bit_field" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dcb6dd1c2376d2e096796e234a70e17e94cc2d5d54ff8ce42b28cef1d0d359a4" + [[package]] name = "bitflags" version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" +[[package]] +name = "bitvec" +version = "0.19.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55f93d0ef3363c364d5976646a38f04cf67cfe1d4c8d160cdea02cab2c116b33" +dependencies = [ + "funty", + "radium", + "tap", + "wyz", +] + [[package]] name = "block-buffer" version = "0.7.3" @@ -136,9 +154,9 @@ dependencies = [ [[package]] name = "block-buffer" -version = "0.9.0" +version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4" +checksum = "0bf7fe51849ea569fd452f37822f606a5cabb684dc918707a0193fd4664ff324" dependencies = [ "generic-array 0.14.5", ] @@ -164,11 +182,17 @@ dependencies = [ "serde", ] +[[package]] +name = "build_const" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4ae4235e6dac0694637c763029ecea1a2ec9e4e06ec2729bd21ba4d9c863eb7" + [[package]] name = "bumpalo" -version = "3.9.1" +version = "3.10.0" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4a45a46ab1f2412e53d3a0ade76ffad2025804294569aae387231a0cd6e0899" +checksum = "37ccbd214614c6783386c1af30caf03192f17891059cecc394b4fb119e363de3" [[package]] name = "byte-tools" @@ -184,15 +208,15 @@ checksum = "f861d9ce359f56dbcb6e0c2a1cb84e52ad732cadb57b806adeb3c7668caccbd8" [[package]] name = "bytecount" -version = "0.6.2" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72feb31ffc86498dacdbd0fcebb56138e7177a8cc5cea4516031d15ae85a742e" +checksum = "2c676a478f63e9fa2dd5368a42f28bba0d6c560b775f38583c8bbaa7fcd67c9c" [[package]] name = "bytemuck" -version = "1.7.3" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "439989e6b8c38d1b6570a384ef1e49c8848128f5a97f3914baef02920842712f" +checksum = "c53dfa917ec274df8ed3c572698f381a24eef2efba9492d797301b72b6db408a" [[package]] name = "byteorder" @@ -218,9 +242,9 @@ checksum = "c4872d67bab6358e59559027aa3b9157c53d9358c51423c17554809a8858e0f8" [[package]] name = "cc" -version = "1.0.72" +version = "1.0.73" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22a9137b95ea06864e018375b72adfb7db6e6f68cfc8df5a04d00288050485ee" +checksum = "2fff2a6927b3bb87f9595d67196a70493f627687a71d87a0d692242c33f58c11" dependencies = [ "jobserver", ] @@ -255,8 +279,6 @@ dependencies = [ "libc", "num-integer", "num-traits", - "serde", - "time 0.1.43", "winapi 0.3.9", ] @@ -268,7 +290,7 @@ checksum = "58549f1842da3080ce63002102d5bc954c7bc843d4f47818e642abdc36253552" dependencies = [ "chrono", "chrono-tz-build", - "phf 0.10.1", + "phf", ] [[package]] @@ -278,23 +300,56 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "db058d493fb2f65f41861bfed7e3fe6335264a9f0f92710cab5bdf01fef09069" dependencies = [ "parse-zoneinfo", - "phf 0.10.1", - "phf_codegen 0.10.0", + "phf", + "phf_codegen", ] [[package]] name = "clap" -version = 
"2.34.0" +version = "3.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0610544180c38b88101fecf2dd634b174a62eef6946f84dfc6a7127512b381c" +checksum = "ab8b79fe3946ceb4a0b1c080b4018992b8d27e9ff363644c1c9b6387c854614d" dependencies = [ - "ansi_term", "atty", "bitflags", + "clap_derive", + "clap_lex", + "indexmap", + "once_cell", "strsim", + "termcolor", "textwrap", - "unicode-width", - "vec_map", +] + +[[package]] +name = "clap_complete" +version = "3.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ead064480dfc4880a10764488415a97fdd36a4cf1bb022d372f02e8faf8386e1" +dependencies = [ + "clap", +] + +[[package]] +name = "clap_derive" +version = "3.2.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "759bf187376e1afa7b85b959e6a664a3e7a95203415dba952ad19139e798f902" +dependencies = [ + "heck", + "proc-macro-error", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "clap_lex" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2850f2f5a82cbf437dd5af4d49848fbdfc27c157c3d010345776f952765261c5" +dependencies = [ + "os_str_bytes", ] [[package]] @@ -318,23 +373,59 @@ dependencies = [ name = "config" version = "0.1.0" dependencies = [ - "chrono", "errors", - "globset", - "lazy_static", + "libs", "serde", - "serde_derive", - "syntect", - "toml", - "unic-langid", "utils", ] +[[package]] +name = "console" +version = "0.1.0" +dependencies = [ + "errors", + "libs", +] + +[[package]] +name = "console" +version = "0.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a28b32d32ca44b70c3e4acd7db1babf555fa026e385fb95f18028f88848b3c31" +dependencies = [ + "encode_unicode", + "libc", + "once_cell", + "terminal_size", + "winapi 0.3.9", +] + +[[package]] +name = "content" +version = "0.1.0" +dependencies = [ + "config", + "errors", + "libs", + "markdown", + "serde", + "tempfile", + "test-case", + 
"time", + "utils", +] + +[[package]] +name = "convert_case" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" + [[package]] name = "core-foundation" -version = "0.9.2" +version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6888e10551bb93e424d8df1d07f1a8b4fceb0001a3a4b048bfc47554946f47b3" +checksum = "194a7a9e6de53fa55116934067c844d9d749312f75c6f6d0980e8c252f8c2146" dependencies = [ "core-foundation-sys", "libc", @@ -348,27 +439,36 @@ checksum = "5827cebf4670468b8772dd191856768aedcb1b0278a04f989f7766351917b9dc" [[package]] name = "cpufeatures" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95059428f66df56b63431fdb4e1947ed2190586af5c5a8a8b71122bdf5a7f469" +checksum = "59a6001667ab124aebae2a495118e11d30984c3a653e99d86d58971708cf5e4b" dependencies = [ "libc", ] +[[package]] +name = "crc" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d663548de7f5cca343f1e0a48d14dcfb0e9eb4e079ec58883b7251539fa10aeb" +dependencies = [ + "build_const", +] + [[package]] name = "crc32fast" -version = "1.3.1" +version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2209c310e29876f7f0b2721e7e26b84aff178aa3da5d091f9bfbf47669e60e3" +checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d" dependencies = [ "cfg-if 1.0.0", ] [[package]] name = "crossbeam-channel" -version = "0.5.2" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e54ea8bc3fb1ee042f5aace6e3c6e025d3874866da222930f70ce62aceba0bfa" +checksum = "4c02a4d71819009c192cf4872265391563fd6a84c81ff2c0f2a7026ca4c1d85c" dependencies = [ "cfg-if 1.0.0", "crossbeam-utils", @@ -387,25 +487,47 @@ dependencies = [ [[package]] name = "crossbeam-epoch" -version = "0.9.6" 
+version = "0.9.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97242a70df9b89a65d0b6df3c4bf5b9ce03c5b7309019777fbde37e7537f8762" +checksum = "07db9d94cbd326813772c968ccd25999e5f8ae22f4f8d1b11effa37ef6ce281d" dependencies = [ + "autocfg", "cfg-if 1.0.0", "crossbeam-utils", - "lazy_static", "memoffset", + "once_cell", "scopeguard", ] [[package]] name = "crossbeam-utils" -version = "0.8.6" +version = "0.8.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cfcae03edb34f947e64acdb1c33ec169824e20657e9ecb61cef6c8c74dcb8120" +checksum = "7d82ee10ce34d7bc12c2122495e7593a9c41347ecdd64185af4ecf72cb1a7f83" dependencies = [ "cfg-if 1.0.0", - "lazy_static", + "once_cell", +] + +[[package]] +name = "crypto-common" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2ccfd8c0ee4cce11e45b3fd6f9d5e69e0cc62912aa6a0cb1bf4617b0eba5a12f" +dependencies = [ + "generic-array 0.14.5", + "typenum", +] + +[[package]] +name = "css-minify" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "692b185e3b7c9af96b3195f3021f53a931d896968ed2ad3fb1cdb6558b30c9ab" +dependencies = [ + "derive_more", + "indexmap", + "nom 6.1.2", ] [[package]] @@ -432,9 +554,9 @@ dependencies = [ [[package]] name = "ctrlc" -version = "3.2.1" +version = "3.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a19c6cedffdc8c03a3346d723eb20bd85a13362bb96dc2ac000842c6381ec7bf" +checksum = "b37feaa84e6861e00a1f5e5aa8da3ee56d605c9992d33e082786754828e20865" dependencies = [ "nix", "winapi 0.3.9", @@ -442,25 +564,31 @@ dependencies = [ [[package]] name = "deflate" -version = "0.8.6" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73770f8e1fe7d64df17ca66ad28994a0a623ea497fa69486e14984e715c5d174" +checksum = "c86f7e25f518f4b81808a2cf1c50996a61f5c2eb394b2393bd87f2a4780a432f" dependencies = [ "adler32", - 
"byteorder", ] [[package]] -name = "deunicode" -version = "0.4.3" +name = "derive_more" +version = "0.99.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "850878694b7933ca4c9569d30a34b55031b9b139ee1fc7b94a527c4ef960d690" +checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" +dependencies = [ + "convert_case", + "proc-macro2", + "quote", + "rustc_version", + "syn", +] [[package]] -name = "difference" -version = "2.0.0" +name = "deunicode" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "524cbf6897b527295dff137cec09ecf3a05f4fddffd7dfcd1585403449e74198" +checksum = "850878694b7933ca4c9569d30a34b55031b9b139ee1fc7b94a527c4ef960d690" [[package]] name = "digest" @@ -473,11 +601,12 @@ dependencies = [ [[package]] name = "digest" -version = "0.9.0" +version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066" +checksum = "f2fb860ca6fafa5552fb6d0e816a69c8e49f0908bf524e30a90d97c85892d506" dependencies = [ - "generic-array 0.14.5", + "block-buffer 0.10.2", + "crypto-common", ] [[package]] @@ -488,18 +617,17 @@ checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10" [[package]] name = "either" -version = "1.6.1" +version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457" +checksum = "3f107b87b6afc2a64fd13cac55fe06d6c8859f12d4b14cbcdd2c67d0976781be" [[package]] name = "elasticlunr-rs" -version = "2.3.14" +version = "3.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60eee99ae400fb1c4521ea3bd678994cb66572754d240449368e8ecd40281569" +checksum = "e6dae5cac90640734ee881bc5f21b6e5123f4e5235e52428db114abffc2391d6" dependencies = [ "jieba-rs", - "lazy_static", "lindera", "lindera-core", "regex", @@ -507,10 +635,14 @@ 
dependencies = [ "serde", "serde_derive", "serde_json", - "strum", - "strum_macros", ] +[[package]] +name = "encode_unicode" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f" + [[package]] name = "encoding" version = "0.2.33" @@ -577,21 +709,47 @@ checksum = "a246d82be1c9d791c5dfde9a2bd045fc3cbba3fa2b11ad558f27d01712f00569" [[package]] name = "encoding_rs" -version = "0.8.30" +version = "0.8.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7896dc8abb250ffdda33912550faa54c88ec8b998dec0b2c55ab224921ce11df" +checksum = "9852635589dc9f9ea1b6fe9f05b50ef208c85c834a562f0c6abb1c475736ec2b" dependencies = [ "cfg-if 1.0.0", ] +[[package]] +name = "env_logger" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b2cf0344971ee6c64c31be0d530793fba457d322dfec2810c453d0ef228f9c3" +dependencies = [ + "atty", + "humantime", + "log", + "regex", + "termcolor", +] + [[package]] name = "errors" version = "0.1.0" dependencies = [ - "image", - "syntect", - "tera", - "toml", + "anyhow", +] + +[[package]] +name = "exr" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14cc0e06fb5f67e5d6beadf3a382fec9baca1aa751c6d5368fdeee7e5932c215" +dependencies = [ + "bit_field", + "deflate", + "flume", + "half", + "inflate", + "lebe", + "smallvec", + "threadpool", ] [[package]] @@ -611,26 +769,37 @@ dependencies = [ [[package]] name = "filetime" -version = "0.2.15" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "975ccf83d8d9d0d84682850a38c8169027be83368805971cc4f238c2b245bc98" +checksum = "e94a7bbaa59354bc20dd75b67f23e2797b4490e9d6928203fb105c79e448c86c" dependencies = [ "cfg-if 1.0.0", "libc", "redox_syscall", - "winapi 0.3.9", + "windows-sys", ] [[package]] name = "flate2" -version = "1.0.22" +version = "1.0.24" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e6988e897c1c9c485f43b47a529cef42fde0547f9d8d41a7062518f1d8fc53f" +checksum = "f82b0f4c27ad9f8bfd1f3208d882da2b09c301bc1c828fd3a00d0216d2fbbff6" dependencies = [ - "cfg-if 1.0.0", "crc32fast", - "libc", - "miniz_oxide 0.4.4", + "miniz_oxide", +] + +[[package]] +name = "flume" +version = "0.10.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ceeb589a3157cac0ab8cc585feb749bd2cea5cb55a6ee802ad72d9fd38303da" +dependencies = [ + "futures-core", + "futures-sink", + "nanorand", + "pin-project", + "spin 0.9.4", ] [[package]] @@ -664,23 +833,6 @@ dependencies = [ "percent-encoding", ] -[[package]] -name = "front_matter" -version = "0.1.0" -dependencies = [ - "chrono", - "errors", - "lazy_static", - "regex", - "serde", - "serde_derive", - "serde_yaml", - "tera", - "test-case", - "toml", - "utils", -] - [[package]] name = "fsevent" version = "0.4.0" @@ -716,11 +868,17 @@ version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7" +[[package]] +name = "funty" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fed34cd105917e91daa4da6b3728c47b068749d6a62c59811f06ed2ac71d9da7" + [[package]] name = "futf" -version = "0.1.4" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c9c1ce3fa9336301af935ab852c437817d14cd33690446569392e65170aac3b" +checksum = "df420e2e84819663797d1ec6544b13c5be84629e7bb00dc960d6917db2987843" dependencies = [ "mac", "new_debug_unreachable", @@ -728,42 +886,42 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.19" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba3dda0b6588335f360afc675d0564c17a77a2bda81ca178a4b6081bd86c7f0b" +checksum = 
"c3083ce4b914124575708913bca19bfe887522d6e2e6d0952943f5eac4a74010" dependencies = [ "futures-core", ] [[package]] name = "futures-core" -version = "0.3.19" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0c8ff0461b82559810cdccfde3215c3f373807f5e5232b71479bff7bb2583d7" +checksum = "0c09fd04b7e4073ac7156a9539b57a484a8ea920f79c7c675d05d289ab6110d3" [[package]] name = "futures-io" -version = "0.3.19" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1f9d34af5a1aac6fb380f735fe510746c38067c5bf16c7fd250280503c971b2" +checksum = "fc4045962a5a5e935ee2fdedaa4e08284547402885ab326734432bed5d12966b" [[package]] name = "futures-sink" -version = "0.3.19" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3055baccb68d74ff6480350f8d6eb8fcfa3aa11bdc1a1ae3afdd0514617d508" +checksum = "21163e139fa306126e6eedaf49ecdb4588f939600f0b1e770f4205ee4b7fa868" [[package]] name = "futures-task" -version = "0.3.19" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ee7c6485c30167ce4dfb83ac568a849fe53274c831081476ee13e0dce1aad72" +checksum = "57c66a976bf5909d801bbef33416c41372779507e7a6b3a5e25e4749c58f776a" [[package]] name = "futures-util" -version = "0.3.19" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9b5cf40b47a271f77a8b1bec03ca09044d99d2372c0de244e66430761127164" +checksum = "d8b7abd5d659d9b90c8cba917f6ec750a74e2dc23902ef9cd4cc8c8b22e6036a" dependencies = [ "futures-core", "futures-io", @@ -815,13 +973,15 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.4" +version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "418d37c8b1d42553c93648be529cb70f920d3baf8ef469b74b9638df426e0b4c" +checksum = "4eb1a864a501629691edf6c15a593b7a51eebaa1e8468e9ddc623de7c9b58ec6" dependencies = [ "cfg-if 1.0.0", + 
"js-sys", "libc", - "wasi 0.10.2+wasi-snapshot-preview1", + "wasi 0.11.0+wasi-snapshot-preview1", + "wasm-bindgen", ] [[package]] @@ -830,15 +990,15 @@ version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6af39cf9a679d7195b3370f5454381ba49c4791bc7ce3ae2a7bf1a2a89c7adf" dependencies = [ - "phf 0.10.1", + "phf", "regex", ] [[package]] name = "gif" -version = "0.11.3" +version = "0.11.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3a7187e78088aead22ceedeee99779455b23fc231fe13ec443f99bb71694e5b" +checksum = "3edd93c6756b4dfaf2709eafcc345ba2636565295c198a9cfbf75fa5e3e00b06" dependencies = [ "color_quant", "weezl", @@ -852,9 +1012,9 @@ checksum = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574" [[package]] name = "globset" -version = "0.4.8" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "10463d9ff00a2a068db14231982f5132edebad0d7660cd956a1c30292dbcbfbd" +checksum = "0a1e17342619edbc21a964c2afbeb6c820c6a2560032872f397bb97ea127bd0a" dependencies = [ "aho-corasick", "bstr", @@ -876,9 +1036,9 @@ dependencies = [ [[package]] name = "h2" -version = "0.3.10" +version = "0.3.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c9de88456263e249e241fcd211d3954e2c9b0ef7ccfc235a444eb367cae3689" +checksum = "37a82c6d637fc9515a4694bbf1cb2457b79d81ce52b3108bdeea58b07dd34a57" dependencies = [ "bytes 1.1.0", "fnv", @@ -893,20 +1053,29 @@ dependencies = [ "tracing", ] +[[package]] +name = "half" +version = "1.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eabb4a44450da02c90444cf74558da904edde8fb4e9035a9a6a4e15445af0bd7" + [[package]] name = "hashbrown" version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e" +[[package]] +name = "hashbrown" +version = "0.12.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "607c8a29735385251a339424dd462993c0fed8fa09d378f259377df08c126022" + [[package]] name = "heck" -version = "0.3.3" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d621efb26863f0e9924c6ac577e8275e5e6b77455db64ffa6c65c904e9e132c" -dependencies = [ - "unicode-segmentation", -] +checksum = "2540771e65fc8cb83cd6e8a237f70c319bd5c29f78ed1084ba5d50eeac86f7f9" [[package]] name = "hermit-abi" @@ -919,9 +1088,9 @@ dependencies = [ [[package]] name = "html5ever" -version = "0.25.1" +version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aafcf38a1a36118242d29b92e1b08ef84e67e4a5ed06e0a80be20e6a32bfed6b" +checksum = "bea68cab48b8459f17cf1c944c67ddc572d272d9f2b274140f223ecb1da4a3b7" dependencies = [ "log", "mac", @@ -933,20 +1102,20 @@ dependencies = [ [[package]] name = "http" -version = "0.2.6" +version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31f4c6746584866f0feabcc69893c5b51beef3831656a968ed7ae254cdc4fd03" +checksum = "75f43d41e26995c17e71ee126451dd3941010b0514a81a9d11f3b341debc2399" dependencies = [ "bytes 1.1.0", "fnv", - "itoa 1.0.1", + "itoa 1.0.2", ] [[package]] name = "http-body" -version = "0.4.4" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ff4f84919677303da5f147645dbea6b1881f368d03ac84e1dc09031ebd7b2c6" +checksum = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1" dependencies = [ "bytes 1.1.0", "http", @@ -955,9 +1124,9 @@ dependencies = [ [[package]] name = "httparse" -version = "1.5.1" +version = "1.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "acd94fdbe1d4ff688b67b04eee2e17bd50995534a61539e45adfefb45e5e5503" +checksum = "496ce29bb5a52785b44e0f7ca2847ae0bb839c9bd28f69acac9b99d461c0c04c" [[package]] name = "httpdate" @@ -971,11 +1140,17 @@ version = "1.1.1" 
source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "02296996cb8796d7c6e3bc2d9211b7802812d36999a51bb754123ead7d37d026" +[[package]] +name = "humantime" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" + [[package]] name = "hyper" -version = "0.14.16" +version = "0.14.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b7ec3e62bdc98a2f0393a5048e4c30ef659440ea6e0e572965103e72bd836f55" +checksum = "02c929dc5c39e335a03c405292728118860721b10190d98c2a0f0efd5baafbac" dependencies = [ "bytes 1.1.0", "futures-channel", @@ -986,7 +1161,7 @@ dependencies = [ "http-body", "httparse", "httpdate", - "itoa 0.4.8", + "itoa 1.0.2", "pin-project-lite", "socket2", "tokio", @@ -1052,13 +1227,14 @@ dependencies = [ [[package]] name = "image" -version = "0.23.14" +version = "0.24.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24ffcb7e7244a9bf19d35bf2883b9c080c4ced3c07a9895572178cdb8f13f6a1" +checksum = "28edd9d7bc256be2502e325ac0628bde30b7001b9b52e0abe31a1a9dc2701212" dependencies = [ "bytemuck", "byteorder", "color_quant", + "exr", "gif", "jpeg-decoder", "num-iter", @@ -1075,27 +1251,30 @@ version = "0.1.0" dependencies = [ "config", "errors", - "image", - "lazy_static", - "rayon", - "regex", + "kamadak-exif", + "libs", "serde", - "serde_json", - "site", - "svg_metadata", - "tera", + "tempfile", "utils", - "webp", ] [[package]] name = "indexmap" -version = "1.8.0" +version = "1.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "282a6247722caba404c065016bbfa522806e51714c34f5dfc3e4a3a46fcb4223" +checksum = "10a35a97730320ffe8e2d410b5d3b69279b98d2c14bdb8b70ea89ecf7888d41e" dependencies = [ "autocfg", - "hashbrown", + "hashbrown 0.12.2", +] + +[[package]] +name = "inflate" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "1cdb29978cc5797bd8dcc8e5bf7de604891df2a8dc576973d71a281e916db2ff" +dependencies = [ + "adler32", ] [[package]] @@ -1119,10 +1298,24 @@ dependencies = [ ] [[package]] -name = "instant" -version = "0.1.12" +name = "insta" +version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" +checksum = "4126dd76ebfe2561486a1bd6738a33d2029ffb068a99ac446b7f8c77b2e58dbc" +dependencies = [ + "console 0.15.0", + "once_cell", + "serde", + "serde_json", + "serde_yaml", + "similar", +] + +[[package]] +name = "instant" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" dependencies = [ "cfg-if 1.0.0", ] @@ -1138,9 +1331,9 @@ dependencies = [ [[package]] name = "ipnet" -version = "2.3.1" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68f2d64f2edebec4ce84ad108148e67e1064789bee435edc5b60ad398714a3a9" +checksum = "879d54834c8c76457ef4293a689b2a8c59b076067ad77b15efafbb05f92a592b" [[package]] name = "itoa" @@ -1150,9 +1343,9 @@ checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4" [[package]] name = "itoa" -version = "1.0.1" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1aab8fc367588b89dcee83ab0fd66b72b50b72fa1904d7095045ace2b0c81c35" +checksum = "112c678d4050afce233f4f2852bb2eb519230b3cf12f33585275537d7e41578d" [[package]] name = "jieba-rs" @@ -1162,10 +1355,10 @@ checksum = "8c7e12f50325401dde50c29ca32cff44bae20873135b39f4e19ecf305226dd80" dependencies = [ "cedarwood", "fxhash", - "hashbrown", + "hashbrown 0.11.2", "lazy_static", - "phf 0.10.1", - "phf_codegen 0.10.0", + "phf", + "phf_codegen", "regex", ] @@ -1180,22 +1373,31 @@ dependencies = [ [[package]] name = "jpeg-decoder" -version = "0.1.22" +version = "0.2.6" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "229d53d58899083193af11e15917b5640cd40b29ff475a1fe4ef725deb02d0f2" +checksum = "9478aa10f73e7528198d75109c8be5cd7d15fb530238040148d5f9a22d4c5b3b" dependencies = [ "rayon", ] [[package]] name = "js-sys" -version = "0.3.56" +version = "0.3.58" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a38fc24e30fd564ce974c02bf1d337caddff65be6cc4735a1f7eab22a7440f04" +checksum = "c3fac17f7123a73ca62df411b1bf727ccc805daa070338fda671c86dac1bdc27" dependencies = [ "wasm-bindgen", ] +[[package]] +name = "kamadak-exif" +version = "0.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70494964492bf8e491eb3951c5d70c9627eb7100ede6cc56d748b9a3f302cfb6" +dependencies = [ + "mutate_once", +] + [[package]] name = "kernel32-sys" version = "0.2.2" @@ -1218,6 +1420,12 @@ version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" +[[package]] +name = "lebe" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7efd1d698db0759e6ef11a7cd44407407399a910c774dd804c64c032da7826ff" + [[package]] name = "lexical-core" version = "0.7.6" @@ -1242,81 +1450,138 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.113" +version = "0.2.126" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eef78b64d87775463c549fbd80e19249ef436ea3bf1de2a1eb7e717ec7fab1e9" +checksum = "349d5a591cd28b49e1d1037471617a32ddcda5731b99419008085f72d5a53836" [[package]] -name = "library" +name = "libs" version = "0.1.0" dependencies = [ - "chrono", - "config", - "errors", - "front_matter", + "ahash", + "ammonia", + "atty", + "base64", + "csv", + "elasticlunr-rs", + "filetime", + "gh-emoji", + "glob", "globset", - "lazy_static", + "image", "lexical-sort", + "minify-html", + "nom-bibtex", + "num-format", + "once_cell", + 
"percent-encoding", + "pulldown-cmark", + "quickxml_to_serde", "rayon", "regex", - "rendering", - "serde", - "serde_derive", - "slotmap", - "tempfile", + "relative-path", + "reqwest", + "sass-rs", + "serde_json", + "serde_yaml", + "sha2", + "slug", + "svg_metadata", + "syntect", "tera", + "termcolor", + "time", "toml", - "utils", + "unic-langid", + "unicode-segmentation", + "url", "walkdir", + "webp", ] [[package]] name = "libwebp-sys" -version = "0.2.0" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e70c064738b35a28fd6f991d27c0d9680353641d167ae3702a8228dd8272ef6" +checksum = "439fd1885aa28937e7edcd68d2e793cb4a22f8733460d2519fbafd2b215672bf" dependencies = [ "cc", ] [[package]] name = "lindera" -version = "0.8.1" +version = "0.13.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4e067b79992ab4ee575f5113ca7ccc1b011f67378f7627169e9bf95d48a8d481" +checksum = "7d1c5db4b1d12637aa316dc1adb215f78fe79025080af750942516c5ff17d1a0" dependencies = [ "anyhow", "bincode", "byteorder", "encoding", + "lindera-cc-cedict-builder", "lindera-core", "lindera-dictionary", "lindera-ipadic", "lindera-ipadic-builder", + "lindera-ko-dic-builder", + "lindera-unidic-builder", "serde", "serde_json", + "thiserror", +] + +[[package]] +name = "lindera-cc-cedict-builder" +version = "0.13.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "584491a91b758f92ef3202aaf969d837522f2c11390c4de0049a356d63bc0b0f" +dependencies = [ + "anyhow", + "bincode", + "byteorder", + "clap", + "csv", + "encoding", + "env_logger", + "glob", + "lindera-core", + "lindera-decompress", + "log", + "yada", ] [[package]] name = "lindera-core" -version = "0.8.1" +version = "0.13.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09d34134111feb8c9424de5743a9ead4c22cb1c5a48cb90322ebbe21a2bc27c1" +checksum = "c726ee1bf3282621a802d50f5e03d3f88aae41456815e1d0cb2271a538ff83ec" dependencies = [ 
"anyhow", "bincode", "byteorder", "encoding", + "log", "serde", "thiserror", "yada", ] +[[package]] +name = "lindera-decompress" +version = "0.13.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f9df38ea9310a1256cdee64ff0ebe3f17c49314e3176e53d2213371729d6744" +dependencies = [ + "anyhow", + "lzma-rs", + "serde", +] + [[package]] name = "lindera-dictionary" -version = "0.8.1" +version = "0.13.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68ac4ac60f3ca650e4ab1280a5b6d57f73267902477ab9c9fd3b6609a7fb5888" +checksum = "0a525b654642ff9f27927c5abba33f4c651e984b54a65e4f787c0b8c8e22e4a6" dependencies = [ "anyhow", "bincode", @@ -1326,9 +1591,9 @@ dependencies = [ [[package]] name = "lindera-ipadic" -version = "0.8.1" +version = "0.13.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "266fda136179e607d6ebcf2ef326fbdb2a133f9bdea9a68e6ac4fa8627e47ced" +checksum = "4797e016fc7dc0709ddb8c31da3b9e923e33e14043a4ff58431dd9c447ffacd2" dependencies = [ "bincode", "byteorder", @@ -1336,28 +1601,70 @@ dependencies = [ "flate2", "lindera-core", "lindera-ipadic-builder", - "reqwest", + "once_cell", "tar", - "tokio", ] [[package]] name = "lindera-ipadic-builder" -version = "0.8.1" +version = "0.13.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ede56e474b8fda9d4df2b9dc7683018111d3298260e1f594655e34287f26c64" +checksum = "9bd3ecfb07e8810f5ba313fa836804b66120f0ea76c2d93948c2ddcf4f81fd90" dependencies = [ "anyhow", "bincode", "byteorder", "clap", "encoding", + "env_logger", "glob", "lindera-core", + "lindera-decompress", + "log", "serde", "yada", ] +[[package]] +name = "lindera-ko-dic-builder" +version = "0.13.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc86f26560ea69e91413eecc078d8e13f39b3c1fdc5a242d79d7622f6fab3a83" +dependencies = [ + "anyhow", + "bincode", + "byteorder", + "clap", + "csv", + "encoding", + "env_logger", 
+ "glob", + "lindera-core", + "lindera-decompress", + "log", + "yada", +] + +[[package]] +name = "lindera-unidic-builder" +version = "0.13.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05c1bb8b7d38ffec7d949ee2c603b6ef96dfa7cf4937e91bad295a2d2b267b82" +dependencies = [ + "anyhow", + "bincode", + "byteorder", + "clap", + "csv", + "encoding", + "env_logger", + "glob", + "lindera-core", + "lindera-decompress", + "log", + "yada", +] + [[package]] name = "line-wrap" version = "0.1.1" @@ -1373,36 +1680,46 @@ version = "0.1.0" dependencies = [ "config", "errors", - "lazy_static", + "libs", "mockito", - "reqwest", "utils", ] [[package]] name = "linked-hash-map" -version = "0.5.4" +version = "0.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fb9b38af92608140b86b693604b9ffcc5824240a484d1ecd4795bacb2fe88f3" +checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" [[package]] name = "lock_api" -version = "0.4.5" +version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "712a4d093c9976e24e7dbca41db895dabcbac38eb5f4045393d17a95bdfb1109" +checksum = "327fa5b6a6940e4699ec49a9beae1ea4845c6bab9314e4f84ac68742139d8c53" dependencies = [ + "autocfg", "scopeguard", ] [[package]] name = "log" -version = "0.4.14" +version = "0.4.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51b9bbe6c47d51fc3e1a9b945965946b4c44142ab8792c50835a980d362c2710" +checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e" dependencies = [ "cfg-if 1.0.0", ] +[[package]] +name = "lzma-rs" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aba8ecb0450dfabce4ad72085eed0a75dffe8f21f7ada05638564ea9db2d7fb1" +dependencies = [ + "byteorder", + "crc", +] + [[package]] name = "mac" version = "0.1.1" @@ -1416,29 +1733,32 @@ source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "3e2e65a1a2e43cfcb47a895c4c8b10d1f4a61097f9f254f183aee60cad9c651d" [[package]] -name = "markup5ever" -version = "0.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a24f40fb03852d1cdd84330cddcaf98e9ec08a7b7768e952fad3b4cf048ec8fd" +name = "markdown" +version = "0.1.0" dependencies = [ - "log", - "phf 0.8.0", - "phf_codegen 0.8.0", - "string_cache", - "string_cache_codegen", - "tendril", + "config", + "console 0.1.0", + "errors", + "insta", + "libs", + "pest", + "pest_derive", + "templates", + "utils", ] [[package]] -name = "markup5ever_rcdom" -version = "0.1.0" +name = "markup5ever" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f015da43bcd8d4f144559a3423f4591d69b8ce0652c905374da7205df336ae2b" +checksum = "7a2629bb1404f3d34c2e921f21fd34ba00b206124c81f65c50b43b6aaefeb016" dependencies = [ - "html5ever", - "markup5ever", + "log", + "phf", + "phf_codegen", + "string_cache", + "string_cache_codegen", "tendril", - "xml5ever", ] [[package]] @@ -1449,9 +1769,9 @@ checksum = "a3e378b66a060d48947b590737b30a1be76706c8dd7b8ba0f2fe3989c68a853f" [[package]] name = "memchr" -version = "2.4.1" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a" +checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" [[package]] name = "memoffset" @@ -1470,42 +1790,54 @@ checksum = "2a60c7ce501c71e03a9c9c0d35b861413ae925bd979cc7a4e30d060069aaac8d" [[package]] name = "mime_guess" -version = "2.0.3" +version = "2.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2684d4c2e97d99848d30b324b00c8fcc7e5c897b7cbb5819b09e7c90e8baf212" +checksum = "4192263c238a5f0d0c6bfd21f336a313a4ce1c450542449ca191bb657b4642ef" dependencies = [ "mime", "unicase", ] +[[package]] +name = "minidom" +version = "0.12.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe549115a674f5ec64c754d85e37d6f42664bd0ef4ffb62b619489ad99c6cb1a" +dependencies = [ + "quick-xml", +] + [[package]] name = "minify-html" -version = "0.6.10" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cac5b6b3e2eebc4c99750325b106990cf5334afcdcde6b4479e5f2c6be5ceb8" +checksum = "640e7546ebd29c7d0fe523684a5a0661281ca93b7cf01fe3023a7fa979eaf17e" dependencies = [ "aho-corasick", + "css-minify", "lazy_static", "memchr", + "minify-js", ] [[package]] -name = "miniz_oxide" -version = "0.3.7" +name = "minify-js" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "791daaae1ed6889560f8c4359194f56648355540573244a5448a83ba1ecc7435" +checksum = "428521c0c8faf89847479aee75160d2254f15b3d7c5e629f4af0b7fb2e19b251" dependencies = [ - "adler32", + "aho-corasick", + "lazy_static", + "memchr", ] [[package]] name = "miniz_oxide" -version = "0.4.4" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a92518e98c078586bc6c934028adcca4c92a53d6a958196de835170a01d84e4b" +checksum = "6f5c75688da582b8ffc1f1799e9db273f32133c49e048f614d22ec3256773ccc" dependencies = [ "adler", - "autocfg", ] [[package]] @@ -1521,7 +1853,7 @@ dependencies = [ "kernel32-sys", "libc", "log", - "miow 0.2.2", + "miow", "net2", "slab", "winapi 0.2.8", @@ -1529,15 +1861,14 @@ dependencies = [ [[package]] name = "mio" -version = "0.7.14" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8067b404fe97c70829f082dec8bcf4f71225d7eaea1d8645349cb76fa06205cc" +checksum = "57ee1c23c7c63b0c9250c339ffdc69255f110b298b901b9f6c82547b7b87caaf" dependencies = [ "libc", "log", - "miow 0.3.7", - "ntapi", - "winapi 0.3.9", + "wasi 0.11.0+wasi-snapshot-preview1", + "windows-sys", ] [[package]] @@ -1564,38 +1895,44 @@ dependencies = [ "ws2_32-sys", ] -[[package]] -name = "miow" -version = 
"0.3.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9f1c5b025cda876f66ef43a113f91ebc9f4ccef34843000e0adf6ebbab84e21" -dependencies = [ - "winapi 0.3.9", -] - [[package]] name = "mockito" -version = "0.30.0" +version = "0.31.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d10030163d67f681db11810bc486df3149e6d91c8b4f3f96fa8b62b546c2cef8" +checksum = "401edc088069634afaa5f4a29617b36dba683c0c16fe4435a86debad23fa2f1a" dependencies = [ "assert-json-diff", "colored", - "difference", "httparse", "lazy_static", "log", - "rand 0.8.4", + "rand 0.8.5", "regex", "serde_json", "serde_urlencoded", + "similar", +] + +[[package]] +name = "mutate_once" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16cf681a23b4d0a43fc35024c176437f9dcd818db34e0f42ab456a0ee5ad497b" + +[[package]] +name = "nanorand" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a51313c5820b0b02bd422f4b44776fbf47961755c74ce64afc73bfad10226c3" +dependencies = [ + "getrandom 0.2.7", ] [[package]] name = "native-tls" -version = "0.2.8" +version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48ba9f7719b5a0f42f338907614285fb5fd70e53858141f69898a1fb7203b24d" +checksum = "fd7e2f3618557f980e0b17e8856252eee3c97fa12c54dff0ca290fb6266ca4a9" dependencies = [ "lazy_static", "libc", @@ -1628,15 +1965,13 @@ checksum = "e4a24736216ec316047a1fc4252e27dabb04218aa4a3f37c6e7ddbf1f9782b54" [[package]] name = "nix" -version = "0.23.1" +version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f866317acbd3a240710c63f065ffb1e4fd466259045ccb504130b7f668f35c6" +checksum = "8f17df307904acd05aa8e32e97bb20f2a0df1728bbc2d771ae8f9a90463441e9" dependencies = [ "bitflags", - "cc", "cfg-if 1.0.0", "libc", - "memoffset", ] [[package]] @@ -1656,13 +1991,26 @@ dependencies = [ "version_check", ] 
+[[package]] +name = "nom" +version = "6.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7413f999671bd4745a7b624bd370a569fb6bc574b23c83a3c5ed2e453f3d5e2" +dependencies = [ + "bitvec", + "funty", + "lexical-core", + "memchr", + "version_check", +] + [[package]] name = "nom-bibtex" version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c9db257f6c7b9c8b3ab67ee6a4b23a290c157d183fef2ac065bf9fce5f1c1299" dependencies = [ - "nom", + "nom 5.1.2", "nom-tracable", "nom_locate 2.1.0", "quick-error", @@ -1674,7 +2022,7 @@ version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e012c742e1269f801f6bfe0d1ebf99d7a3f7bc1d65c970bab0e7bee439e31610" dependencies = [ - "nom", + "nom 5.1.2", "nom-tracable-macros", "nom_locate 1.0.0", "nom_locate 2.1.0", @@ -1698,7 +2046,7 @@ checksum = "f932834fd8e391fc7710e2ba17e8f9f8645d846b55aa63207e17e110a1e1ce35" dependencies = [ "bytecount 0.3.2", "memchr", - "nom", + "nom 5.1.2", ] [[package]] @@ -1707,9 +2055,9 @@ version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a67484adf5711f94f2f28b653bf231bff8e438be33bf5b0f35935a0db4f618a2" dependencies = [ - "bytecount 0.6.2", + "bytecount 0.6.3", "memchr", - "nom", + "nom 5.1.2", ] [[package]] @@ -1730,15 +2078,6 @@ dependencies = [ "winapi 0.3.9", ] -[[package]] -name = "ntapi" -version = "0.3.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f6bb902e437b6d86e03cce10a7e2af662292c5dfef23b65899ea3ac9354ad44" -dependencies = [ - "winapi 0.3.9", -] - [[package]] name = "num-format" version = "0.4.0" @@ -1751,9 +2090,9 @@ dependencies = [ [[package]] name = "num-integer" -version = "0.1.44" +version = "0.1.45" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2cc698a63b549a70bc047073d2949cce27cd1c7b0a4a862d08a8031bc2801db" +checksum = 
"225d3389fb3509a24c93f5c29eb6bde2586b98d9f016636dff58d7c6f7569cd9" dependencies = [ "autocfg", "num-traits", @@ -1761,9 +2100,9 @@ dependencies = [ [[package]] name = "num-iter" -version = "0.1.42" +version = "0.1.43" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2021c8337a54d21aca0d59a92577a029af9431cb59b909b03252b9c164fad59" +checksum = "7d03e6c028c5dc5cac6e2dec0efda81fc887605bb3d884578bb6d6bf7514e252" dependencies = [ "autocfg", "num-integer", @@ -1772,9 +2111,9 @@ dependencies = [ [[package]] name = "num-rational" -version = "0.3.2" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12ac428b1cb17fce6f731001d307d351ec70a6d202fc2e60f7d4c5e42d8f4f07" +checksum = "0638a1c9d0a3c0914158145bc76cff373a75a627e6ecbfb71cbe6f453a5a19b0" dependencies = [ "autocfg", "num-integer", @@ -1783,9 +2122,9 @@ dependencies = [ [[package]] name = "num-traits" -version = "0.2.14" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290" +checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd" dependencies = [ "autocfg", ] @@ -1802,24 +2141,24 @@ dependencies = [ [[package]] name = "num_threads" -version = "0.1.2" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "71a1eb3a36534514077c1e079ada2fb170ef30c47d203aa6916138cf882ecd52" +checksum = "2819ce041d2ee131036f4fc9d6ae7ae125a3a40e97ba64d04fe799ad9dabbb44" dependencies = [ "libc", ] [[package]] name = "once_cell" -version = "1.9.0" +version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da32515d9f6e6e489d7bc9d84c71b060db7247dc035bbe44eac88cf87486d8d5" +checksum = "18a6dbe30758c9f83eb00cbea4ac95966305f5a7772f3f42ebfc7fc7eddbd8e1" [[package]] name = "onig" -version = "6.3.1" +version = "6.3.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "67ddfe2c93bb389eea6e6d713306880c7f6dcc99a75b659ce145d962c861b225" +checksum = "1eb3502504c9c8b06634b38bfdda86a9a8cef6277f3dec4d8b17c115110dd2a3" dependencies = [ "bitflags", "lazy_static", @@ -1829,9 +2168,9 @@ dependencies = [ [[package]] name = "onig_sys" -version = "69.7.1" +version = "69.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5dd3eee045c84695b53b20255bb7317063df090b68e18bfac0abb6c39cf7f33e" +checksum = "8bf3fbc9b931b6c9af85d219c7943c274a6ad26cff7488a2210215edd5f49bf8" dependencies = [ "cc", "pkg-config", @@ -1843,36 +2182,42 @@ version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2839e79665f131bdb5782e51f2c6c9599c133c6098982a54c794358bf432529c" -[[package]] -name = "opaque-debug" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" - [[package]] name = "open" -version = "2.0.2" +version = "3.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "176ee4b630d174d2da8241336763bb459281dddc0f4d87f72c3b1efc9a6109b7" +checksum = "360bcc8316bf6363aa3954c3ccc4de8add167b087e0259190a043c9514f910fe" dependencies = [ "pathdiff", - "winapi 0.3.9", + "windows-sys", ] [[package]] name = "openssl" -version = "0.10.38" +version = "0.10.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c7ae222234c30df141154f159066c5093ff73b63204dcda7121eb082fc56a95" +checksum = "618febf65336490dfcf20b73f885f5651a0c89c64c2d4a8c3662585a70bf5bd0" dependencies = [ "bitflags", "cfg-if 1.0.0", "foreign-types", "libc", "once_cell", + "openssl-macros", "openssl-sys", ] +[[package]] +name = "openssl-macros" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b501e44f11665960c7e7fcf062c7d96a14ade4aa98116c004b2e37b5be7d736c" +dependencies = 
[ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "openssl-probe" version = "0.1.5" @@ -1881,9 +2226,9 @@ checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" [[package]] name = "openssl-sys" -version = "0.9.72" +version = "0.9.75" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e46109c383602735fa0a2e48dd2b7c892b048e1bf69e5c3b1d804b7d9c203cb" +checksum = "e5f9bd0c2710541a3cda73d6f9ac4f1b240de4ae261065d309dbe73d9dceb42f" dependencies = [ "autocfg", "cc", @@ -1892,29 +2237,33 @@ dependencies = [ "vcpkg", ] +[[package]] +name = "os_str_bytes" +version = "6.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21326818e99cfe6ce1e524c2a805c189a99b5ae555a35d19f9a284b427d86afa" + [[package]] name = "parking_lot" -version = "0.11.2" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" +checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" dependencies = [ - "instant", "lock_api", "parking_lot_core", ] [[package]] name = "parking_lot_core" -version = "0.8.5" +version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d76e8e1493bcac0d2766c42737f34458f1c8c50c0d23bcb24ea953affb273216" +checksum = "09a279cbf25cb0757810394fbc1e359949b59e348145c643a939a525692e6929" dependencies = [ "cfg-if 1.0.0", - "instant", "libc", "redox_syscall", "smallvec", - "winapi 0.3.9", + "windows-sys", ] [[package]] @@ -1928,9 +2277,9 @@ dependencies = [ [[package]] name = "path-slash" -version = "0.1.4" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3cacbb3c4ff353b534a67fb8d7524d00229da4cb1dc8c79f4db96e375ab5b619" +checksum = "c54014ba3c1880122928735226f78b6f5bf5bd1fed15e41e92cf7aa20278ce28" [[package]] name = "pathdiff" @@ -1987,32 +2336,13 @@ dependencies = [ "sha-1", ] -[[package]] 
-name = "phf" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3dfb61232e34fcb633f43d12c58f83c1df82962dcdfa565a4e866ffc17dafe12" -dependencies = [ - "phf_shared 0.8.0", -] - [[package]] name = "phf" version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fabbf1ead8a5bcbc20f5f8b939ee3f5b0f6f281b6ad3468b84656b658b455259" dependencies = [ - "phf_shared 0.10.0", -] - -[[package]] -name = "phf_codegen" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cbffee61585b0411840d3ece935cce9cb6321f01c45477d30066498cd5e1a815" -dependencies = [ - "phf_generator 0.8.0", - "phf_shared 0.8.0", + "phf_shared", ] [[package]] @@ -2021,54 +2351,55 @@ version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4fb1c3a8bc4dd4e5cfce29b44ffc14bedd2ee294559a294e2a4d4c9e9a6a13cd" dependencies = [ - "phf_generator 0.10.0", - "phf_shared 0.10.0", + "phf_generator", + "phf_shared", ] [[package]] name = "phf_generator" -version = "0.8.0" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17367f0cc86f2d25802b2c26ee58a7b23faeccf78a396094c13dced0d0182526" +checksum = "5d5285893bb5eb82e6aaf5d59ee909a06a16737a8970984dd7746ba9283498d6" dependencies = [ - "phf_shared 0.8.0", - "rand 0.7.3", + "phf_shared", + "rand 0.8.5", ] [[package]] -name = "phf_generator" +name = "phf_shared" version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d5285893bb5eb82e6aaf5d59ee909a06a16737a8970984dd7746ba9283498d6" +checksum = "b6796ad771acdc0123d2a88dc428b5e38ef24456743ddb1744ed628f9815c096" dependencies = [ - "phf_shared 0.10.0", - "rand 0.8.4", + "siphasher", + "uncased", ] [[package]] -name = "phf_shared" -version = "0.8.0" +name = "pin-project" +version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"c00cf8b9eafe68dde5e9eaa2cef8ee84a9336a47d566ec55ca16589633b65af7" +checksum = "78203e83c48cffbe01e4a2d35d566ca4de445d79a85372fc64e378bfc812a260" dependencies = [ - "siphasher", + "pin-project-internal", ] [[package]] -name = "phf_shared" -version = "0.10.0" +name = "pin-project-internal" +version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6796ad771acdc0123d2a88dc428b5e38ef24456743ddb1744ed628f9815c096" +checksum = "710faf75e1b33345361201d36d04e98ac1ed8909151a017ed384700836104c74" dependencies = [ - "siphasher", - "uncased", + "proc-macro2", + "quote", + "syn", ] [[package]] name = "pin-project-lite" -version = "0.2.8" +version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e280fbe77cc62c91527259e9442153f4688736748d24660126286329742b4c6c" +checksum = "e0a7ae3ac2f1173085d398531c705756c94a4c56843785df85a60c1a0afac116" [[package]] name = "pin-utils" @@ -2078,9 +2409,9 @@ checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" [[package]] name = "pkg-config" -version = "0.3.24" +version = "0.3.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58893f751c9b0412871a09abd62ecd2a00298c6c83befa223ef98c52aef40cbe" +checksum = "1df8c4ec4b0627e53bdf214615ad287367e482558cf84b109250b37464dc03ae" [[package]] name = "plist" @@ -2092,20 +2423,20 @@ dependencies = [ "indexmap", "line-wrap", "serde", - "time 0.3.6", + "time", "xml-rs", ] [[package]] name = "png" -version = "0.16.8" +version = "0.17.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c3287920cb847dee3de33d301c463fba14dda99db24214ddf93f83d3021f4c6" +checksum = "dc38c0ad57efb786dd57b9864e5b18bae478c00c824dc55a38bbc9da95dde3ba" dependencies = [ "bitflags", "crc32fast", "deflate", - "miniz_oxide 0.3.7", + "miniz_oxide", ] [[package]] @@ -2120,20 +2451,44 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c" +[[package]] +name = "proc-macro-error" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" +dependencies = [ + "proc-macro-error-attr", + "proc-macro2", + "quote", + "syn", + "version_check", +] + +[[package]] +name = "proc-macro-error-attr" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" +dependencies = [ + "proc-macro2", + "quote", + "version_check", +] + [[package]] name = "proc-macro2" -version = "1.0.36" +version = "1.0.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7342d5883fbccae1cc37a2353b09c87c9b0f3afd73f5fb9bba687a1f733b029" +checksum = "dd96a1e8ed2596c337f8eae5f24924ec83f5ad5ab21ea8e455d3566c69fbcaf7" dependencies = [ - "unicode-xid", + "unicode-ident", ] [[package]] name = "pulldown-cmark" -version = "0.8.0" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffade02495f22453cd593159ea2f59827aae7f53fa8323f756799b670881dcf8" +checksum = "34f197a544b0c9ab3ae46c359a7ec9cbbb5c7bf97054266fecb7ead794a181d6" dependencies = [ "bitflags", "memchr", @@ -2146,15 +2501,42 @@ version = "1.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" +[[package]] +name = "quick-xml" +version = "0.17.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe1e430bdcf30c9fdc25053b9c459bb1a4672af4617b6c783d7d91dc17c6bbb0" +dependencies = [ + "memchr", +] + +[[package]] +name = "quickxml_to_serde" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26f35112b35480fd72f63444289083eeedbd61d13907c82c4309f0ccda35e244" +dependencies = [ + "minidom", + 
"serde", + "serde_derive", + "serde_json", +] + [[package]] name = "quote" -version = "1.0.15" +version = "1.0.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "864d3e96a899863136fc6e99f3d7cae289dafe43bf2c5ac19b70df7210c0a145" +checksum = "3bcdf212e9776fbcb2d23ab029360416bb1706b1aea2d1a5ba002727cbcab804" dependencies = [ "proc-macro2", ] +[[package]] +name = "radium" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "941ba9d78d8e2f7ce474c015eea4d9c6d25b6a3327f9832ee29a4de27f91bbb8" + [[package]] name = "rand" version = "0.7.3" @@ -2165,20 +2547,18 @@ dependencies = [ "libc", "rand_chacha 0.2.2", "rand_core 0.5.1", - "rand_hc 0.2.0", - "rand_pcg", + "rand_hc", ] [[package]] name = "rand" -version = "0.8.4" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e7573632e6454cf6b99d7aac4ccca54be06da05aca2ef7423d22d27d4d4bcd8" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" dependencies = [ "libc", "rand_chacha 0.3.1", "rand_core 0.6.3", - "rand_hc 0.3.1", ] [[package]] @@ -2216,7 +2596,7 @@ version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d34f1408f55294453790c48b2f1ebbb1c5b4b7563eb1f418bcfcfdbb06ebb4e7" dependencies = [ - "getrandom 0.2.4", + "getrandom 0.2.7", ] [[package]] @@ -2228,29 +2608,11 @@ dependencies = [ "rand_core 0.5.1", ] -[[package]] -name = "rand_hc" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d51e9f596de227fda2ea6c84607f5558e196eeaf43c986b724ba4fb8fdf497e7" -dependencies = [ - "rand_core 0.6.3", -] - -[[package]] -name = "rand_pcg" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16abd0c1b639e9eb4d7c50c0b8100b0d0f849be2349829c740fe8e6eb4816429" -dependencies = [ - "rand_core 0.5.1", -] - [[package]] name = "rayon" -version = "1.5.1" +version = "1.5.3" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c06aca804d41dbc8ba42dfd964f0d01334eceb64314b9ecf7c5fad5188a06d90" +checksum = "bd99e5772ead8baa5215278c9b15bf92087709e9c1b2d1f97cdb5a183c933a7d" dependencies = [ "autocfg", "crossbeam-deque", @@ -2260,31 +2622,30 @@ dependencies = [ [[package]] name = "rayon-core" -version = "1.9.1" +version = "1.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d78120e2c850279833f1dd3582f730c4ab53ed95aeaaaa862a2a5c71b1656d8e" +checksum = "258bcdb5ac6dad48491bb2992db6b7cf74878b0384908af124823d118c99683f" dependencies = [ "crossbeam-channel", "crossbeam-deque", "crossbeam-utils", - "lazy_static", "num_cpus", ] [[package]] name = "redox_syscall" -version = "0.2.10" +version = "0.2.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8383f39639269cde97d255a32bdb68c047337295414940c68bdd30c2e13203ff" +checksum = "62f25bc4c7e55e0b0b7a1d43fb893f4fa1361d0abe38b9ce4f323c2adfe6ef42" dependencies = [ "bitflags", ] [[package]] name = "regex" -version = "1.5.4" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d07a8629359eb56f1e2fb1652bb04212c072a87ba68546a04065d525673ac461" +checksum = "4c4eb3267174b8c6c2f654116623910a0fef09c4753f8dd83db29c48a0df988b" dependencies = [ "aho-corasick", "memchr", @@ -2299,52 +2660,30 @@ checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" [[package]] name = "regex-syntax" -version = "0.6.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b" - -[[package]] -name = "relative-path" -version = "1.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73d4caf086b102ab49d0525b721594a555ab55c6556086bbe52a430ad26c3bd7" - -[[package]] -name = "remove_dir_all" -version = "0.5.3" +version = "0.6.27" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7" -dependencies = [ - "winapi 0.3.9", -] +checksum = "a3f87b73ce11b1619a3c6332f45341e0047173771e8b8b73f87bfeefb7b56244" -[[package]] -name = "rendering" -version = "0.1.0" -dependencies = [ - "config", - "errors", - "front_matter", - "gh-emoji", - "lazy_static", - "link_checker", - "pest", - "pest_derive", - "pulldown-cmark", - "regex", - "serde", - "serde_derive", - "syntect", - "templates", - "tera", - "utils", +[[package]] +name = "relative-path" +version = "1.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0df32d82cedd1499386877b062ebe8721f806de80b08d183c70184ef17dd1d42" + +[[package]] +name = "remove_dir_all" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7" +dependencies = [ + "winapi 0.3.9", ] [[package]] name = "reqwest" -version = "0.11.9" +version = "0.11.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87f242f1488a539a79bac6dbe7c8609ae43b7914b7736210f239a37cccb32525" +checksum = "b75aa69a3f06bbcc66ede33af2af253c6f7a86b1ca0033f60c580a27074fbf92" dependencies = [ "base64", "bytes 1.1.0", @@ -2373,6 +2712,7 @@ dependencies = [ "tokio", "tokio-native-tls", "tokio-rustls", + "tower-service", "url", "wasm-bindgen", "wasm-bindgen-futures", @@ -2390,7 +2730,7 @@ dependencies = [ "cc", "libc", "once_cell", - "spin", + "spin 0.5.2", "untrusted", "web-sys", "winapi 0.3.9", @@ -2415,11 +2755,20 @@ dependencies = [ "serde_derive", ] +[[package]] +name = "rustc_version" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" +dependencies = [ + "semver", +] + [[package]] name = "rustls" -version = "0.20.2" +version = "0.20.6" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "d37e5e2290f3e040b594b1a9e04377c2c671f1a1cfd9bfdef82106ac1c113f84" +checksum = "5aab8ee6c7097ed6057f43c187a62418d0c05a4bd5f18b3571db50ee0f9ce033" dependencies = [ "log", "ring", @@ -2429,18 +2778,18 @@ dependencies = [ [[package]] name = "rustls-pemfile" -version = "0.2.1" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5eebeaeb360c87bfb72e84abdb3447159c0eaececf1bef2aecd65a8be949d1c9" +checksum = "e7522c9de787ff061458fe9a829dc790a3f5b22dc571694fc5883f448b94d9a9" dependencies = [ "base64", ] [[package]] name = "ryu" -version = "1.0.9" +version = "1.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73b4b750c782965c211b42f022f59af1fbceabdd026623714f104152f1ec149f" +checksum = "f3f6f92acf49d1b98f7a81226834412ada05458b7364277387724a237f062695" [[package]] name = "safemem" @@ -2481,12 +2830,12 @@ dependencies = [ [[package]] name = "schannel" -version = "0.1.19" +version = "0.1.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f05ba609c234e60bee0d547fe94a4c7e9da733d1c962cf6e59efa4cd9c8bc75" +checksum = "88d6731146462ea25d9244b2ed5fd1d716d25c52e4d54aa4fb0f3c4e9854dbe2" dependencies = [ "lazy_static", - "winapi 0.3.9", + "windows-sys", ] [[package]] @@ -2515,19 +2864,17 @@ dependencies = [ name = "search" version = "0.1.0" dependencies = [ - "ammonia", "config", - "elasticlunr-rs", + "content", "errors", - "lazy_static", - "library", + "libs", ] [[package]] name = "security-framework" -version = "2.5.0" +version = "2.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d09d3c15d814eda1d6a836f2f2b56a6abc1446c8a34351cb3180d3db92ffe4ce" +checksum = "2dc14f172faf8a0194a3aded622712b0de276821addc574fa54fc0a1167e10dc" dependencies = [ "bitflags", "core-foundation", @@ -2538,28 +2885,34 @@ dependencies = [ [[package]] name = "security-framework-sys" -version = "2.5.0" 
+version = "2.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e90dd10c41c6bfc633da6e0c659bd25d31e0791e5974ac42970267d59eba87f7" +checksum = "0160a13a177a45bfb43ce71c01580998474f556ad854dcbca936dd2841a5c556" dependencies = [ "core-foundation-sys", "libc", ] +[[package]] +name = "semver" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2333e6df6d6598f2b1974829f853c2b4c5f4a6e503c10af918081aa6f8564e1" + [[package]] name = "serde" -version = "1.0.135" +version = "1.0.139" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2cf9235533494ea2ddcdb794665461814781c53f19d87b76e571a1c35acbad2b" +checksum = "0171ebb889e45aa68b44aee0859b3eede84c6f5f5c228e6f140c0b2a0a46cad6" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.135" +version = "1.0.139" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8dcde03d87d4c973c04be249e7d8f0b35db1c848c487bd43032808e59dd8328d" +checksum = "dc1d3230c1de7932af58ad8ffbe1d784bd55efd5a9d84ac24f69c72d83543dfb" dependencies = [ "proc-macro2", "quote", @@ -2568,12 +2921,12 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.78" +version = "1.0.82" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d23c1ba4cf0efd44be32017709280b32d1cea5c3f1275c3b6d9e8bc54f758085" +checksum = "82c2c1fdcd807d1098552c5b9a36e425e42e9fbd7c6a37a8425f390f781f7fa7" dependencies = [ "indexmap", - "itoa 1.0.1", + "itoa 1.0.2", "ryu", "serde", ] @@ -2585,16 +2938,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" dependencies = [ "form_urlencoded", - "itoa 1.0.1", + "itoa 1.0.2", "ryu", "serde", ] [[package]] name = "serde_yaml" -version = "0.8.23" +version = "0.8.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"a4a521f2940385c165a24ee286aa8599633d162077a54bdcae2a6fd5a7bfa7a0" +checksum = "578a7433b776b56a35785ed5ce9a7e777ac0598aac5a6dd1b4b18a307c7fc71b" dependencies = [ "indexmap", "ryu", @@ -2611,79 +2964,56 @@ dependencies = [ "block-buffer 0.7.3", "digest 0.8.1", "fake-simd", - "opaque-debug 0.2.3", + "opaque-debug", ] [[package]] name = "sha2" -version = "0.9.9" +version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d58a1e1bf39749807d89cf2d98ac2dfa0ff1cb3faa38fbb64dd88ac8013d800" +checksum = "55deaec60f81eefe3cce0dc50bda92d6d8e88f2a27df7c5033b42afeb1ed2676" dependencies = [ - "block-buffer 0.9.0", "cfg-if 1.0.0", "cpufeatures", - "digest 0.9.0", - "opaque-debug 0.3.0", + "digest 0.10.3", ] [[package]] -name = "signal-hook-registry" -version = "1.4.0" +name = "similar" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e51e73328dc4ac0c7ccbda3a494dfa03df1de2f46018127f60c693f2648455b0" -dependencies = [ - "libc", -] +checksum = "2e24979f63a11545f5f2c60141afe249d4f19f84581ea2138065e400941d83d3" [[package]] name = "siphasher" -version = "0.3.9" +version = "0.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a86232ab60fa71287d7f2ddae4a7073f6b7aac33631c3015abb556f08c6d0a3e" +checksum = "7bd3e3206899af3f8b12af284fafc038cc1dc2b41d1b89dd17297221c5d225de" [[package]] name = "site" version = "0.1.0" dependencies = [ "config", + "console 0.1.0", + "content", "errors", - "front_matter", - "glob", "imageproc", - "lazy_static", - "library", + "libs", "link_checker", "path-slash", - "rayon", - "relative-path", - "sass-rs", "search", "serde", - "serde_derive", - "slotmap", "tempfile", "templates", - "tera", - "url", "utils", - "walkdir", ] [[package]] name = "slab" -version = "0.4.5" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9def91fd1e018fe007022791f865d0ccc9b3a0d5001e01aabb8b40e46000afb5" - -[[package]] 
-name = "slotmap" -version = "1.0.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1e08e261d0e8f5c43123b7adf3e4ca1690d655377ac93a03b2c9d3e98de1342" -dependencies = [ - "version_check", -] +checksum = "eb703cfe953bccee95685111adeedb76fabe4e97549a58d16f03ea7b9367bb32" [[package]] name = "slug" @@ -2696,15 +3026,15 @@ dependencies = [ [[package]] name = "smallvec" -version = "1.8.0" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2dd574626839106c320a323308629dcb1acfc96e32a8cba364ddc61ac23ee83" +checksum = "2fd0db749597d91ff862fd1d55ea87f7855a744a8425a64695b6fca237d1dad1" [[package]] name = "socket2" -version = "0.4.3" +version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f82496b90c36d70af5fcd482edaa2e0bd16fade569de1330405fecbbdac736b" +checksum = "66d72b759436ae32898a2af0a14218dbf55efde3feeb170eb623637db85ee1e0" dependencies = [ "libc", "winapi 0.3.9", @@ -2716,6 +3046,15 @@ version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" +[[package]] +name = "spin" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f6002a767bff9e83f8eeecf883ecb8011875a21ae8da43bffb817a57e78cc09" +dependencies = [ + "lock_api", +] + [[package]] name = "static_assertions" version = "1.1.0" @@ -2724,53 +3063,35 @@ checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" [[package]] name = "string_cache" -version = "0.8.2" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "923f0f39b6267d37d23ce71ae7235602134b250ace715dd2c90421998ddac0c6" +checksum = "213494b7a2b503146286049378ce02b482200519accc31872ee8be91fa820a08" dependencies = [ - "lazy_static", "new_debug_unreachable", + "once_cell", "parking_lot", - "phf_shared 0.8.0", + "phf_shared", 
"precomputed-hash", "serde", ] [[package]] name = "string_cache_codegen" -version = "0.5.1" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f24c8e5e19d22a726626f1a5e16fe15b132dcf21d10177fa5a45ce7962996b97" +checksum = "6bb30289b722be4ff74a408c3cc27edeaad656e06cb1fe8fa9231fa59c728988" dependencies = [ - "phf_generator 0.8.0", - "phf_shared 0.8.0", + "phf_generator", + "phf_shared", "proc-macro2", "quote", ] [[package]] name = "strsim" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a" - -[[package]] -name = "strum" -version = "0.21.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aaf86bbcfd1fa9670b7a129f64fc0c9fcbbfe4f1bc4210e9e98fe71ffc12cde2" - -[[package]] -name = "strum_macros" -version = "0.21.1" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d06aaeeee809dbc59eb4556183dd927df67db1540de5be8d3ec0b6636358a5ec" -dependencies = [ - "heck", - "proc-macro2", - "quote", - "syn", -] +checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" [[package]] name = "svg_metadata" @@ -2786,37 +3107,44 @@ dependencies = [ [[package]] name = "syn" -version = "1.0.86" +version = "1.0.98" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a65b3f4ffa0092e9887669db0eae07941f023991ab58ea44da8fe8e2d511c6b" +checksum = "c50aef8a904de4c23c788f104b7dddc7d6f79c647c7c8ce4cc8f73eb0ca773dd" dependencies = [ "proc-macro2", "quote", - "unicode-xid", + "unicode-ident", ] [[package]] name = "syntect" -version = "4.6.0" +version = "5.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b20815bbe80ee0be06e6957450a841185fcf690fe0178f14d77a05ce2caa031" +checksum = "c6c454c27d9d7d9a84c7803aaa3c50cd088d2906fe3c6e42da3209aa623576a8" dependencies = [ "bincode", "bitflags", "flate2", 
"fnv", "lazy_static", - "lazycell", + "once_cell", "onig", "plist", "regex-syntax", "serde", "serde_derive", "serde_json", + "thiserror", "walkdir", "yaml-rust", ] +[[package]] +name = "tap" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" + [[package]] name = "tar" version = "0.4.38" @@ -2846,34 +3174,22 @@ dependencies = [ name = "templates" version = "0.1.0" dependencies = [ - "base64", "config", - "csv", + "content", "errors", "imageproc", - "lazy_static", - "library", + "libs", + "markdown", "mockito", - "nom-bibtex", - "num-format", - "rendering", - "reqwest", - "serde", - "serde_derive", - "serde_json", - "sha2", "tempfile", - "tera", - "toml", - "url", "utils", ] [[package]] name = "tendril" -version = "0.4.2" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9ef557cb397a4f0a5a3a628f06515f78563f2209e64d47055d9dc6052bf5e33" +checksum = "d24a120c5fc464a3458240ee02c299ebcb9d67b5249c8848b09d639dca8d7bb0" dependencies = [ "futf", "mac", @@ -2882,9 +3198,9 @@ dependencies = [ [[package]] name = "tera" -version = "1.15.0" +version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3cac831b615c25bcef632d1cabf864fa05813baad3d526829db18eb70e8b58d" +checksum = "7c9783d6ff395ae80cf17ed9a25360e7ba37742a79fa8fddabb073c5c7c8856d" dependencies = [ "chrono", "chrono-tz", @@ -2894,7 +3210,7 @@ dependencies = [ "percent-encoding", "pest", "pest_derive", - "rand 0.8.4", + "rand 0.8.5", "regex", "serde", "serde_json", @@ -2904,49 +3220,65 @@ dependencies = [ [[package]] name = "termcolor" -version = "1.1.2" +version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dfed899f0eb03f32ee8c6a0aabdb8a7949659e3466561fc0adf54e26d88c5f4" +checksum = "bab24d30b911b2376f3a13cc2cd443142f0c81dda04c118693e35b3835757755" dependencies = [ "winapi-util", ] 
+[[package]] +name = "terminal_size" +version = "0.1.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "633c1a546cee861a1a6d0dc69ebeca693bf4296661ba7852b9d21d159e0506df" +dependencies = [ + "libc", + "winapi 0.3.9", +] + [[package]] name = "test-case" -version = "1.2.1" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "196e8a70562e252cc51eaaaee3ecddc39803d9b7fd4a772b7c7dae7cdf42a859" +dependencies = [ + "test-case-macros", +] + +[[package]] +name = "test-case-macros" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7cad0a06f9a61e94355aa3b3dc92d85ab9c83406722b1ca5e918d4297c12c23" +checksum = "8dd461f47ade621665c9f4e44b20449341769911c253275dc5cb03726cbb852c" dependencies = [ "cfg-if 1.0.0", + "proc-macro-error", "proc-macro2", "quote", "syn", - "version_check", ] [[package]] name = "textwrap" -version = "0.11.0" +version = "0.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060" -dependencies = [ - "unicode-width", -] +checksum = "b1141d4d61095b28419e22cb0bbf02755f5e54e0526f97f1e3d1d160e60885fb" [[package]] name = "thiserror" -version = "1.0.30" +version = "1.0.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "854babe52e4df1653706b98fcfc05843010039b406875930a70e4d9644e5c417" +checksum = "bd829fe32373d27f76265620b5309d0340cb8550f523c1dda251d6298069069a" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.30" +version = "1.0.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa32fd3f627f367fe16f893e2597ae3c05020f8bba2666a4e6ea73d377e5714b" +checksum = "0396bc89e626244658bef819e22d0cc459e795a5ebe878e6ec336d1674a8d79a" dependencies = [ "proc-macro2", "quote", @@ -2955,45 +3287,51 @@ dependencies = [ [[package]] name = "thread_local" -version = "1.1.3" 
+version = "1.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8018d24e04c95ac8790716a5987d0fec4f8b27249ffa0f7d33f1369bdfb88cbd" +checksum = "5516c27b78311c50bf42c071425c560ac799b11c30b31f87e3081965fe5e0180" dependencies = [ "once_cell", ] [[package]] -name = "tiff" -version = "0.6.1" +name = "threadpool" +version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a53f4706d65497df0c4349241deddf35f84cee19c87ed86ea8ca590f4464437" +checksum = "d050e60b33d41c19108b32cea32164033a9013fe3b46cbd4457559bfbf77afaa" dependencies = [ - "jpeg-decoder", - "miniz_oxide 0.4.4", - "weezl", + "num_cpus", ] [[package]] -name = "time" -version = "0.1.43" +name = "tiff" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca8a50ef2360fbd1eeb0ecd46795a87a19024eb4b53c5dc916ca1fd95fe62438" +checksum = "7cfada0986f446a770eca461e8c6566cb879682f7d687c8348aa0c857bd52286" dependencies = [ - "libc", - "winapi 0.3.9", + "flate2", + "jpeg-decoder", + "weezl", ] [[package]] name = "time" -version = "0.3.6" +version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8d54b9298e05179c335de2b9645d061255bcd5155f843b3e328d2cfe0a5b413" +checksum = "72c91f41dcb2f096c05f0873d667dceec1087ce5bcf984ec8ffb19acddbb3217" dependencies = [ - "itoa 1.0.1", + "itoa 1.0.2", "libc", "num_threads", + "time-macros", ] +[[package]] +name = "time-macros" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42657b1a6f4d817cda8e7a0ace261fe0cc946cf3a80314390b22cc61ae080792" + [[package]] name = "tinystr" version = "0.3.4" @@ -3002,9 +3340,9 @@ checksum = "29738eedb4388d9ea620eeab9384884fc3f06f586a2eddb56bedc5885126c7c1" [[package]] name = "tinyvec" -version = "1.5.1" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c1c1d5a42b6245520c249549ec267180beaffcc0615401ac8e31853d4b6d8d2" 
+checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" dependencies = [ "tinyvec_macros", ] @@ -3017,34 +3355,22 @@ checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c" [[package]] name = "tokio" -version = "1.15.0" +version = "1.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbbf1c778ec206785635ce8ad57fe52b3009ae9e0c9f574a728f3049d3e55838" +checksum = "57aec3cfa4c296db7255446efb4928a6be304b431a806216105542a67b6ca82e" dependencies = [ + "autocfg", "bytes 1.1.0", "libc", "memchr", - "mio 0.7.14", + "mio 0.8.4", "num_cpus", "once_cell", - "parking_lot", "pin-project-lite", - "signal-hook-registry", - "tokio-macros", + "socket2", "winapi 0.3.9", ] -[[package]] -name = "tokio-macros" -version = "1.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b557f72f448c511a979e2564e55d74e6c4432fc96ff4f6241bc6bded342643b7" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - [[package]] name = "tokio-native-tls" version = "0.3.0" @@ -3057,9 +3383,9 @@ dependencies = [ [[package]] name = "tokio-rustls" -version = "0.23.2" +version = "0.23.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a27d5f2b839802bd8267fa19b0530f5a08b9c08cd417976be2a65d130fe1c11b" +checksum = "c43ee83903113e03984cb9e5cebe6c04a5116269e900e3ddba8f068a62adda59" dependencies = [ "rustls", "tokio", @@ -3068,38 +3394,38 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.6.9" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e99e1983e5d376cd8eb4b66604d2e99e79f5bd988c3055891dcd8c9e2604cc0" +checksum = "cc463cd8deddc3770d20f9852143d50bf6094e640b485cb2e189a2099085ff45" dependencies = [ "bytes 1.1.0", "futures-core", "futures-sink", - "log", "pin-project-lite", "tokio", + "tracing", ] [[package]] name = "toml" -version = "0.5.8" +version = "0.5.9" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "a31142970826733df8241ef35dc040ef98c679ab14d7c3e54d827099b3acecaa" +checksum = "8d82e1a7758622a465f8cee077614c73484dac5b836c02ff6a40d5d1010324d7" dependencies = [ "serde", ] [[package]] name = "tower-service" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "360dfd1d6d30e05fda32ace2c8c70e9c0a9da713275777f5a4dbb8a1893930c6" +checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" [[package]] name = "tracing" -version = "0.1.29" +version = "0.1.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "375a639232caf30edfc78e8d89b2d4c375515393e7af7e16f01cd96917fb2105" +checksum = "a400e31aa60b9d44a52a8ee0343b5b18566b03a8321e0d321f695cf56e940160" dependencies = [ "cfg-if 1.0.0", "pin-project-lite", @@ -3108,11 +3434,11 @@ dependencies = [ [[package]] name = "tracing-core" -version = "0.1.21" +version = "0.1.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f4ed65637b8390770814083d20756f87bfa2c21bf2f110babdc5438351746e4" +checksum = "7b7358be39f2f274f322d2aaed611acc57f382e8eb1e5b48cb9ae30933495ce7" dependencies = [ - "lazy_static", + "once_cell", ] [[package]] @@ -3129,15 +3455,15 @@ checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987" [[package]] name = "ucd-trie" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56dee185309b50d1f11bfedef0fe6d036842e3fb77413abef29f8f8d1c5d4c1c" +checksum = "89570599c4fe5585de2b388aab47e99f7fa4e9238a1399f707a02e356058141c" [[package]] name = "uncased" -version = "0.9.6" +version = "0.9.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5baeed7327e25054889b9bd4f975f32e5f4c5d434042d59ab6cd4142c0a76ed0" +checksum = "09b01702b0fd0b3fadcf98e098780badda8742d4f4a7676615cad90e8ac73622" dependencies = [ 
"version_check", ] @@ -3221,36 +3547,30 @@ dependencies = [ [[package]] name = "unicode-bidi" -version = "0.3.7" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "099b7128301d285f79ddd55b9a83d5e6b9e97c92e0ea0daebee7263e932de992" + +[[package]] +name = "unicode-ident" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a01404663e3db436ed2746d9fefef640d868edae3cceb81c3b8d5732fda678f" +checksum = "15c61ba63f9235225a22310255a29b806b907c9b8c964bcbd0a2c70f3f2deea7" [[package]] name = "unicode-normalization" -version = "0.1.19" +version = "0.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d54590932941a9e9266f0832deed84ebe1bf2e4c9e4a3554d393d18f5e854bf9" +checksum = "854cbdc4f7bc6ae19c820d44abdc3277ac3e1b2b93db20a636825d9322fb60e6" dependencies = [ "tinyvec", ] [[package]] name = "unicode-segmentation" -version = "1.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8895849a949e7845e06bd6dc1aa51731a103c42707010a5b591c0038fb73385b" - -[[package]] -name = "unicode-width" -version = "0.1.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ed742d4ea2bd1176e236172c8429aaf54486e7ac098db29ffe6529e0ce50973" - -[[package]] -name = "unicode-xid" -version = "0.2.2" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3" +checksum = "7e8820f5d777f6224dc4be3632222971ac30164d4a258d595640799554ebfd99" [[package]] name = "untrusted" @@ -3281,17 +3601,9 @@ name = "utils" version = "0.1.0" dependencies = [ "errors", - "filetime", - "minify-html", - "percent-encoding", - "regex", + "libs", "serde", - "slug", "tempfile", - "tera", - "toml", - "unicode-segmentation", - "walkdir", ] [[package]] @@ -3300,12 +3612,6 @@ version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" -[[package]] -name = "vec_map" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191" - [[package]] name = "version_check" version = "0.9.4" @@ -3341,15 +3647,15 @@ checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" [[package]] name = "wasi" -version = "0.10.2+wasi-snapshot-preview1" +version = "0.11.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" -version = "0.2.79" +version = "0.2.81" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25f1af7423d8588a3d840681122e72e6a24ddbcb3f0ec385cac0d12d24256c06" +checksum = "7c53b543413a17a202f4be280a7e5c62a1c69345f5de525ee64f8cfdbc954994" dependencies = [ "cfg-if 1.0.0", "wasm-bindgen-macro", @@ -3357,9 +3663,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.79" +version = "0.2.81" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b21c0df030f5a177f3cba22e9bc4322695ec43e7257d865302900290bcdedca" +checksum = "5491a68ab4500fa6b4d726bd67408630c3dbe9c4fe7bda16d5c82a1fd8c7340a" dependencies = [ "bumpalo", "lazy_static", @@ -3372,9 +3678,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-futures" -version = "0.4.29" +version = "0.4.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2eb6ec270a31b1d3c7e266b999739109abce8b6c87e4b31fcfcd788b65267395" +checksum = "de9a9cec1733468a8c657e57fa2413d2ae2c0129b95e87c5b72b8ace4d13f31f" dependencies = [ "cfg-if 1.0.0", "js-sys", @@ -3384,9 +3690,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.79" +version 
= "0.2.81" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f4203d69e40a52ee523b2529a773d5ffc1dc0071801c87b3d270b471b80ed01" +checksum = "c441e177922bc58f1e12c022624b6216378e5febc2f0533e41ba443d505b80aa" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -3394,9 +3700,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.79" +version = "0.2.81" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa8a30d46208db204854cadbb5d4baf5fcf8071ba5bf48190c3e59937962ebc" +checksum = "7d94ac45fcf608c1f45ef53e748d35660f168490c10b23704c7779ab8f5c3048" dependencies = [ "proc-macro2", "quote", @@ -3407,15 +3713,15 @@ dependencies = [ [[package]] name = "wasm-bindgen-shared" -version = "0.2.79" +version = "0.2.81" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d958d035c4438e28c70e4321a2911302f10135ce78a9c7834c0cab4123d06a2" +checksum = "6a89911bd99e5f3659ec4acf9c4d93b0a90fe4a2a11f15328472058edc5261be" [[package]] name = "web-sys" -version = "0.3.56" +version = "0.3.58" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c060b319f29dd25724f09a2ba1418f142f539b2be99fbf4d2d5a8f7330afb8eb" +checksum = "2fed94beee57daf8dd7d51f2b15dc2bcde92d7a72304cdf662a4371008b71b90" dependencies = [ "js-sys", "wasm-bindgen", @@ -3423,9 +3729,9 @@ dependencies = [ [[package]] name = "webp" -version = "0.1.3" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a598dd8197b16c7569e231619b668380aefe9352daf1d503c3eea7b38fddba3" +checksum = "cf022f821f166079a407d000ab57e84de020e66ffbbf4edde999bc7d6e371cae" dependencies = [ "image", "libwebp-sys", @@ -3443,18 +3749,18 @@ dependencies = [ [[package]] name = "webpki-roots" -version = "0.22.2" +version = "0.22.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "552ceb903e957524388c4d3475725ff2c8b7960922063af6ce53c9a43da07449" +checksum = 
"f1c760f0d366a6c24a02ed7816e23e691f5d92291f94d15e836006fd11b04daf" dependencies = [ "webpki", ] [[package]] name = "weezl" -version = "0.1.5" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8b77fdfd5a253be4ab714e4ffa3c49caf146b4de743e97510c0656cf90f1e8e" +checksum = "9193164d4de03a926d909d3bc7c30543cecb35400c02114792c2cae20d5e2dbb" [[package]] name = "winapi" @@ -3499,20 +3805,72 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" +[[package]] +name = "windows-sys" +version = "0.36.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea04155a16a59f9eab786fe12a4a450e75cdb175f9e0d80da1e17db09f55b8d2" +dependencies = [ + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_msvc", +] + +[[package]] +name = "windows_aarch64_msvc" +version = "0.36.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9bb8c3fd39ade2d67e9874ac4f3db21f0d710bee00fe7cab16949ec184eeaa47" + +[[package]] +name = "windows_i686_gnu" +version = "0.36.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "180e6ccf01daf4c426b846dfc66db1fc518f074baa793aa7d9b9aaeffad6a3b6" + +[[package]] +name = "windows_i686_msvc" +version = "0.36.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2e7917148b2812d1eeafaeb22a97e4813dfa60a3f8f78ebe204bcc88f12f024" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.36.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4dcd171b8776c41b97521e5da127a2d86ad280114807d0b2ab1e462bc764d9e1" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.36.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c811ca4a8c853ef420abd8592ba53ddbbac90410fab6903b3e79972a631f7680" + [[package]] 
name = "winreg" -version = "0.7.0" +version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0120db82e8a1e0b9fb3345a539c478767c0048d842860994d96113d5b667bd69" +checksum = "80d0f4e272c85def139476380b12f9ac60926689dd2e01d4923222f40580869d" dependencies = [ "winapi 0.3.9", ] +[[package]] +name = "winres" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b68db261ef59e9e52806f688020631e987592bd83619edccda9c47d42cde4f6c" +dependencies = [ + "toml", +] + [[package]] name = "ws" -version = "0.9.1" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c51a2c47b5798ccc774ffb93ff536aec7c4275d722fd9c740c83cdd1af1f2d94" +checksum = "25fe90c75f236a0a00247d5900226aea4f2d7b05ccc34da9e7a8880ff59b5848" dependencies = [ "byteorder", "bytes 0.4.12", @@ -3536,11 +3894,17 @@ dependencies = [ "winapi-build", ] +[[package]] +name = "wyz" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85e60b0d1b5f99db2556934e21937020776a5d31520bf169e851ac44e6420214" + [[package]] name = "xattr" -version = "0.2.2" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "244c3741f4240ef46274860397c7c74e50eb23624996930e484c16679633a54c" +checksum = "6d1526bbe5aaeb5eb06885f4d987bcdfa5e23187055de9b83fe00156a821fabc" dependencies = [ "libc", ] @@ -3551,18 +3915,6 @@ version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d2d7d3948613f75c98fd9328cfdcc45acc4d360655289d0a7d4ec931392200a3" -[[package]] -name = "xml5ever" -version = "0.16.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9234163818fd8e2418fcde330655e757900d4236acd8cc70fef345ef91f6d865" -dependencies = [ - "log", - "mac", - "markup5ever", - "time 0.1.43", -] - [[package]] name = "xmlparser" version = "0.13.3" @@ -3586,30 +3938,25 @@ dependencies = [ [[package]] 
name = "zola" -version = "0.15.3" +version = "0.16.0" dependencies = [ - "atty", - "chrono", "clap", + "clap_complete", + "console 0.1.0", "ctrlc", "errors", - "front_matter", - "globset", "hyper", - "lazy_static", + "libs", + "mime", "mime_guess", "notify", "open", "pathdiff", - "percent-encoding", - "relative-path", "same-file", - "search", - "serde_json", "site", - "termcolor", + "time", "tokio", - "url", "utils", + "winres", "ws", ] diff --git a/Cargo.toml b/Cargo.toml index de0d6e07ed..1e4917ec9c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "zola" -version = "0.15.3" +version = "0.16.0" authors = ["Vincent Prouillet "] edition = "2018" license = "MIT" @@ -13,62 +13,50 @@ keywords = ["static", "site", "generator", "blog"] include = ["src/**/*", "LICENSE", "README.md"] [build-dependencies] -clap = "2" +clap = "3" +clap_complete = "3" +winres = "0.1" +time = "0.3" [[bin]] name = "zola" [dependencies] -atty = "0.2.11" -clap = { version = "2", default-features = false } -chrono = "0.4" -lazy_static = "1.1" -termcolor = "1.0.4" -# Used in init to ensure the url given as base_url is a valid one -url = "2" +clap = { version = "3", features = ["derive"] } # Below is for the serve cmd hyper = { version = "0.14.1", default-features = false, features = ["runtime", "server", "http2", "http1"] } tokio = { version = "1.0.1", default-features = false, features = ["rt", "fs", "time"] } -percent-encoding = "2" +time = { version = "0.3", features = ["formatting", "macros", "local-offset"] } notify = "4" ws = "0.9" ctrlc = "3" -open = "2" -globset = "0.4" -relative-path = "1" +open = "3" pathdiff = "0.2" -serde_json = "1.0" # For mimetype detection in serve mode mime_guess = "2.0" +# For essence_str() function, see https://github.com/getzola/zola/issues/1845 +mime = "0.3.16" + site = { path = "components/site" } errors = { path = "components/errors" } -front_matter = { path = "components/front_matter" } +console = { path = "components/console" } utils 
= { path = "components/utils" } -search = { path = "components/search" } +libs = { path = "components/libs" } + [dev-dependencies] same-file = "1" [features] default = ["rust-tls"] -rust-tls = ["site/rust-tls"] -native-tls = ["site/native-tls"] +rust-tls = ["libs/rust-tls"] +native-tls = ["libs/native-tls"] +indexing-zh = ["libs/indexing-zh"] +indexing-ja = ["libs/indexing-ja"] [workspace] -members = [ - "components/config", - "components/errors", - "components/front_matter", - "components/rendering", - "components/site", - "components/templates", - "components/utils", - "components/search", - "components/imageproc", - "components/link_checker", - "components/library", -] +members = ["components/*"] [profile.release] lto = true @@ -78,3 +66,7 @@ codegen-units = 1 # Disabling debug info speeds up builds a bunch, # and we don't rely on it for debugging that much. debug = 0 + +[package.metadata.winres] +OriginalFilename = "zola.exe" +InternalName = "zola" diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 86e68f43ae..e8b08d0a0e 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -11,7 +11,7 @@ stages: strategy: matrix: windows-stable: - imageName: 'vs2017-win2016' + imageName: 'windows-2019' rustup_toolchain: stable mac-stable: imageName: 'macos-11' @@ -21,7 +21,7 @@ stages: rustup_toolchain: stable linux-pinned: imageName: 'ubuntu-20.04' - rustup_toolchain: 1.53.0 + rustup_toolchain: 1.57.0 pool: vmImage: $(imageName) steps: diff --git a/build.rs b/build.rs index f6e897786b..40f11040bb 100644 --- a/build.rs +++ b/build.rs @@ -2,6 +2,21 @@ include!("src/cli.rs"); +fn generate_pe_header() { + use time::OffsetDateTime; + + let today = OffsetDateTime::now_utc(); + let copyright = format!("Copyright © 2017-{} Vincent Prouillet", today.year()); + let mut res = winres::WindowsResource::new(); + // needed for MinGW cross-compiling + if cfg!(unix) { + res.set_windres_path("x86_64-w64-mingw32-windres"); + } + res.set_icon("docs/static/favicon.ico"); + 
res.set("LegalCopyright", ©right); + res.compile().expect("Failed to compile Windows resources!"); +} + fn main() { // disabled below as it fails in CI // let mut app = build_cli(); @@ -9,4 +24,12 @@ fn main() { // app.gen_completions("zola", Shell::Fish, "completions/"); // app.gen_completions("zola", Shell::Zsh, "completions/"); // app.gen_completions("zola", Shell::PowerShell, "completions/"); + if std::env::var("CARGO_CFG_TARGET_OS").unwrap() != "windows" + && std::env::var("PROFILE").unwrap() != "release" + { + return; + } + if cfg!(windows) { + generate_pe_header(); + } } diff --git a/components/config/Cargo.toml b/components/config/Cargo.toml index dbf089128d..d7ef4029b6 100644 --- a/components/config/Cargo.toml +++ b/components/config/Cargo.toml @@ -1,19 +1,12 @@ [package] name = "config" version = "0.1.0" -authors = ["Vincent Prouillet "] -edition = "2018" +edition = "2021" include = ["src/**/*"] [dependencies] -toml = "0.5" -serde = "1" -serde_derive = "1" -chrono = "0.4" -globset = "0.4" -lazy_static = "1" -syntect = "4" -unic-langid = "0.9" +serde = {version = "1.0", features = ["derive"] } errors = { path = "../errors" } utils = { path = "../utils" } +libs = { path = "../libs" } diff --git a/components/config/examples/generate_sublime.rs b/components/config/examples/generate_sublime.rs index a1946a1532..06a0ba0fbc 100644 --- a/components/config/examples/generate_sublime.rs +++ b/components/config/examples/generate_sublime.rs @@ -3,14 +3,14 @@ //! Although it is a valid example for serializing syntaxes, you probably won't need //! to do this yourself unless you want to cache your own compiled grammars. 
+use libs::syntect::dumps::*; +use libs::syntect::highlighting::ThemeSet; +use libs::syntect::parsing::SyntaxSetBuilder; use std::collections::HashMap; use std::collections::HashSet; use std::env; use std::iter::FromIterator; use std::path::Path; -use syntect::dumps::*; -use syntect::highlighting::ThemeSet; -use syntect::parsing::SyntaxSetBuilder; fn usage_and_exit() -> ! { println!("USAGE: cargo run --example generate_sublime synpack source-dir newlines.packdump nonewlines.packdump\n diff --git a/components/config/src/config/languages.rs b/components/config/src/config/languages.rs index 9c532b4f98..f7b33288fa 100644 --- a/components/config/src/config/languages.rs +++ b/components/config/src/config/languages.rs @@ -1,13 +1,13 @@ use std::collections::HashMap; use errors::{bail, Result}; -use serde_derive::{Deserialize, Serialize}; -use unic_langid::LanguageIdentifier; +use libs::unic_langid::LanguageIdentifier; +use serde::{Deserialize, Serialize}; use crate::config::search; use crate::config::taxonomies; -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Default)] #[serde(default)] pub struct LanguageOptions { /// Title of the site. Defaults to None @@ -19,33 +19,17 @@ pub struct LanguageOptions { /// The filename to use for feeds. Used to find the template, too. /// Defaults to "atom.xml", with "rss.xml" also having a template provided out of the box. pub feed_filename: String, - pub taxonomies: Vec, + pub taxonomies: Vec, /// Whether to generate search index for that language, defaults to `false` pub build_search_index: bool, /// The search config, telling what to include in the search index for that language pub search: search::Search, /// A toml crate `Table` with String key representing term and value /// another `String` representing its translation. - /// /// Use `get_translation()` method for translating key into different languages. 
pub translations: HashMap, } -impl Default for LanguageOptions { - fn default() -> Self { - LanguageOptions { - title: None, - description: None, - generate_feed: false, - feed_filename: String::new(), - build_search_index: false, - taxonomies: Vec::new(), - search: search::Search::default(), - translations: HashMap::new(), - } - } -} - /// We want to ensure the language codes are valid ones pub fn validate_code(code: &str) -> Result<()> { if LanguageIdentifier::from_bytes(code.as_bytes()).is_err() { diff --git a/components/config/src/config/link_checker.rs b/components/config/src/config/link_checker.rs index 2e81083bbd..8501287981 100644 --- a/components/config/src/config/link_checker.rs +++ b/components/config/src/config/link_checker.rs @@ -1,4 +1,18 @@ -use serde_derive::{Deserialize, Serialize}; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +pub enum LinkCheckerLevel { + #[serde(rename = "error")] + Error, + #[serde(rename = "warn")] + Warn, +} + +impl Default for LinkCheckerLevel { + fn default() -> Self { + Self::Error + } +} #[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)] #[serde(default)] @@ -7,5 +21,8 @@ pub struct LinkChecker { pub skip_prefixes: Vec, /// Skip anchor checking for these URL prefixes pub skip_anchor_prefixes: Vec, + /// Emit either "error" or "warn" for broken internal links (including anchor links). + pub internal_level: LinkCheckerLevel, + /// Emit either "error" or "warn" for broken external links (including anchor links). 
+ pub external_level: LinkCheckerLevel, } - diff --git a/components/config/src/config/markup.rs b/components/config/src/config/markup.rs index 2b250cdab6..9b80ed8935 100644 --- a/components/config/src/config/markup.rs +++ b/components/config/src/config/markup.rs @@ -1,11 +1,11 @@ use std::{path::Path, sync::Arc}; -use serde_derive::{Deserialize, Serialize}; -use syntect::{ +use libs::syntect::{ highlighting::{Theme, ThemeSet}, html::css_for_theme_with_class_style, parsing::{SyntaxSet, SyntaxSetBuilder}, }; +use serde::{Deserialize, Serialize}; use errors::{bail, Result}; @@ -13,7 +13,7 @@ use crate::highlighting::{CLASS_STYLE, THEME_SET}; pub const DEFAULT_HIGHLIGHT_THEME: &str = "base16-ocean-dark"; -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Default)] #[serde(default)] pub struct ThemeCss { /// Which theme are we generating the CSS from @@ -22,12 +22,6 @@ pub struct ThemeCss { pub filename: String, } -impl Default for ThemeCss { - fn default() -> ThemeCss { - ThemeCss { theme: String::new(), filename: String::new() } - } -} - #[derive(Clone, Debug, Serialize, Deserialize)] #[serde(default)] pub struct Markdown { @@ -66,16 +60,16 @@ impl Markdown { if self.highlight_theme == "css" { None } else { - Some(self.get_highlight_theme_by_name(&self.highlight_theme)) + self.get_highlight_theme_by_name(&self.highlight_theme) } } /// Gets an arbitrary theme from the THEME_SET or the extra_theme_set - pub fn get_highlight_theme_by_name(&self, theme_name: &str) -> &Theme { + pub fn get_highlight_theme_by_name(&self, theme_name: &str) -> Option<&Theme> { (*self.extra_theme_set) .as_ref() .and_then(|ts| ts.themes.get(theme_name)) - .unwrap_or_else(|| &THEME_SET.themes[theme_name]) + .or_else(|| THEME_SET.themes.get(theme_name)) } /// Attempt to load any extra syntaxes and themes found in the extra_syntaxes_and_themes folders @@ -101,9 +95,13 @@ impl Markdown { )) } - pub fn 
export_theme_css(&self, theme_name: &str) -> String { - let theme = self.get_highlight_theme_by_name(theme_name); - css_for_theme_with_class_style(theme, CLASS_STYLE) + pub fn export_theme_css(&self, theme_name: &str) -> Result { + if let Some(theme) = self.get_highlight_theme_by_name(theme_name) { + Ok(css_for_theme_with_class_style(theme, CLASS_STYLE) + .expect("the function can't even error?")) + } else { + bail!("Theme {} not found", theme_name) + } } pub fn init_extra_syntaxes_and_highlight_themes(&mut self, path: &Path) -> Result<()> { @@ -114,14 +112,14 @@ impl Markdown { self.extra_syntax_set = Some(extra_syntax_set); } - if self.highlight_theme == "css" { - return Ok(()); - } - if let Some(extra_theme_set) = loaded_extra_highlight_themes { self.extra_theme_set = Arc::new(Some(extra_theme_set)); } + if self.highlight_theme == "css" { + return Ok(()); + } + // Validate that the chosen highlight_theme exists in the loaded highlight theme sets if !THEME_SET.themes.contains_key(&self.highlight_theme) { if let Some(extra) = &*self.extra_theme_set { diff --git a/components/config/src/config/mod.rs b/components/config/src/config/mod.rs index 2e291e3434..8450675677 100644 --- a/components/config/src/config/mod.rs +++ b/components/config/src/config/mod.rs @@ -8,13 +8,14 @@ pub mod taxonomies; use std::collections::HashMap; use std::path::{Path, PathBuf}; -use globset::{Glob, GlobSet, GlobSetBuilder}; -use serde_derive::{Deserialize, Serialize}; -use toml::Value as Toml; +use libs::globset::{Glob, GlobSet, GlobSetBuilder}; +use libs::toml::Value as Toml; +use serde::{Deserialize, Serialize}; use crate::theme::Theme; -use errors::{bail, Error, Result}; +use errors::{anyhow, bail, Result}; use utils::fs::read_file; +use utils::slugs::slugify_paths; // We want a default base url for tests static DEFAULT_BASE_URL: &str = "http://a-website.com"; @@ -55,8 +56,7 @@ pub struct Config { pub feed_filename: String, /// If set, files from static/ will be hardlinked instead of 
copied to the output dir. pub hard_link_static: bool, - - pub taxonomies: Vec, + pub taxonomies: Vec, /// Whether to compile the `sass` directory and output the css files into the static folder pub compile_sass: bool, @@ -99,7 +99,7 @@ pub struct SerializedConfig<'a> { default_language: &'a str, generate_feed: bool, feed_filename: &'a str, - taxonomies: &'a [taxonomies::Taxonomy], + taxonomies: &'a [taxonomies::TaxonomyConfig], build_search_index: bool, extra: &'a HashMap, } @@ -109,7 +109,7 @@ impl Config { /// Parses a string containing TOML to our Config struct /// Any extra parameter will end up in the extra field pub fn parse(content: &str) -> Result { - let mut config: Config = match toml::from_str(content) { + let mut config: Config = match libs::toml::from_str(content) { Ok(c) => c, Err(e) => bail!(e), }; @@ -124,6 +124,7 @@ impl Config { } config.add_default_language(); + config.slugify_taxonomies(); if !config.ignored_content.is_empty() { // Convert the file glob strings into a compiled glob set matcher. 
We want to do this once, @@ -149,19 +150,19 @@ impl Config { pub fn default_for_test() -> Self { let mut config = Config::default(); config.add_default_language(); + config.slugify_taxonomies(); config } /// Parses a config file from the given path pub fn from_file>(path: P) -> Result { let path = path.as_ref(); - let content = - read_file(path).map_err(|e| errors::Error::chain("Failed to load config", e))?; + let content = read_file(path)?; let mut config = Config::parse(&content)?; let config_dir = path .parent() - .ok_or_else(|| Error::msg("Failed to find directory containing the config file."))?; + .ok_or_else(|| anyhow!("Failed to find directory containing the config file."))?; // this is the step at which missing extra syntax and highlighting themes are raised as errors config.markdown.init_extra_syntaxes_and_highlight_themes(config_dir)?; @@ -169,6 +170,14 @@ impl Config { Ok(config) } + pub fn slugify_taxonomies(&mut self) { + for (_, lang_options) in self.languages.iter_mut() { + for tax_def in lang_options.taxonomies.iter_mut() { + tax_def.slug = slugify_paths(&tax_def.name, self.slugify.taxonomies); + } + } + } + /// Makes a url, taking into account that the base url might have a trailing slash pub fn make_permalink(&self, path: &str) -> String { let trailing_bit = @@ -247,6 +256,10 @@ impl Config { others } + pub fn other_languages_codes(&self) -> Vec<&str> { + self.languages.keys().filter(|k| *k != &self.default_language).map(|k| k.as_str()).collect() + } + /// Is this site using i18n? 
pub fn is_multilingual(&self) -> bool { !self.other_languages().is_empty() @@ -272,10 +285,7 @@ impl Config { .translations .get(key) .ok_or_else(|| { - Error::msg(format!( - "Translation key '{}' for language '{}' is missing", - key, lang - )) + anyhow!("Translation key '{}' for language '{}' is missing", key, lang) }) .map(|term| term.to_string()) } else { @@ -283,6 +293,14 @@ impl Config { } } + pub fn has_taxonomy(&self, name: &str, lang: &str) -> bool { + if let Some(lang_options) = self.languages.get(lang) { + lang_options.taxonomies.iter().any(|t| t.name == name) + } else { + false + } + } + pub fn serialize(&self, lang: &str) -> SerializedConfig { let options = &self.languages[lang]; @@ -325,7 +343,7 @@ pub fn merge(into: &mut Toml, from: &Toml) -> Result<()> { } _ => { // Trying to merge a table with something else - Err(Error::msg(&format!("Cannot merge config.toml with theme.toml because the following values have incompatibles types:\n- {}\n - {}", into, from))) + Err(anyhow!("Cannot merge config.toml with theme.toml because the following values have incompatibles types:\n- {}\n - {}", into, from)) } } } @@ -561,21 +579,28 @@ ignored_content = [] let config_str = r#" title = "My site" base_url = "example.com" -ignored_content = ["*.{graphml,iso}", "*.py?"] +ignored_content = ["*.{graphml,iso}", "*.py?", "**/{target,temp_folder}"] "#; let config = Config::parse(config_str).unwrap(); let v = config.ignored_content; - assert_eq!(v, vec!["*.{graphml,iso}", "*.py?"]); + assert_eq!(v, vec!["*.{graphml,iso}", "*.py?", "**/{target,temp_folder}"]); let g = config.ignored_content_globset.unwrap(); - assert_eq!(g.len(), 2); + assert_eq!(g.len(), 3); assert!(g.is_match("foo.graphml")); + assert!(g.is_match("foo/bar/foo.graphml")); assert!(g.is_match("foo.iso")); assert!(!g.is_match("foo.png")); assert!(g.is_match("foo.py2")); assert!(g.is_match("foo.py3")); assert!(!g.is_match("foo.py")); + assert!(g.is_match("foo/bar/target")); + 
assert!(g.is_match("foo/bar/baz/temp_folder")); + assert!(g.is_match("foo/bar/baz/temp_folder/target")); + assert!(g.is_match("temp_folder")); + assert!(g.is_match("my/isos/foo.iso")); + assert!(g.is_match("content/poetry/zen.py2")); } #[test] @@ -652,7 +677,7 @@ bar = "baz" "#; let theme = Theme::parse(theme_str).unwrap(); // We expect an error here - assert!(!config.add_theme_extra(&theme).is_ok()); + assert!(config.add_theme_extra(&theme).is_err()); } #[test] @@ -689,7 +714,7 @@ highlight_theme = "asdf" "#; let config = Config::parse(config); - assert_eq!(config.is_err(), true); + assert!(config.is_err()); } #[test] @@ -703,7 +728,7 @@ highlight_themes_css = [ "#; let config = Config::parse(config); - assert_eq!(config.is_err(), true); + assert!(config.is_err()); } // https://github.com/getzola/zola/issues/1687 diff --git a/components/config/src/config/search.rs b/components/config/src/config/search.rs index 71ff325d74..aa36c46e76 100644 --- a/components/config/src/config/search.rs +++ b/components/config/src/config/search.rs @@ -1,4 +1,4 @@ -use serde_derive::{Deserialize, Serialize}; +use serde::{Deserialize, Serialize}; #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] #[serde(default)] diff --git a/components/config/src/config/slugify.rs b/components/config/src/config/slugify.rs index dae4be61ef..c22065b34d 100644 --- a/components/config/src/config/slugify.rs +++ b/components/config/src/config/slugify.rs @@ -1,4 +1,4 @@ -use serde_derive::{Deserialize, Serialize}; +use serde::{Deserialize, Serialize}; use utils::slugs::SlugifyStrategy; diff --git a/components/config/src/config/taxonomies.rs b/components/config/src/config/taxonomies.rs index ac93a7f4b7..6638471dac 100644 --- a/components/config/src/config/taxonomies.rs +++ b/components/config/src/config/taxonomies.rs @@ -1,19 +1,36 @@ -use serde_derive::{Deserialize, Serialize}; +use serde::{Deserialize, Serialize}; -#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)] 
+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] #[serde(default)] -pub struct Taxonomy { +pub struct TaxonomyConfig { /// The name used in the URL, usually the plural pub name: String, + /// The slug according to the config slugification strategy + pub slug: String, /// If this is set, the list of individual taxonomy term page will be paginated /// by this much pub paginate_by: Option, pub paginate_path: Option, - /// Whether to generate a feed only for each taxonomy term, defaults to false + /// Whether the taxonomy will be rendered, defaults to `true` + pub render: bool, + /// Whether to generate a feed only for each taxonomy term, defaults to `false` pub feed: bool, } -impl Taxonomy { +impl Default for TaxonomyConfig { + fn default() -> Self { + Self { + name: String::new(), + slug: String::new(), + paginate_by: None, + paginate_path: None, + render: true, + feed: false, + } + } +} + +impl TaxonomyConfig { pub fn is_paginated(&self) -> bool { if let Some(paginate_by) = self.paginate_by { paginate_by > 0 diff --git a/components/config/src/highlighting.rs b/components/config/src/highlighting.rs index 40e778a30b..03cfc95d19 100644 --- a/components/config/src/highlighting.rs +++ b/components/config/src/highlighting.rs @@ -1,22 +1,18 @@ -use lazy_static::lazy_static; -use syntect::dumps::from_binary; -use syntect::highlighting::{Theme, ThemeSet}; -use syntect::html::ClassStyle; -use syntect::parsing::{SyntaxReference, SyntaxSet}; +use libs::once_cell::sync::Lazy; +use libs::syntect::dumps::from_binary; +use libs::syntect::highlighting::{Theme, ThemeSet}; +use libs::syntect::html::ClassStyle; +use libs::syntect::parsing::{SyntaxReference, SyntaxSet}; use crate::config::Config; pub const CLASS_STYLE: ClassStyle = ClassStyle::SpacedPrefixed { prefix: "z-" }; -lazy_static! 
{ - pub static ref SYNTAX_SET: SyntaxSet = { - let ss: SyntaxSet = - from_binary(include_bytes!("../../../sublime/syntaxes/newlines.packdump")); - ss - }; - pub static ref THEME_SET: ThemeSet = - from_binary(include_bytes!("../../../sublime/themes/all.themedump")); -} +pub static SYNTAX_SET: Lazy = + Lazy::new(|| from_binary(include_bytes!("../../../sublime/syntaxes/newlines.packdump"))); + +pub static THEME_SET: Lazy = + Lazy::new(|| from_binary(include_bytes!("../../../sublime/themes/all.themedump"))); #[derive(Clone, Debug, PartialEq, Eq)] pub enum HighlightSource { diff --git a/components/config/src/lib.rs b/components/config/src/lib.rs index 4f85fde224..59c6568b74 100644 --- a/components/config/src/lib.rs +++ b/components/config/src/lib.rs @@ -2,14 +2,14 @@ mod config; pub mod highlighting; mod theme; +use std::path::Path; + pub use crate::config::{ - languages::LanguageOptions, link_checker::LinkChecker, search::Search, slugify::Slugify, - taxonomies::Taxonomy, Config, + languages::LanguageOptions, link_checker::LinkChecker, link_checker::LinkCheckerLevel, + search::Search, slugify::Slugify, taxonomies::TaxonomyConfig, Config, }; use errors::Result; -use std::path::Path; - /// Get and parse the config. /// If it doesn't succeed, exit pub fn get_config(filename: &Path) -> Result { diff --git a/components/config/src/theme.rs b/components/config/src/theme.rs index 7daa24864e..b2a6ac5114 100644 --- a/components/config/src/theme.rs +++ b/components/config/src/theme.rs @@ -1,10 +1,10 @@ use std::collections::HashMap; use std::path::Path; -use serde_derive::{Deserialize, Serialize}; -use toml::Value as Toml; +use libs::toml::Value as Toml; +use serde::{Deserialize, Serialize}; -use errors::{bail, Result}; +use errors::{bail, Context, Result}; use utils::fs::read_file; /// Holds the data from a `theme.toml` file. 
@@ -40,8 +40,8 @@ impl Theme { /// Parses a theme file from the given path pub fn from_file(path: &Path, theme_name: &str) -> Result { - let content = read_file(path) - .map_err(|e| errors::Error::chain(format!("Failed to load theme {}", theme_name), e))?; + let content = + read_file(path).with_context(|| format!("Failed to load theme {}", theme_name))?; Theme::parse(&content) } } diff --git a/components/console/Cargo.toml b/components/console/Cargo.toml new file mode 100644 index 0000000000..931400e669 --- /dev/null +++ b/components/console/Cargo.toml @@ -0,0 +1,8 @@ +[package] +name = "console" +version = "0.1.0" +edition = "2021" + +[dependencies] +errors = { path = "../errors" } +libs = { path = "../libs" } diff --git a/components/console/src/lib.rs b/components/console/src/lib.rs new file mode 100644 index 0000000000..e480c8f3f9 --- /dev/null +++ b/components/console/src/lib.rs @@ -0,0 +1,57 @@ +use std::env; +use std::io::Write; + +use libs::atty; +use libs::once_cell::sync::Lazy; +use libs::termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor}; + +/// Termcolor color choice. +/// We do not rely on ColorChoice::Auto behavior +/// as the check is already performed by has_color. 
+static COLOR_CHOICE: Lazy = + Lazy::new(|| if has_color() { ColorChoice::Always } else { ColorChoice::Never }); + +pub fn info(message: &str) { + colorize(message, ColorSpec::new().set_bold(true), StandardStream::stdout(*COLOR_CHOICE)); +} + +pub fn warn(message: &str) { + colorize( + &format!("{}{}", "Warning: ", message), + ColorSpec::new().set_bold(true).set_fg(Some(Color::Yellow)), + StandardStream::stdout(*COLOR_CHOICE), + ); +} + +pub fn success(message: &str) { + colorize( + message, + ColorSpec::new().set_bold(true).set_fg(Some(Color::Green)), + StandardStream::stdout(*COLOR_CHOICE), + ); +} + +pub fn error(message: &str) { + colorize( + &format!("{}{}", "Error: ", message), + ColorSpec::new().set_bold(true).set_fg(Some(Color::Red)), + StandardStream::stderr(*COLOR_CHOICE), + ); +} + +/// Print a colorized message to stdout +fn colorize(message: &str, color: &ColorSpec, mut stream: StandardStream) { + stream.set_color(color).unwrap(); + write!(stream, "{}", message).unwrap(); + stream.set_color(&ColorSpec::new()).unwrap(); + writeln!(stream).unwrap(); +} + +/// Check whether to output colors +fn has_color() -> bool { + let use_colors = env::var("CLICOLOR").unwrap_or_else(|_| "1".to_string()) != "0" + && env::var("NO_COLOR").is_err(); + let force_colors = env::var("CLICOLOR_FORCE").unwrap_or_else(|_| "0".to_string()) != "0"; + + force_colors || use_colors && atty::is(atty::Stream::Stdout) +} diff --git a/components/content/Cargo.toml b/components/content/Cargo.toml new file mode 100644 index 0000000000..9a958badcf --- /dev/null +++ b/components/content/Cargo.toml @@ -0,0 +1,20 @@ +[package] +name = "content" +version = "0.1.0" +edition = "2021" + +[dependencies] +serde = {version = "1.0", features = ["derive"] } +time = { version = "0.3", features = ["macros"] } + +errors = { path = "../errors" } +utils = { path = "../utils" } +libs = { path = "../libs" } +config = { path = "../config" } + +# TODO: remove it? 
+markdown = { path = "../markdown" } + +[dev-dependencies] +test-case = "2" # TODO: can we solve that usecase in src/page.rs in a simpler way? A custom macro_rules! maybe +tempfile = "3.3.0" diff --git a/components/library/src/content/file_info.rs b/components/content/src/file_info.rs similarity index 84% rename from components/library/src/content/file_info.rs rename to components/content/src/file_info.rs index 606d1d462a..941d8ea129 100644 --- a/components/library/src/content/file_info.rs +++ b/components/content/src/file_info.rs @@ -1,6 +1,5 @@ use std::path::{Path, PathBuf}; -use config::Config; use errors::{bail, Result}; /// Takes a full path to a file and returns only the components after the first `content` directory @@ -115,14 +114,18 @@ impl FileInfo { /// Look for a language in the filename. /// If a language has been found, update the name of the file in this struct to /// remove it and return the language code - pub fn find_language(&mut self, config: &Config) -> Result { + pub fn find_language( + &mut self, + default_language: &str, + other_languages: &[&str], + ) -> Result { // No languages? 
Nothing to do - if !config.is_multilingual() { - return Ok(config.default_language.clone()); + if other_languages.is_empty() { + return Ok(default_language.to_owned()); } if !self.name.contains('.') { - return Ok(config.default_language.clone()); + return Ok(default_language.to_owned()); } // Go with the assumption that no one is using `.` in filenames when using i18n @@ -130,13 +133,13 @@ impl FileInfo { let mut parts: Vec = self.name.splitn(2, '.').map(|s| s.to_string()).collect(); // If language code is same as default language, go for default - if config.default_language == parts[1].as_str() { - return Ok(config.default_language.clone()); + if default_language == parts[1].as_str() { + return Ok(default_language.to_owned()); } // The language code is not present in the config: typo or the user forgot to add it to the // config - if !config.other_languages().contains_key(&parts[1].as_ref()) { + if !other_languages.contains(&parts[1].as_ref()) { bail!("File {:?} has a language code of {} which isn't present in the config.toml `languages`", self.path, parts[1]); } @@ -152,8 +155,6 @@ impl FileInfo { mod tests { use std::path::{Path, PathBuf}; - use config::{Config, LanguageOptions}; - use super::{find_content_components, FileInfo}; #[test] @@ -183,77 +184,66 @@ mod tests { #[test] fn can_find_valid_language_in_page() { - let mut config = Config::default(); - config.languages.insert("fr".to_owned(), LanguageOptions::default()); let mut file = FileInfo::new_page( Path::new("/home/vincent/code/site/content/posts/tutorials/python.fr.md"), &PathBuf::new(), ); - let res = file.find_language(&config); + let res = file.find_language("en", &["fr"]); assert!(res.is_ok()); assert_eq!(res.unwrap(), "fr"); } #[test] fn can_find_valid_language_with_default_locale() { - let mut config = Config::default(); - config.languages.insert("fr".to_owned(), LanguageOptions::default()); let mut file = FileInfo::new_page( 
Path::new("/home/vincent/code/site/content/posts/tutorials/python.en.md"), &PathBuf::new(), ); - let res = file.find_language(&config); + let res = file.find_language("en", &["fr"]); assert!(res.is_ok()); - assert_eq!(res.unwrap(), config.default_language); + assert_eq!(res.unwrap(), "en"); } #[test] fn can_find_valid_language_in_page_with_assets() { - let mut config = Config::default(); - config.languages.insert("fr".to_owned(), LanguageOptions::default()); let mut file = FileInfo::new_page( Path::new("/home/vincent/code/site/content/posts/tutorials/python/index.fr.md"), &PathBuf::new(), ); assert_eq!(file.components, ["posts".to_string(), "tutorials".to_string()]); - let res = file.find_language(&config); + let res = file.find_language("en", &["fr"]); assert!(res.is_ok()); assert_eq!(res.unwrap(), "fr"); } #[test] fn do_nothing_on_unknown_language_in_page_with_i18n_off() { - let config = Config::default(); let mut file = FileInfo::new_page( Path::new("/home/vincent/code/site/content/posts/tutorials/python.fr.md"), &PathBuf::new(), ); - let res = file.find_language(&config); + let res = file.find_language("en", &[]); assert!(res.is_ok()); - assert_eq!(res.unwrap(), config.default_language); + assert_eq!(res.unwrap(), "en"); } #[test] fn errors_on_unknown_language_in_page_with_i18n_on() { - let mut config = Config::default(); - config.languages.insert("it".to_owned(), LanguageOptions::default()); let mut file = FileInfo::new_page( Path::new("/home/vincent/code/site/content/posts/tutorials/python.fr.md"), &PathBuf::new(), ); - let res = file.find_language(&config); + let res = file.find_language("en", &["it"]); assert!(res.is_err()); } #[test] fn can_find_valid_language_in_section() { - let mut config = Config::default(); - config.languages.insert("fr".to_owned(), LanguageOptions::default()); let mut file = FileInfo::new_section( Path::new("/home/vincent/code/site/content/posts/tutorials/_index.fr.md"), &PathBuf::new(), ); - let res = file.find_language(&config); + 
let res = file.find_language("en", &["fr"]); assert!(res.is_ok()); assert_eq!(res.unwrap(), "fr"); } @@ -274,13 +264,11 @@ mod tests { /// Regression test for https://github.com/getzola/zola/issues/854 #[test] fn correct_canonical_after_find_language() { - let mut config = Config::default(); - config.languages.insert("fr".to_owned(), LanguageOptions::default()); let mut file = FileInfo::new_page( Path::new("/home/vincent/code/site/content/posts/tutorials/python/index.fr.md"), &PathBuf::new(), ); - let res = file.find_language(&config); + let res = file.find_language("en", &["fr"]); assert!(res.is_ok()); assert_eq!( file.canonical, diff --git a/components/content/src/front_matter/mod.rs b/components/content/src/front_matter/mod.rs new file mode 100644 index 0000000000..94c5f51dd0 --- /dev/null +++ b/components/content/src/front_matter/mod.rs @@ -0,0 +1,7 @@ +mod page; +mod section; +mod split; + +pub use page::PageFrontMatter; +pub use section::SectionFrontMatter; +pub use split::{split_page_content, split_section_content}; diff --git a/components/front_matter/src/page.rs b/components/content/src/front_matter/page.rs similarity index 87% rename from components/front_matter/src/page.rs rename to components/content/src/front_matter/page.rs index 7d2b7351f2..8213b6712e 100644 --- a/components/front_matter/src/page.rs +++ b/components/content/src/front_matter/page.rs @@ -1,13 +1,15 @@ use std::collections::HashMap; -use chrono::prelude::*; -use serde_derive::Deserialize; -use tera::{Map, Value}; +use libs::tera::{Map, Value}; +use serde::Deserialize; +use time::format_description::well_known::Rfc3339; +use time::macros::{format_description, time}; +use time::{Date, OffsetDateTime, PrimitiveDateTime}; use errors::{bail, Result}; use utils::de::{fix_toml_dates, from_toml_datetime}; -use crate::RawFrontMatter; +use crate::front_matter::split::RawFrontMatter; /// The front matter of every page #[derive(Debug, Clone, PartialEq, Deserialize)] @@ -20,21 +22,21 @@ pub struct 
PageFrontMatter { /// Updated date #[serde(default, deserialize_with = "from_toml_datetime")] pub updated: Option, - /// Chrono converted update datatime + /// Datetime content was last updated #[serde(default, skip_deserializing)] - pub updated_datetime: Option, + pub updated_datetime: Option, /// The converted update datetime into a (year, month, day) tuple #[serde(default, skip_deserializing)] - pub updated_datetime_tuple: Option<(i32, u32, u32)>, + pub updated_datetime_tuple: Option<(i32, u8, u8)>, /// Date if we want to order pages (ie blog post) #[serde(default, deserialize_with = "from_toml_datetime")] pub date: Option, - /// Chrono converted datetime + /// Datetime content was created #[serde(default, skip_deserializing)] - pub datetime: Option, + pub datetime: Option, /// The converted date into a (year, month, day) tuple #[serde(default, skip_deserializing)] - pub datetime_tuple: Option<(i32, u32, u32)>, + pub datetime_tuple: Option<(i32, u8, u8)>, /// Whether this page is a draft pub draft: bool, /// The page slug. Will be used instead of the filename if present @@ -68,11 +70,13 @@ pub struct PageFrontMatter { /// 2. a local datetime (RFC3339 with timezone omitted) /// 3. a local date (YYYY-MM-DD). /// This tries each in order. 
-fn parse_datetime(d: &str) -> Option { - DateTime::parse_from_rfc3339(d) - .or_else(|_| DateTime::parse_from_rfc3339(format!("{}Z", d).as_ref())) - .map(|s| s.naive_local()) - .or_else(|_| NaiveDate::parse_from_str(d, "%Y-%m-%d").map(|s| s.and_hms(0, 0, 0))) +fn parse_datetime(d: &str) -> Option { + OffsetDateTime::parse(d, &Rfc3339) + .or_else(|_| OffsetDateTime::parse(format!("{}Z", d).as_ref(), &Rfc3339)) + .or_else(|_| match Date::parse(d, &format_description!("[year]-[month]-[day]")) { + Ok(date) => Ok(PrimitiveDateTime::new(date, time!(0:00)).assume_utc()), + Err(e) => Err(e), + }) .ok() } @@ -108,15 +112,15 @@ impl PageFrontMatter { Ok(f) } - /// Converts the TOML datetime to a Chrono naive datetime + /// Converts the TOML datetime to a time::OffsetDateTime /// Also grabs the year/month/day tuple that will be used in serialization pub fn date_to_datetime(&mut self) { self.datetime = self.date.as_ref().map(|s| s.as_ref()).and_then(parse_datetime); - self.datetime_tuple = self.datetime.map(|dt| (dt.year(), dt.month(), dt.day())); + self.datetime_tuple = self.datetime.map(|dt| (dt.year(), dt.month().into(), dt.day())); self.updated_datetime = self.updated.as_ref().map(|s| s.as_ref()).and_then(parse_datetime); self.updated_datetime_tuple = - self.updated_datetime.map(|dt| (dt.year(), dt.month(), dt.day())); + self.updated_datetime.map(|dt| (dt.year(), dt.month().into(), dt.day())); } pub fn weight(&self) -> usize { @@ -127,6 +131,7 @@ impl PageFrontMatter { impl Default for PageFrontMatter { fn default() -> PageFrontMatter { PageFrontMatter { + in_search_index: true, title: None, description: None, updated: None, @@ -141,7 +146,6 @@ impl Default for PageFrontMatter { taxonomies: HashMap::new(), weight: None, aliases: Vec::new(), - in_search_index: true, template: None, extra: Map::new(), } @@ -150,10 +154,11 @@ impl Default for PageFrontMatter { #[cfg(test)] mod tests { - use super::PageFrontMatter; - use super::RawFrontMatter; - use tera::to_value; + use 
crate::front_matter::page::PageFrontMatter; + use crate::front_matter::split::RawFrontMatter; + use libs::tera::to_value; use test_case::test_case; + use time::macros::datetime; #[test_case(&RawFrontMatter::Toml(r#" "#); "toml")] #[test_case(&RawFrontMatter::Toml(r#" "#); "yaml")] @@ -229,6 +234,7 @@ date: 2016-10-10 fn can_parse_date_yyyy_mm_dd(content: &RawFrontMatter) { let res = PageFrontMatter::parse(content).unwrap(); assert!(res.datetime.is_some()); + assert_eq!(res.datetime.unwrap(), datetime!(2016 - 10 - 10 0:00 UTC)); } #[test_case(&RawFrontMatter::Toml(r#" @@ -244,6 +250,7 @@ date: 2002-10-02T15:00:00Z fn can_parse_date_rfc3339(content: &RawFrontMatter) { let res = PageFrontMatter::parse(content).unwrap(); assert!(res.datetime.is_some()); + assert_eq!(res.datetime.unwrap(), datetime!(2002 - 10 - 02 15:00:00 UTC)); } #[test_case(&RawFrontMatter::Toml(r#" @@ -259,6 +266,7 @@ date: 2002-10-02T15:00:00 fn can_parse_date_rfc3339_without_timezone(content: &RawFrontMatter) { let res = PageFrontMatter::parse(content).unwrap(); assert!(res.datetime.is_some()); + assert_eq!(res.datetime.unwrap(), datetime!(2002 - 10 - 02 15:00:00 UTC)); } #[test_case(&RawFrontMatter::Toml(r#" @@ -274,6 +282,7 @@ date: 2002-10-02 15:00:00+02:00 fn can_parse_date_rfc3339_with_space(content: &RawFrontMatter) { let res = PageFrontMatter::parse(content).unwrap(); assert!(res.datetime.is_some()); + assert_eq!(res.datetime.unwrap(), datetime!(2002 - 10 - 02 15:00:00+02:00)); } #[test_case(&RawFrontMatter::Toml(r#" @@ -289,6 +298,7 @@ date: 2002-10-02 15:00:00 fn can_parse_date_rfc3339_with_space_without_timezone(content: &RawFrontMatter) { let res = PageFrontMatter::parse(content).unwrap(); assert!(res.datetime.is_some()); + assert_eq!(res.datetime.unwrap(), datetime!(2002 - 10 - 02 15:00:00 UTC)); } #[test_case(&RawFrontMatter::Toml(r#" @@ -304,6 +314,7 @@ date: 2002-10-02T15:00:00.123456Z fn can_parse_date_rfc3339_with_microseconds(content: &RawFrontMatter) { let res = 
PageFrontMatter::parse(content).unwrap(); assert!(res.datetime.is_some()); + assert_eq!(res.datetime.unwrap(), datetime!(2002 - 10 - 02 15:00:00.123456 UTC)); } #[test_case(&RawFrontMatter::Toml(r#" @@ -349,6 +360,8 @@ date: "2016-10-10" fn can_parse_valid_date_as_string(content: &RawFrontMatter) { let res = PageFrontMatter::parse(content).unwrap(); assert!(res.date.is_some()); + assert!(res.datetime.is_some()); + assert_eq!(res.datetime.unwrap(), datetime!(2016 - 10 - 10 0:00 UTC)); } #[test_case(&RawFrontMatter::Toml(r#" diff --git a/components/front_matter/src/section.rs b/components/content/src/front_matter/section.rs similarity index 96% rename from components/front_matter/src/section.rs rename to components/content/src/front_matter/section.rs index 063ba01451..9a26697329 100644 --- a/components/front_matter/src/section.rs +++ b/components/content/src/front_matter/section.rs @@ -1,11 +1,12 @@ -use serde_derive::{Deserialize, Serialize}; -use tera::{Map, Value}; +use libs::tera::{Map, Value}; +use serde::{Deserialize, Serialize}; -use super::{InsertAnchor, SortBy}; use errors::Result; use utils::de::fix_toml_dates; +use utils::types::InsertAnchor; -use crate::RawFrontMatter; +use crate::front_matter::split::RawFrontMatter; +use crate::SortBy; static DEFAULT_PAGINATE_PATH: &str = "page"; @@ -58,7 +59,6 @@ pub struct SectionFrontMatter { /// Whether the section should pass its pages on to the parent section. Defaults to `false`. /// Useful when the section shouldn't split up the parent section, like /// sections for each year under a posts section. 
- #[serde(skip_serializing)] pub transparent: bool, /// Optional template for all pages in this section (including the pages of children section) #[serde(skip_serializing)] diff --git a/components/front_matter/src/lib.rs b/components/content/src/front_matter/split.rs similarity index 78% rename from components/front_matter/src/lib.rs rename to components/content/src/front_matter/split.rs index ddfcd2b68b..d7aede9cf0 100644 --- a/components/front_matter/src/lib.rs +++ b/components/content/src/front_matter/split.rs @@ -1,26 +1,24 @@ -use lazy_static::lazy_static; -use serde_derive::{Deserialize, Serialize}; - -use errors::{bail, Error, Result}; -use regex::Regex; use std::path::Path; -mod page; -mod section; +use errors::{bail, Context, Result}; +use libs::once_cell::sync::Lazy; +use libs::regex::Regex; +use libs::{serde_yaml, toml}; -pub use page::PageFrontMatter; -pub use section::SectionFrontMatter; +use crate::front_matter::page::PageFrontMatter; +use crate::front_matter::section::SectionFrontMatter; -lazy_static! 
{ - static ref TOML_RE: Regex = Regex::new( - r"^[[:space:]]*\+\+\+(\r?\n(?s).*?(?-s))\+\+\+[[:space:]]*(?:$|(?:\r?\n((?s).*(?-s))$))" - ) - .unwrap(); - static ref YAML_RE: Regex = Regex::new( - r"^[[:space:]]*---(\r?\n(?s).*?(?-s))---[[:space:]]*(?:$|(?:\r?\n((?s).*(?-s))$))" +static TOML_RE: Lazy = Lazy::new(|| { + Regex::new( + r"^[[:space:]]*\+\+\+(\r?\n(?s).*?(?-s))\+\+\+[[:space:]]*(?:$|(?:\r?\n((?s).*(?-s))$))", ) - .unwrap(); -} + .unwrap() +}); + +static YAML_RE: Lazy = Lazy::new(|| { + Regex::new(r"^[[:space:]]*---(\r?\n(?s).*?(?-s))---[[:space:]]*(?:$|(?:\r?\n((?s).*(?-s))$))") + .unwrap() +}); pub enum RawFrontMatter<'a> { Toml(&'a str), @@ -28,7 +26,7 @@ pub enum RawFrontMatter<'a> { } impl RawFrontMatter<'_> { - fn deserialize(&self) -> Result + pub(crate) fn deserialize(&self) -> Result where T: serde::de::DeserializeOwned, { @@ -36,36 +34,13 @@ impl RawFrontMatter<'_> { RawFrontMatter::Toml(s) => toml::from_str(s)?, RawFrontMatter::Yaml(s) => match serde_yaml::from_str(s) { Ok(d) => d, - Err(e) => bail!(format!("YAML deserialize error: {:?}", e)), + Err(e) => bail!("YAML deserialize error: {:?}", e), }, }; Ok(f) } } -#[derive(Debug, Copy, Clone, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "lowercase")] -pub enum SortBy { - /// Most recent to oldest - Date, - /// Most recent to oldest - UpdateDate, - /// Sort by title - Title, - /// Lower weight comes first - Weight, - /// No sorting - None, -} - -#[derive(Debug, Copy, Clone, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "lowercase")] -pub enum InsertAnchor { - Left, - Right, - None, -} - /// Split a file between the front matter and its content /// Will return an error if the front matter wasn't found fn split_content<'c>(file_path: &Path, content: &'c str) -> Result<(RawFrontMatter<'c>, &'c str)> { @@ -102,12 +77,10 @@ pub fn split_section_content<'c>( content: &'c str, ) -> Result<(SectionFrontMatter, &'c str)> { let (front_matter, content) = split_content(file_path, 
content)?; - let meta = SectionFrontMatter::parse(&front_matter).map_err(|e| { - Error::chain( - format!("Error when parsing front matter of section `{}`", file_path.to_string_lossy()), - e, - ) + let meta = SectionFrontMatter::parse(&front_matter).with_context(|| { + format!("Error when parsing front matter of section `{}`", file_path.to_string_lossy()) + })?; + + Ok((meta, content)) + } @@ -118,11 +91,8 @@ pub fn split_page_content<'c>( content: &'c str, ) -> Result<(PageFrontMatter, &'c str)> { let (front_matter, content) = split_content(file_path, content)?; - let meta = PageFrontMatter::parse(&front_matter).map_err(|e| { - Error::chain( - format!("Error when parsing front matter of page `{}`", file_path.to_string_lossy()), - e, - ) + let meta = PageFrontMatter::parse(&front_matter).with_context(|| { + format!("Error when parsing front matter of page `{}`", file_path.to_string_lossy()) + })?; Ok((meta, content)) } diff --git a/components/content/src/lib.rs b/components/content/src/lib.rs new file mode 100644 index 0000000000..8741288884 --- /dev/null +++ b/components/content/src/lib.rs @@ -0,0 +1,21 @@ +mod front_matter; + +mod file_info; +mod library; +mod page; +mod pagination; +mod section; +mod ser; +mod sorting; +mod taxonomies; +mod types; +mod utils; + +pub use file_info::FileInfo; +pub use front_matter::{PageFrontMatter, SectionFrontMatter}; +pub use library::Library; +pub use page::Page; +pub use pagination::Paginator; +pub use section::Section; +pub use taxonomies::{Taxonomy, TaxonomyTerm}; +pub use types::*; diff --git a/components/content/src/library.rs b/components/content/src/library.rs new file mode 100644 index 0000000000..a4e4b4e3ce --- /dev/null +++ b/components/content/src/library.rs @@ -0,0 +1,782 @@ +use std::path::{Path, PathBuf}; + +use config::Config; +use libs::ahash::{AHashMap, AHashSet}; + +use crate::ser::TranslatedContent; +use crate::sorting::sort_pages; +use crate::taxonomies::{Taxonomy, TaxonomyFound}; +use crate::{Page, Section,
SortBy}; + +macro_rules! set { + ($($key:expr,)+) => (set!($($key),+)); + + ( $($key:expr),* ) => { + { + let mut _set = AHashSet::new(); + $( + _set.insert($key); + )* + _set + } + }; +} + +#[derive(Debug, Default)] +pub struct Library { + pub pages: AHashMap, + pub sections: AHashMap, + // aliases -> files, so we can easily check for conflicts + pub reverse_aliases: AHashMap>, + pub translations: AHashMap>, + pub backlinks: AHashMap>, + // A mapping of {lang -> vec}>>} + taxonomies_def: AHashMap>>>, + // All the taxonomies from config.toml in their slugifiedv ersion + // So we don't need to pass the Config when adding a page to know how to slugify and we only + // slugify once + taxo_name_to_slug: AHashMap, +} + +impl Library { + pub fn new(config: &Config) -> Self { + let mut lib = Self::default(); + + for (lang, options) in &config.languages { + let mut taxas = AHashMap::new(); + for tax_def in &options.taxonomies { + taxas.insert(tax_def.slug.clone(), AHashMap::new()); + lib.taxo_name_to_slug.insert(tax_def.name.clone(), tax_def.slug.clone()); + } + lib.taxonomies_def.insert(lang.to_string(), taxas); + } + lib + } + + fn insert_reverse_aliases(&mut self, file_path: &Path, entries: Vec) { + for entry in entries { + self.reverse_aliases + .entry(entry) + .and_modify(|s| { + s.insert(file_path.to_path_buf()); + }) + .or_insert_with(|| set! {file_path.to_path_buf()}); + } + } + + /// This will check every section/page paths + the aliases and ensure none of them + /// are colliding. 
+ /// Returns Vec<(path colliding, [list of files causing that collision])> + pub fn find_path_collisions(&self) -> Vec<(String, Vec)> { + self.reverse_aliases + .iter() + .filter_map(|(alias, files)| { + if files.len() > 1 { + Some((alias.clone(), files.clone().into_iter().collect::>())) + } else { + None + } + }) + .collect() + } + + pub fn insert_page(&mut self, page: Page) { + let file_path = page.file.path.clone(); + let mut entries = vec![page.path.clone()]; + entries.extend(page.meta.aliases.to_vec()); + self.insert_reverse_aliases(&file_path, entries); + + for (taxa_name, terms) in &page.meta.taxonomies { + for term in terms { + // Safe unwraps as we create all lang/taxa and we validated that they are correct + // before getting there + let taxa_def = self + .taxonomies_def + .get_mut(&page.lang) + .expect("lang not found") + .get_mut(&self.taxo_name_to_slug[taxa_name]) + .expect("taxa not found"); + + if !taxa_def.contains_key(term) { + taxa_def.insert(term.to_string(), Vec::new()); + } + taxa_def.get_mut(term).unwrap().push(page.file.path.clone()); + } + } + + self.pages.insert(file_path, page); + } + + pub fn insert_section(&mut self, section: Section) { + let file_path = section.file.path.clone(); + if section.meta.render { + let mut entries = vec![section.path.clone()]; + entries.extend(section.meta.aliases.to_vec()); + self.insert_reverse_aliases(&file_path, entries); + } + self.sections.insert(file_path, section); + } + + /// Fills a map of target -> {content mentioning it} + /// This can only be called _after_ rendering markdown as we need to have accumulated all + /// the links first + pub fn fill_backlinks(&mut self) { + self.backlinks.clear(); + + let mut add_backlink = |target: &str, source: &Path| { + self.backlinks + .entry(target.to_owned()) + .and_modify(|s| { + s.insert(source.to_path_buf()); + }) + .or_insert(set! 
{source.to_path_buf()}); + }; + + for (_, page) in &self.pages { + for (internal_link, _) in &page.internal_links { + add_backlink(internal_link, &page.file.path); + } + } + for (_, section) in &self.sections { + for (internal_link, _) in &section.internal_links { + add_backlink(internal_link, &section.file.path); + } + } + } + + /// This is called _before_ rendering the markdown the pages/sections + pub fn find_taxonomies(&self, config: &Config) -> Vec<Taxonomy> { + let mut taxonomies = Vec::new(); + + for (lang, taxonomies_data) in &self.taxonomies_def { + for (taxa_slug, terms_pages) in taxonomies_data { + let taxo_config = &config.languages[lang] + .taxonomies + .iter() + .find(|t| &t.slug == taxa_slug) + .expect("taxo should exist"); + let mut taxo_found = TaxonomyFound::new(taxa_slug.to_string(), lang, taxo_config); + for (term, page_path) in terms_pages { + taxo_found + .terms + .insert(term, page_path.iter().map(|p| &self.pages[p]).collect()); + } + + taxonomies.push(Taxonomy::new(taxo_found, config)); + } + } + + taxonomies + } + + /// Sort all sections pages according to sorting method given + /// Pages that cannot be sorted are set to the section.ignored_pages instead + pub fn sort_section_pages(&mut self) { + let mut updates = AHashMap::new(); + for (path, section) in &self.sections { + let pages: Vec<_> = section.pages.iter().map(|p| &self.pages[p]).collect(); + let (sorted_pages, cannot_be_sorted_pages) = match section.meta.sort_by { + SortBy::None => continue, + _ => sort_pages(&pages, section.meta.sort_by), + }; + + updates + .insert(path.clone(), (sorted_pages, cannot_be_sorted_pages, section.meta.sort_by)); + } + + for (path, (sorted, unsortable, _)) in updates { + if !self.sections[&path].meta.transparent { + // Fill siblings + for (i, page_path) in sorted.iter().enumerate() { + let mut p = self.pages.get_mut(page_path).unwrap(); + if i > 0 { + // lighter / later / title_prev + p.lower = Some(sorted[i - 1].clone()); + } + + if i < sorted.len() - 1 { + // heavier /
earlier / title_next + p.higher = Some(sorted[i + 1].clone()); + } + } + } + + if let Some(s) = self.sections.get_mut(&path) { + s.pages = sorted; + s.ignored_pages = unsortable; + } + } + } + + /// Find out the direct subsections of each subsection if there are some + /// as well as the pages for each section + pub fn populate_sections(&mut self, config: &Config, content_path: &Path) { + let mut add_translation = |entry: &Path, path: &Path| { + if config.is_multilingual() { + self.translations + .entry(entry.to_path_buf()) + .and_modify(|trans| { + trans.insert(path.to_path_buf()); + }) + .or_insert(set! {path.to_path_buf()}); + } + }; + + let mut ancestors = AHashMap::new(); + let mut subsections = AHashMap::new(); + let mut sections_weight = AHashMap::new(); + + // We iterate over the sections twice + // The first time to build up the list of ancestors for each section + for (path, section) in &self.sections { + sections_weight.insert(path.clone(), section.meta.weight); + if let Some(ref grand_parent) = section.file.grand_parent { + subsections + // Using the original filename to work for multi-lingual sections + .entry(grand_parent.join(&section.file.filename)) + .or_insert_with(Vec::new) + .push(section.file.path.clone()); + } + + add_translation(&section.file.canonical, path); + + // Root sections have no ancestors + if section.is_index() { + ancestors.insert(section.file.path.clone(), vec![]); + continue; + } + + // Index section is the first ancestor of every single section + let mut cur_path = content_path.to_path_buf(); + let mut parents = vec![section.file.filename.clone()]; + for component in &section.file.components { + cur_path = cur_path.join(component); + // Skip itself + if cur_path == section.file.parent { + continue; + } + + let index_path = cur_path.join(&section.file.filename); + if let Some(s) = self.sections.get(&index_path) { + parents.push(s.file.relative.clone()); + } + } + ancestors.insert(section.file.path.clone(), parents); + } + + // The second time
we actually assign ancestors and order subsections based on their weights + for (path, section) in self.sections.iter_mut() { + section.subsections.clear(); + section.pages.clear(); + section.ignored_pages.clear(); + section.ancestors.clear(); + + if let Some(children) = subsections.get(&*path) { + let mut children: Vec<_> = children.clone(); + children.sort_by(|a, b| sections_weight[a].cmp(&sections_weight[b])); + section.subsections = children; + } + if let Some(parents) = ancestors.get(&*path) { + section.ancestors = parents.clone(); + } + } + + // We pre-build the index filename for each language + let mut index_filename_by_lang = AHashMap::with_capacity(config.languages.len()); + for code in config.languages.keys() { + if code == &config.default_language { + index_filename_by_lang.insert(code, "_index.md".to_owned()); + } else { + index_filename_by_lang.insert(code, format!("_index.{}.md", code)); + } + } + + // Then once we took care of the sections, we find the pages of each section + for (path, page) in self.pages.iter_mut() { + let parent_filename = &index_filename_by_lang[&page.lang]; + add_translation(&page.file.canonical, path); + let mut parent_section_path = page.file.parent.join(&parent_filename); + + while let Some(parent_section) = self.sections.get_mut(&parent_section_path) { + let is_transparent = parent_section.meta.transparent; + parent_section.pages.push(path.clone()); + page.ancestors = ancestors.get(&parent_section_path).cloned().unwrap_or_default(); + // Don't forget to push the actual parent + page.ancestors.push(parent_section.file.relative.clone()); + + // Find the page template if one of a parent has page_template set + // Stops after the first one found, keep in mind page.ancestors + // is [index, ..., parent] so we need to reverse it first + if page.meta.template.is_none() { + for ancestor in page.ancestors.iter().rev() { + let s = self.sections.get(&content_path.join(ancestor)).unwrap(); + if let Some(ref tpl) = s.meta.page_template { +
page.meta.template = Some(tpl.clone()); + break; + } + } + } + + if !is_transparent { + break; + } + + // We've added `_index(.{LANG})?.md` so if we are here so we need to go up twice + match parent_section_path.clone().parent().unwrap().parent() { + Some(parent) => parent_section_path = parent.join(&parent_filename), + None => break, + } + } + } + + // And once we have all the pages assigned to their section, we sort them + self.sort_section_pages(); + } + + /// Find all the orphan pages: pages that are in a folder without an `_index.md` + pub fn get_all_orphan_pages(&self) -> Vec<&Page> { + self.pages.iter().filter(|(_, p)| p.ancestors.is_empty()).map(|(_, p)| p).collect() + } + + /// Find all the translated content for a given canonical path. + /// The translated content can be either for a section or a page + pub fn find_translations(&self, canonical_path: &Path) -> Vec> { + let mut translations = vec![]; + + if let Some(paths) = self.translations.get(canonical_path) { + for path in paths { + let (lang, permalink, title, path) = { + if self.sections.contains_key(path) { + let s = &self.sections[path]; + (&s.lang, &s.permalink, &s.meta.title, &s.file.path) + } else { + let s = &self.pages[path]; + (&s.lang, &s.permalink, &s.meta.title, &s.file.path) + } + }; + translations.push(TranslatedContent { lang, permalink, title, path }); + } + } + + translations + } + + pub fn find_pages_by_path(&self, paths: &[PathBuf]) -> Vec<&Page> { + paths.iter().map(|p| &self.pages[p]).collect() + } + + pub fn find_sections_by_path(&self, paths: &[PathBuf]) -> Vec<&Section> { + paths.iter().map(|p| &self.sections[p]).collect() + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::FileInfo; + use config::{LanguageOptions, TaxonomyConfig}; + use std::collections::HashMap; + use utils::slugs::SlugifyStrategy; + + #[test] + fn can_find_collisions_with_paths() { + let mut library = Library::default(); + let mut section = Section { path: "hello".to_owned(), 
..Default::default() }; + section.file.path = PathBuf::from("hello.md"); + library.insert_section(section.clone()); + let mut section2 = Section { path: "hello".to_owned(), ..Default::default() }; + section2.file.path = PathBuf::from("bonjour.md"); + library.insert_section(section2.clone()); + + let collisions = library.find_path_collisions(); + assert_eq!(collisions.len(), 1); + assert_eq!(collisions[0].0, "hello"); + assert!(collisions[0].1.contains(&section.file.path)); + assert!(collisions[0].1.contains(&section2.file.path)); + } + + #[test] + fn can_find_collisions_with_aliases() { + let mut library = Library::default(); + let mut section = Section { path: "hello".to_owned(), ..Default::default() }; + section.file.path = PathBuf::from("hello.md"); + library.insert_section(section.clone()); + let mut section2 = Section { path: "world".to_owned(), ..Default::default() }; + section2.file.path = PathBuf::from("bonjour.md"); + section2.meta.aliases = vec!["hello".to_owned(), "hola".to_owned()]; + library.insert_section(section2.clone()); + // Sections with render=false do not collide with anything + // https://github.com/getzola/zola/issues/1656 + let mut section3 = Section { path: "world2".to_owned(), ..Default::default() }; + section3.meta.render = false; + section3.file.path = PathBuf::from("bonjour2.md"); + section3.meta.aliases = vec!["hola".to_owned()]; + library.insert_section(section3); + + let collisions = library.find_path_collisions(); + assert_eq!(collisions.len(), 1); + assert_eq!(collisions[0].0, "hello"); + assert!(collisions[0].1.contains(&section.file.path)); + assert!(collisions[0].1.contains(&section2.file.path)); + } + + #[derive(Debug, Clone)] + enum PageSort { + None, + Date(&'static str), + Title(&'static str), + Weight(usize), + } + + fn create_page(file_path: &str, lang: &str, page_sort: PageSort) -> Page { + let mut page = Page::default(); + page.lang = lang.to_owned(); + page.file = FileInfo::new_page(Path::new(file_path), &PathBuf::new()); + match
page_sort { + PageSort::None => (), + PageSort::Date(date) => { + page.meta.date = Some(date.to_owned()); + page.meta.date_to_datetime(); + } + PageSort::Title(title) => { + page.meta.title = Some(title.to_owned()); + } + PageSort::Weight(w) => { + page.meta.weight = Some(w); + } + } + page.file.find_language("en", &["fr"]).unwrap(); + page + } + + fn create_section( + file_path: &str, + lang: &str, + weight: usize, + transparent: bool, + sort_by: SortBy, + ) -> Section { + let mut section = Section::default(); + section.lang = lang.to_owned(); + section.file = FileInfo::new_section(Path::new(file_path), &PathBuf::new()); + section.meta.weight = weight; + section.meta.transparent = transparent; + section.meta.sort_by = sort_by; + section.meta.page_template = Some("new_page.html".to_owned()); + section.file.find_language("en", &["fr"]).unwrap(); + section + } + + #[test] + fn can_populate_sections() { + let mut config = Config::default_for_test(); + config.languages.insert("fr".to_owned(), LanguageOptions::default()); + let mut library = Library::default(); + let sections = vec![ + ("content/_index.md", "en", 0, false, SortBy::None), + ("content/_index.fr.md", "fr", 0, false, SortBy::None), + ("content/blog/_index.md", "en", 0, false, SortBy::Date), + ("content/wiki/_index.md", "en", 0, false, SortBy::Weight), + ("content/wiki/_index.fr.md", "fr", 0, false, SortBy::Weight), + ("content/wiki/recipes/_index.md", "en", 1, true, SortBy::Weight), + ("content/wiki/recipes/_index.fr.md", "fr", 1, true, SortBy::Weight), + ("content/wiki/programming/_index.md", "en", 10, true, SortBy::Weight), + ("content/wiki/programming/_index.fr.md", "fr", 10, true, SortBy::Weight), + ("content/novels/_index.md", "en", 10, false, SortBy::Title), + ("content/novels/_index.fr.md", "fr", 10, false, SortBy::Title), + ]; + for (p, l, w, t, s) in sections.clone() { + library.insert_section(create_section(p, l, w, t, s)); + } + + let pages = vec![ + ("content/about.md", "en", PageSort::None), + 
("content/about.fr.md", "en", PageSort::None), + ("content/blog/rust.md", "en", PageSort::Date("2022-01-01")), + ("content/blog/python.md", "en", PageSort::Date("2022-03-03")), + ("content/blog/docker.md", "en", PageSort::Date("2022-02-02")), + ("content/wiki/recipes/chocolate-cake.md", "en", PageSort::Weight(100)), + ("content/wiki/recipes/chocolate-cake.fr.md", "fr", PageSort::Weight(100)), + ("content/wiki/recipes/rendang.md", "en", PageSort::Weight(5)), + ("content/wiki/recipes/rendang.fr.md", "fr", PageSort::Weight(5)), + ("content/wiki/programming/rust.md", "en", PageSort::Weight(1)), + ("content/wiki/programming/rust.fr.md", "fr", PageSort::Weight(1)), + ("content/wiki/programming/zola.md", "en", PageSort::Weight(10)), + ("content/wiki/programming/python.md", "en", PageSort::None), + ("content/novels/the-colour-of-magic.md", "en", PageSort::Title("The Colour of Magic")), + ( + "content/novels/the-colour-of-magic.fr.md", + "en", + PageSort::Title("La Huitième Couleur"), + ), + ("content/novels/reaper.md", "en", PageSort::Title("Reaper")), + ("content/novels/reaper.fr.md", "fr", PageSort::Title("Reaper (fr)")), + ("content/random/hello.md", "en", PageSort::None), + ]; + for (p, l, s) in pages.clone() { + library.insert_page(create_page(p, l, s)); + } + library.populate_sections(&config, Path::new("content")); + assert_eq!(library.sections.len(), sections.len()); + assert_eq!(library.pages.len(), pages.len()); + let blog_section = &library.sections[&PathBuf::from("content/blog/_index.md")]; + assert_eq!(blog_section.pages.len(), 3); + // sorted by date in desc order + assert_eq!( + blog_section.pages, + vec![ + PathBuf::from("content/blog/python.md"), + PathBuf::from("content/blog/docker.md"), + PathBuf::from("content/blog/rust.md") + ] + ); + assert_eq!(blog_section.ignored_pages.len(), 0); + assert!(&library.pages[&PathBuf::from("content/blog/python.md")].lower.is_none()); + assert_eq!( + &library.pages[&PathBuf::from("content/blog/python.md")].higher, + 
&Some(PathBuf::from("content/blog/docker.md")) + ); + assert_eq!( + library.pages[&PathBuf::from("content/blog/python.md")].meta.template, + Some("new_page.html".to_owned()) + ); + + let wiki = &library.sections[&PathBuf::from("content/wiki/_index.md")]; + assert_eq!(wiki.pages.len(), 4); + // sorted by weight, in asc order + assert_eq!( + wiki.pages, + vec![ + PathBuf::from("content/wiki/programming/rust.md"), + PathBuf::from("content/wiki/recipes/rendang.md"), + PathBuf::from("content/wiki/programming/zola.md"), + PathBuf::from("content/wiki/recipes/chocolate-cake.md"), + ] + ); + assert_eq!(wiki.ignored_pages.len(), 1); + assert_eq!(wiki.ignored_pages, vec![PathBuf::from("content/wiki/programming/python.md")]); + assert_eq!( + &library.pages[&PathBuf::from("content/wiki/recipes/rendang.md")].lower, + &Some(PathBuf::from("content/wiki/programming/rust.md")) + ); + assert_eq!( + &library.pages[&PathBuf::from("content/wiki/recipes/rendang.md")].higher, + &Some(PathBuf::from("content/wiki/programming/zola.md")) + ); + assert_eq!( + wiki.subsections, + vec![ + PathBuf::from("content/wiki/recipes/_index.md"), + PathBuf::from("content/wiki/programming/_index.md") + ] + ); + assert_eq!(wiki.ancestors, vec!["_index.md".to_owned()]); + assert_eq!( + library.sections[&PathBuf::from("content/wiki/recipes/_index.md")].ancestors, + vec!["_index.md".to_owned(), "wiki/_index.md".to_owned()] + ); + + // also works for other languages + let french_wiki = &library.sections[&PathBuf::from("content/wiki/_index.fr.md")]; + assert_eq!(french_wiki.pages.len(), 3); + // sorted by weight, in asc order + assert_eq!( + french_wiki.pages, + vec![ + PathBuf::from("content/wiki/programming/rust.fr.md"), + PathBuf::from("content/wiki/recipes/rendang.fr.md"), + PathBuf::from("content/wiki/recipes/chocolate-cake.fr.md"), + ] + ); + assert_eq!(french_wiki.ignored_pages.len(), 0); + assert!(&library.pages[&PathBuf::from("content/wiki/recipes/chocolate-cake.fr.md")] + .higher + .is_none()); + 
assert_eq!( + &library.pages[&PathBuf::from("content/wiki/recipes/chocolate-cake.fr.md")].lower, + &Some(PathBuf::from("content/wiki/recipes/rendang.fr.md")) + ); + + let orphans = library.get_all_orphan_pages(); + assert_eq!(orphans.len(), 1); + assert_eq!(orphans[0].file.path, PathBuf::from("content/random/hello.md")); + + // And translations should be filled in + let translations = library.find_translations(&PathBuf::from("content/novels/reaper")); + assert_eq!(translations.len(), 2); + assert!(translations[0].title.is_some()); + assert!(translations[1].title.is_some()); + } + + macro_rules! taxonomies { + ($config:expr, [$($page:expr),+]) => {{ + let mut library = Library::new(&$config); + $( + library.insert_page($page); + )+ + library.find_taxonomies(&$config) + }}; + } + + fn create_page_w_taxa(path: &str, lang: &str, taxo: Vec<(&str, Vec<&str>)>) -> Page { + let mut page = Page::default(); + page.file.path = PathBuf::from(path); + page.lang = lang.to_owned(); + let mut taxonomies = HashMap::new(); + for (name, terms) in taxo { + taxonomies.insert(name.to_owned(), terms.iter().map(|t| t.to_string()).collect()); + } + page.meta.taxonomies = taxonomies; + page + } + + #[test] + fn can_make_taxonomies() { + let mut config = Config::default_for_test(); + config.languages.get_mut("en").unwrap().taxonomies = vec![ + TaxonomyConfig { name: "categories".to_string(), ..TaxonomyConfig::default() }, + TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() }, + TaxonomyConfig { name: "authors".to_string(), ..TaxonomyConfig::default() }, + ]; + config.slugify_taxonomies(); + + let page1 = create_page_w_taxa( + "a.md", + "en", + vec![("tags", vec!["rust", "db"]), ("categories", vec!["tutorials"])], + ); + let page2 = create_page_w_taxa( + "b.md", + "en", + vec![("tags", vec!["rust", "js"]), ("categories", vec!["others"])], + ); + let page3 = create_page_w_taxa( + "c.md", + "en", + vec![("tags", vec!["js"]), ("authors", vec!["Vincent Prouillet"])], + ); + 
let taxonomies = taxonomies!(config, [page1, page2, page3]); + + let tags = taxonomies.iter().find(|t| t.kind.name == "tags").unwrap(); + assert_eq!(tags.len(), 3); + assert_eq!(tags.items[0].name, "db"); + assert_eq!(tags.items[0].permalink, "http://a-website.com/tags/db/"); + assert_eq!(tags.items[0].pages.len(), 1); + assert_eq!(tags.items[1].name, "js"); + assert_eq!(tags.items[1].permalink, "http://a-website.com/tags/js/"); + assert_eq!(tags.items[1].pages.len(), 2); + assert_eq!(tags.items[2].name, "rust"); + assert_eq!(tags.items[2].permalink, "http://a-website.com/tags/rust/"); + assert_eq!(tags.items[2].pages.len(), 2); + + let categories = taxonomies.iter().find(|t| t.kind.name == "categories").unwrap(); + assert_eq!(categories.items.len(), 2); + assert_eq!(categories.items[0].name, "others"); + assert_eq!(categories.items[0].permalink, "http://a-website.com/categories/others/"); + assert_eq!(categories.items[0].pages.len(), 1); + + let authors = taxonomies.iter().find(|t| t.kind.name == "authors").unwrap(); + assert_eq!(authors.items.len(), 1); + assert_eq!(authors.items[0].permalink, "http://a-website.com/authors/vincent-prouillet/"); + } + + #[test] + fn can_make_multiple_language_taxonomies() { + let mut config = Config::default_for_test(); + config.slugify.taxonomies = SlugifyStrategy::Safe; + config.languages.insert("fr".to_owned(), LanguageOptions::default()); + config.languages.get_mut("en").unwrap().taxonomies = vec![ + TaxonomyConfig { name: "categories".to_string(), ..TaxonomyConfig::default() }, + TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() }, + ]; + config.languages.get_mut("fr").unwrap().taxonomies = vec![ + TaxonomyConfig { name: "catégories".to_string(), ..TaxonomyConfig::default() }, + TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() }, + ]; + config.slugify_taxonomies(); + + let page1 = create_page_w_taxa("a.md", "en", vec![("categories", vec!["rust"])]); + let page2 = 
create_page_w_taxa("b.md", "en", vec![("tags", vec!["rust"])]); + let page3 = create_page_w_taxa("c.md", "fr", vec![("catégories", vec!["rust"])]); + let taxonomies = taxonomies!(config, [page1, page2, page3]); + + let categories = taxonomies.iter().find(|t| t.kind.name == "categories").unwrap(); + assert_eq!(categories.len(), 1); + assert_eq!(categories.items[0].permalink, "http://a-website.com/categories/rust/"); + let tags = taxonomies.iter().find(|t| t.kind.name == "tags" && t.lang == "en").unwrap(); + assert_eq!(tags.len(), 1); + assert_eq!(tags.items[0].permalink, "http://a-website.com/tags/rust/"); + let fr_categories = taxonomies.iter().find(|t| t.kind.name == "catégories").unwrap(); + assert_eq!(fr_categories.len(), 1); + assert_eq!(fr_categories.items[0].permalink, "http://a-website.com/fr/catégories/rust/"); + } + + #[test] + fn taxonomies_with_unic_are_grouped_with_default_slugify_strategy() { + let mut config = Config::default_for_test(); + config.languages.get_mut("en").unwrap().taxonomies = vec![ + TaxonomyConfig { name: "test-taxonomy".to_string(), ..TaxonomyConfig::default() }, + TaxonomyConfig { name: "test taxonomy".to_string(), ..TaxonomyConfig::default() }, + TaxonomyConfig { name: "test-taxonomy ".to_string(), ..TaxonomyConfig::default() }, + TaxonomyConfig { name: "Test-Taxonomy ".to_string(), ..TaxonomyConfig::default() }, + ]; + config.slugify_taxonomies(); + let page1 = create_page_w_taxa("a.md", "en", vec![("test-taxonomy", vec!["Ecole"])]); + let page2 = create_page_w_taxa("b.md", "en", vec![("test taxonomy", vec!["École"])]); + let page3 = create_page_w_taxa("c.md", "en", vec![("test-taxonomy ", vec!["ecole"])]); + let page4 = create_page_w_taxa("d.md", "en", vec![("Test-Taxonomy ", vec!["école"])]); + let taxonomies = taxonomies!(config, [page1, page2, page3, page4]); + assert_eq!(taxonomies.len(), 1); + + let tax = &taxonomies[0]; + // under the default slugify strategy all of the provided terms should be the same + 
assert_eq!(tax.items.len(), 1); + let term1 = &tax.items[0]; + assert_eq!(term1.name, "Ecole"); + assert_eq!(term1.slug, "ecole"); + assert_eq!(term1.permalink, "http://a-website.com/test-taxonomy/ecole/"); + assert_eq!(term1.pages.len(), 4); + } + + #[test] + fn taxonomies_with_unic_are_not_grouped_with_safe_slugify_strategy() { + let mut config = Config::default_for_test(); + config.slugify.taxonomies = SlugifyStrategy::Safe; + config.languages.get_mut("en").unwrap().taxonomies = + vec![TaxonomyConfig { name: "test".to_string(), ..TaxonomyConfig::default() }]; + config.slugify_taxonomies(); + let page1 = create_page_w_taxa("a.md", "en", vec![("test", vec!["Ecole"])]); + let page2 = create_page_w_taxa("b.md", "en", vec![("test", vec!["École"])]); + let page3 = create_page_w_taxa("c.md", "en", vec![("test", vec!["ecole"])]); + let page4 = create_page_w_taxa("d.md", "en", vec![("test", vec!["école"])]); + let taxonomies = taxonomies!(config, [page1, page2, page3, page4]); + assert_eq!(taxonomies.len(), 1); + let tax = &taxonomies[0]; + // under the safe slugify strategy all terms should be distinct + assert_eq!(tax.items.len(), 4); + } + + #[test] + fn can_fill_backlinks() { + let mut page1 = create_page("page1.md", "en", PageSort::None); + page1.internal_links.push(("page2.md".to_owned(), None)); + let mut page2 = create_page("page2.md", "en", PageSort::None); + page2.internal_links.push(("_index.md".to_owned(), None)); + let mut section1 = create_section("_index.md", "en", 10, false, SortBy::None); + section1.internal_links.push(("page1.md".to_owned(), None)); + section1.internal_links.push(("page2.md".to_owned(), None)); + let mut library = Library::default(); + library.insert_page(page1); + library.insert_page(page2); + library.insert_section(section1); + library.fill_backlinks(); + + assert_eq!(library.backlinks.len(), 3); + assert_eq!(library.backlinks["page1.md"], set! {PathBuf::from("_index.md")}); + assert_eq!( + library.backlinks["page2.md"], + set! 
{PathBuf::from("page1.md"), PathBuf::from("_index.md")} + ); + assert_eq!(library.backlinks["_index.md"], set! {PathBuf::from("page2.md")}); + } +} diff --git a/components/library/src/content/page.rs b/components/content/src/page.rs similarity index 89% rename from components/library/src/content/page.rs rename to components/content/src/page.rs index 26c5340846..dafe068835 100644 --- a/components/library/src/content/page.rs +++ b/components/content/src/page.rs @@ -2,35 +2,36 @@ use std::collections::HashMap; use std::path::{Path, PathBuf}; -use lazy_static::lazy_static; -use regex::Regex; -use slotmap::DefaultKey; -use tera::{Context as TeraContext, Tera}; +use libs::once_cell::sync::Lazy; +use libs::regex::Regex; +use libs::tera::{Context as TeraContext, Tera}; -use crate::library::Library; use config::Config; -use errors::{Error, Result}; -use front_matter::{split_page_content, InsertAnchor, PageFrontMatter}; -use rendering::{render_content, Heading, RenderContext}; -use utils::site::get_reading_analytics; +use errors::{Context, Result}; +use markdown::{render_content, RenderContext}; use utils::slugs::slugify_paths; +use utils::table_of_contents::Heading; use utils::templates::{render_template, ShortcodeDefinition}; +use utils::types::InsertAnchor; -use crate::content::file_info::FileInfo; -use crate::content::ser::SerializingPage; -use crate::content::{find_related_assets, has_anchor}; +use crate::file_info::FileInfo; +use crate::front_matter::{split_page_content, PageFrontMatter}; +use crate::library::Library; +use crate::ser::SerializingPage; +use crate::utils::get_reading_analytics; +use crate::utils::{find_related_assets, has_anchor}; +use utils::anchors::has_anchor_id; use utils::fs::read_file; -use utils::links::has_anchor_id; -lazy_static! 
{ - // Based on https://regex101.com/r/H2n38Z/1/tests - // A regex parsing RFC3339 date followed by {_,-}, some characters and ended by .md - static ref RFC3339_DATE: Regex = Regex::new( +// Based on https://regex101.com/r/H2n38Z/1/tests +// A regex parsing RFC3339 date followed by {_,-}, some characters and ended by .md +static RFC3339_DATE: Lazy = Lazy::new(|| { + Regex::new( r"^(?P(\d{4})-(0[1-9]|1[0-2])-(0[1-9]|[12][0-9]|3[01])(T([01][0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9]|60)(\.[0-9]+)?(Z|(\+|-)([01][0-9]|2[0-3]):([0-5][0-9])))?)\s?(_|-)(?P.+$)" - ).unwrap(); + ).unwrap() +}); - static ref FOOTNOTES_RE: Regex = Regex::new(r"\s*.*?").unwrap(); -} +static FOOTNOTES_RE: Lazy = Lazy::new(|| Regex::new(r"\s*.*?").unwrap()); #[derive(Clone, Debug, Default, PartialEq)] pub struct Page { @@ -38,8 +39,8 @@ pub struct Page { pub file: FileInfo, /// The front matter meta-data pub meta: PageFrontMatter, - /// The list of parent sections - pub ancestors: Vec, + /// The list of parent sections relative paths + pub ancestors: Vec, /// The actual content of the page, in markdown pub raw_content: String, /// All the non-md files we found next to the .md file @@ -61,22 +62,10 @@ pub struct Page { /// When is found in the text, will take the content up to that part /// as summary pub summary: Option, - /// The earlier updated page, for pages sorted by updated date - pub earlier_updated: Option, - /// The later updated page, for pages sorted by updated date - pub later_updated: Option, - /// The earlier page, for pages sorted by date - pub earlier: Option, - /// The later page, for pages sorted by date - pub later: Option, - /// The previous page, for pages sorted by title - pub title_prev: Option, - /// The next page, for pages sorted by title - pub title_next: Option, - /// The lighter page, for pages sorted by weight - pub lighter: Option, - /// The heavier page, for pages sorted by weight - pub heavier: Option, + /// The previous page when sorting: 
earlier/earlier_updated/lighter/prev + pub lower: Option, + /// The next page when sorting: later/later_updated/heavier/next + pub higher: Option, /// Toc made from the headings of the markdown file pub toc: Vec, /// How many words in the raw content @@ -88,7 +77,7 @@ pub struct Page { /// Corresponds to the lang in the {slug}.{lang}.md file scheme pub lang: String, /// Contains all the translated version of that page - pub translations: Vec, + pub translations: Vec, /// The list of all internal links (as path to markdown file), with optional anchor fragments. /// We can only check the anchor after all pages have been built and their ToC compiled. /// The page itself should exist otherwise it would have errored before getting there. @@ -116,7 +105,8 @@ impl Page { let (meta, content) = split_page_content(file_path, content)?; let mut page = Page::new(file_path, meta, base_path); - page.lang = page.file.find_language(config)?; + page.lang = + page.file.find_language(&config.default_language, &config.other_languages_codes())?; page.raw_content = content.to_string(); let (word_count, reading_time) = get_reading_analytics(&page.raw_content); @@ -201,6 +191,8 @@ impl Page { Ok(page) } + pub fn find_language(&mut self) {} + /// Read and parse a .md file into a Page struct pub fn from_file>(path: P, config: &Config, base_path: &Path) -> Result { let path = path.as_ref(); @@ -238,11 +230,10 @@ impl Page { ); context.set_shortcode_definitions(shortcode_definitions); context.set_current_page_path(&self.file.relative); - context.tera_context.insert("page", &SerializingPage::from_page_basic(self, None)); + context.tera_context.insert("page", &SerializingPage::new(self, None, false)); - let res = render_content(&self.raw_content, &context).map_err(|e| { - Error::chain(format!("Failed to render content of {}", self.file.path.display()), e) - })?; + let res = render_content(&self.raw_content, &context) + .with_context(|| format!("Failed to render content of {}", 
self.file.path.display()))?; self.summary = res .summary_len @@ -267,12 +258,11 @@ impl Page { context.insert("config", &config.serialize(&self.lang)); context.insert("current_url", &self.permalink); context.insert("current_path", &self.path); - context.insert("page", &self.to_serialized(library)); + context.insert("page", &self.serialize(library)); context.insert("lang", &self.lang); - render_template(tpl_name, tera, context, &config.theme).map_err(|e| { - Error::chain(format!("Failed to render page '{}'", self.file.path.display()), e) - }) + render_template(tpl_name, tera, context, &config.theme) + .with_context(|| format!("Failed to render page '{}'", self.file.path.display())) } /// Creates a vectors of asset URLs. @@ -305,12 +295,12 @@ impl Page { has_anchor_id(&self.content, id) } - pub fn to_serialized<'a>(&'a self, library: &'a Library) -> SerializingPage<'a> { - SerializingPage::from_page(self, library) + pub fn serialize<'a>(&'a self, library: &'a Library) -> SerializingPage<'a> { + SerializingPage::new(self, Some(library), true) } - pub fn to_serialized_basic<'a>(&'a self, library: &'a Library) -> SerializingPage<'a> { - SerializingPage::from_page_basic(self, Some(library)) + pub fn serialize_without_siblings<'a>(&'a self, library: &'a Library) -> SerializingPage<'a> { + SerializingPage::new(self, Some(library), false) } } @@ -321,14 +311,14 @@ mod tests { use std::io::Write; use std::path::{Path, PathBuf}; - use globset::{Glob, GlobSetBuilder}; + use libs::globset::{Glob, GlobSetBuilder}; + use libs::tera::Tera; use tempfile::tempdir; - use tera::Tera; - use super::Page; + use crate::Page; use config::{Config, LanguageOptions}; - use front_matter::InsertAnchor; use utils::slugs::SlugifyStrategy; + use utils::types::InsertAnchor; #[test] fn can_parse_a_valid_page() { @@ -573,11 +563,7 @@ And here's another. 
[^2] File::create(nested_path.join("graph.jpg")).unwrap(); File::create(nested_path.join("fail.png")).unwrap(); - let res = Page::from_file( - nested_path.join("index.md").as_path(), - &Config::default(), - &path.to_path_buf(), - ); + let res = Page::from_file(nested_path.join("index.md").as_path(), &Config::default(), path); assert!(res.is_ok()); let page = res.unwrap(); assert_eq!(page.file.parent, path.join("content").join("posts")); @@ -601,11 +587,7 @@ And here's another. [^2] File::create(nested_path.join("graph.jpg")).unwrap(); File::create(nested_path.join("fail.png")).unwrap(); - let res = Page::from_file( - nested_path.join("index.md").as_path(), - &Config::default(), - &path.to_path_buf(), - ); + let res = Page::from_file(nested_path.join("index.md").as_path(), &Config::default(), path); assert!(res.is_ok()); let page = res.unwrap(); assert_eq!(page.file.parent, path.join("content").join("posts")); @@ -629,11 +611,7 @@ And here's another. [^2] File::create(nested_path.join("graph.jpg")).unwrap(); File::create(nested_path.join("fail.png")).unwrap(); - let res = Page::from_file( - nested_path.join("index.md").as_path(), - &Config::default(), - &path.to_path_buf(), - ); + let res = Page::from_file(nested_path.join("index.md").as_path(), &Config::default(), path); assert!(res.is_ok()); let page = res.unwrap(); assert_eq!(page.file.parent, path.join("content").join("posts")); @@ -659,11 +637,7 @@ And here's another. [^2] File::create(nested_path.join("graph.jpg")).unwrap(); File::create(nested_path.join("fail.png")).unwrap(); - let res = Page::from_file( - nested_path.join("index.md").as_path(), - &Config::default(), - &path.to_path_buf(), - ); + let res = Page::from_file(nested_path.join("index.md").as_path(), &Config::default(), path); assert!(res.is_ok()); let page = res.unwrap(); assert_eq!(page.file.parent, path.join("content").join("posts")); @@ -692,8 +666,7 @@ And here's another. 
[^2] let mut config = Config::default(); config.ignored_content_globset = Some(gsb.build().unwrap()); - let res = - Page::from_file(nested_path.join("index.md").as_path(), &config, &path.to_path_buf()); + let res = Page::from_file(nested_path.join("index.md").as_path(), &config, path); assert!(res.is_ok()); let page = res.unwrap(); diff --git a/components/library/src/pagination/mod.rs b/components/content/src/pagination.rs similarity index 74% rename from components/library/src/pagination/mod.rs rename to components/content/src/pagination.rs index 899b20bbcf..d4d23de0d9 100644 --- a/components/library/src/pagination/mod.rs +++ b/components/content/src/pagination.rs @@ -1,23 +1,22 @@ +use config::Config; +use serde::Serialize; +use std::borrow::Cow; use std::collections::HashMap; +use std::path::PathBuf; -use serde_derive::Serialize; -use slotmap::DefaultKey; -use tera::{to_value, Context, Tera, Value}; - -use config::Config; -use errors::{Error, Result}; +use errors::{Context as ErrorContext, Result}; +use libs::tera::{to_value, Context, Tera, Value}; use utils::templates::{check_template_fallbacks, render_template}; -use crate::content::{Section, SerializingPage, SerializingSection}; use crate::library::Library; -use crate::taxonomies::{Taxonomy, TaxonomyItem}; - -use std::borrow::Cow; +use crate::ser::{SectionSerMode, SerializingPage, SerializingSection}; +use crate::taxonomies::{Taxonomy, TaxonomyTerm}; +use crate::Section; #[derive(Clone, Debug, PartialEq)] enum PaginationRoot<'a> { Section(&'a Section), - Taxonomy(&'a Taxonomy, &'a TaxonomyItem), + Taxonomy(&'a Taxonomy, &'a TaxonomyTerm), } /// A list of all the pages in the paginator with their index and links @@ -26,11 +25,11 @@ pub struct Pager<'a> { /// The page number in the paginator (1-indexed) pub index: usize, /// Permalink to that page - permalink: String, + pub permalink: String, /// Path to that page - path: String, + pub path: String, /// All pages for the pager - pages: Vec>, + pub pages: Vec>, 
} impl<'a> Pager<'a> { @@ -47,7 +46,7 @@ impl<'a> Pager<'a> { #[derive(Clone, Debug, PartialEq)] pub struct Paginator<'a> { /// All pages in the section/taxonomy - all_pages: Cow<'a, [DefaultKey]>, + all_pages: Cow<'a, [PathBuf]>, /// Pages split in chunks of `paginate_by` pub pagers: Vec>, /// How many content pages on a paginated page at max @@ -70,12 +69,11 @@ impl<'a> Paginator<'a> { /// It will always at least create one pager (the first) even if there are not enough pages to paginate pub fn from_section(section: &'a Section, library: &'a Library) -> Paginator<'a> { let paginate_by = section.meta.paginate_by.unwrap(); - let paginate_reversed = section.meta.paginate_reversed; let mut paginator = Paginator { all_pages: Cow::from(§ion.pages[..]), pagers: Vec::with_capacity(section.pages.len() / paginate_by), paginate_by, - paginate_reversed, + paginate_reversed: section.meta.paginate_reversed, root: PaginationRoot::Section(section), permalink: section.permalink.clone(), path: section.path.clone(), @@ -92,7 +90,7 @@ impl<'a> Paginator<'a> { /// It will always at least create one pager (the first) even if there are not enough pages to paginate pub fn from_taxonomy( taxonomy: &'a Taxonomy, - item: &'a TaxonomyItem, + item: &'a TaxonomyTerm, library: &'a Library, tera: &Tera, theme: &Option, @@ -100,10 +98,8 @@ impl<'a> Paginator<'a> { let paginate_by = taxonomy.kind.paginate_by.unwrap(); // Check for taxon-specific template, or use generic as fallback. 
let specific_template = format!("{}/single.html", taxonomy.kind.name); - let template = match check_template_fallbacks(&specific_template, tera, theme) { - Some(template) => template, - None => "taxonomy_single.html", - }; + let template = check_template_fallbacks(&specific_template, tera, theme) + .unwrap_or("taxonomy_single.html"); let mut paginator = Paginator { all_pages: Cow::Borrowed(&item.pages), pagers: Vec::with_capacity(item.pages.len() / paginate_by), @@ -136,9 +132,9 @@ impl<'a> Paginator<'a> { self.all_pages.to_mut().reverse(); } - for key in self.all_pages.to_mut().iter_mut() { - let page = library.get_page_by_key(*key); - current_page.push(page.to_serialized_basic(library)); + for p in &*self.all_pages { + let page = &library.pages[p]; + current_page.push(SerializingPage::new(page, Some(library), false)); if current_page.len() == self.paginate_by { pages.push(current_page); @@ -233,8 +229,10 @@ impl<'a> Paginator<'a> { let mut context = Context::new(); match self.root { PaginationRoot::Section(s) => { - context - .insert("section", &SerializingSection::from_section_basic(s, Some(library))); + context.insert( + "section", + &SerializingSection::new(s, SectionSerMode::MetadataOnly(library)), + ); context.insert("lang", &s.lang); context.insert("config", &config.serialize(&s.lang)); } @@ -250,24 +248,18 @@ impl<'a> Paginator<'a> { context.insert("paginator", &self.build_paginator_context(pager)); render_template(&self.template, tera, context, &config.theme) - .map_err(|e| Error::chain(format!("Failed to render pager {}", pager.index), e)) + .with_context(|| format!("Failed to render pager {}", pager.index)) } } #[cfg(test)] mod tests { - use std::path::PathBuf; - use tera::{to_value, Tera}; - - use crate::content::{Page, Section}; - use crate::library::Library; - use crate::taxonomies::{Taxonomy, TaxonomyItem}; - use config::Taxonomy as TaxonomyConfig; - - use super::Paginator; + use super::*; + use crate::{Page, SectionFrontMatter}; + use 
config::TaxonomyConfig; fn create_section(is_index: bool, paginate_reversed: bool) -> Section { - let f = front_matter::SectionFrontMatter { + let f = SectionFrontMatter { paginate_by: Some(2), paginate_path: "page".to_string(), paginate_reversed, @@ -278,9 +270,11 @@ mod tests { if !is_index { s.path = "/posts/".to_string(); s.permalink = "https://vincent.is/posts/".to_string(); + s.file.path = PathBuf::from("posts/_index.md"); s.file.components = vec!["posts".to_string()]; } else { s.path = "/".into(); + s.file.path = PathBuf::from("_index.md"); s.permalink = "https://vincent.is/".to_string(); } s @@ -291,89 +285,64 @@ mod tests { num_pages: usize, paginate_reversed: bool, ) -> (Section, Library) { - let mut library = Library::new(num_pages, 0, false); + let mut library = Library::default(); for i in 1..=num_pages { let mut page = Page::default(); page.meta.title = Some(i.to_string()); + page.file.path = PathBuf::from(&format!("{}.md", i)); library.insert_page(page); } - let mut draft = Page::default(); - draft.meta.draft = true; - library.insert_page(draft); let mut section = create_section(is_index, paginate_reversed); - section.pages = library.pages().keys().collect(); + section.pages = library.pages.keys().cloned().collect(); + section.pages.sort(); library.insert_section(section.clone()); (section, library) } #[test] - fn test_can_create_paginator() { + fn test_can_create_section_paginator() { let (section, library) = create_library(false, 3, false); let paginator = Paginator::from_section(§ion, &library); assert_eq!(paginator.pagers.len(), 2); assert_eq!(paginator.pagers[0].index, 1); assert_eq!(paginator.pagers[0].pages.len(), 2); + assert_eq!(paginator.pagers[0].pages[0].title.clone().unwrap(), "1"); + assert_eq!(paginator.pagers[0].pages[1].title.clone().unwrap(), "2"); assert_eq!(paginator.pagers[0].permalink, "https://vincent.is/posts/"); assert_eq!(paginator.pagers[0].path, "/posts/"); assert_eq!(paginator.pagers[1].index, 2); - 
assert_eq!(paginator.pagers[1].pages.len(), 2); + assert_eq!(paginator.pagers[1].pages.len(), 1); + assert_eq!(paginator.pagers[1].pages[0].title.clone().unwrap(), "3"); assert_eq!(paginator.pagers[1].permalink, "https://vincent.is/posts/page/2/"); assert_eq!(paginator.pagers[1].path, "/posts/page/2/"); } #[test] - fn test_can_create_reversed_paginator() { - // 6 pages, 5 normal and 1 draft - let (section, library) = create_library(false, 5, true); + fn test_can_create_reversed_section_paginator() { + let (section, library) = create_library(false, 3, true); let paginator = Paginator::from_section(§ion, &library); - assert_eq!(paginator.pagers.len(), 3); + assert_eq!(paginator.pagers.len(), 2); assert_eq!(paginator.pagers[0].index, 1); assert_eq!(paginator.pagers[0].pages.len(), 2); + assert_eq!(paginator.pagers[0].pages[0].title.clone().unwrap(), "3"); + assert_eq!(paginator.pagers[0].pages[1].title.clone().unwrap(), "2"); assert_eq!(paginator.pagers[0].permalink, "https://vincent.is/posts/"); assert_eq!(paginator.pagers[0].path, "/posts/"); - assert_eq!( - vec!["".to_string(), "5".to_string()], - paginator.pagers[0] - .pages - .iter() - .map(|p| p.get_title().as_ref().unwrap_or(&"".to_string()).to_string()) - .collect::>() - ); assert_eq!(paginator.pagers[1].index, 2); - assert_eq!(paginator.pagers[1].pages.len(), 2); + assert_eq!(paginator.pagers[1].pages.len(), 1); + assert_eq!(paginator.pagers[1].pages[0].title.clone().unwrap(), "1"); assert_eq!(paginator.pagers[1].permalink, "https://vincent.is/posts/page/2/"); assert_eq!(paginator.pagers[1].path, "/posts/page/2/"); - assert_eq!( - vec!["4".to_string(), "3".to_string()], - paginator.pagers[1] - .pages - .iter() - .map(|p| p.get_title().as_ref().unwrap_or(&"".to_string()).to_string()) - .collect::>() - ); - - assert_eq!(paginator.pagers[2].index, 3); - assert_eq!(paginator.pagers[2].pages.len(), 2); - assert_eq!(paginator.pagers[2].permalink, "https://vincent.is/posts/page/3/"); - 
assert_eq!(paginator.pagers[2].path, "/posts/page/3/"); - assert_eq!( - vec!["2".to_string(), "1".to_string()], - paginator.pagers[2] - .pages - .iter() - .map(|p| p.get_title().as_ref().unwrap_or(&"".to_string()).to_string()) - .collect::>() - ); } #[test] - fn test_can_create_paginator_for_index() { + fn can_create_paginator_for_index() { let (section, library) = create_library(true, 3, false); let paginator = Paginator::from_section(§ion, &library); assert_eq!(paginator.pagers.len(), 2); @@ -384,7 +353,7 @@ mod tests { assert_eq!(paginator.pagers[0].path, "/"); assert_eq!(paginator.pagers[1].index, 2); - assert_eq!(paginator.pagers[1].pages.len(), 2); + assert_eq!(paginator.pagers[1].pages.len(), 1); assert_eq!(paginator.pagers[1].permalink, "https://vincent.is/page/2/"); assert_eq!(paginator.pagers[1].path, "/page/2/"); } @@ -402,6 +371,7 @@ mod tests { assert_eq!(context["previous"], to_value::>(None).unwrap()); assert_eq!(context["next"], to_value("https://vincent.is/posts/page/2/").unwrap()); assert_eq!(context["current_index"], to_value(1).unwrap()); + assert_eq!(context["pages"].as_array().unwrap().len(), 2); let context = paginator.build_paginator_context(&paginator.pagers[1]); assert_eq!(context["paginate_by"], to_value(2).unwrap()); @@ -410,48 +380,12 @@ mod tests { assert_eq!(context["next"], to_value::>(None).unwrap()); assert_eq!(context["previous"], to_value("https://vincent.is/posts/").unwrap()); assert_eq!(context["current_index"], to_value(2).unwrap()); - assert_eq!(context["total_pages"], to_value(4).unwrap()); + assert_eq!(context["total_pages"], to_value(3).unwrap()); + assert_eq!(context["pages"].as_array().unwrap().len(), 1); } #[test] fn test_can_create_paginator_for_taxonomy() { - let (_, library) = create_library(false, 3, false); - let tera = Tera::default(); - let taxonomy_def = TaxonomyConfig { - name: "tags".to_string(), - paginate_by: Some(2), - ..TaxonomyConfig::default() - }; - let taxonomy_item = TaxonomyItem { - name: 
"Something".to_string(), - slug: "something".to_string(), - path: "/tags/something".to_string(), - permalink: "https://vincent.is/tags/something/".to_string(), - pages: library.pages().keys().collect(), - }; - let taxonomy = Taxonomy { - kind: taxonomy_def, - lang: "en".to_owned(), - slug: "tags".to_string(), - permalink: "/tags/".to_string(), - items: vec![taxonomy_item.clone()], - }; - let paginator = Paginator::from_taxonomy(&taxonomy, &taxonomy_item, &library, &tera, &None); - assert_eq!(paginator.pagers.len(), 2); - - assert_eq!(paginator.pagers[0].index, 1); - assert_eq!(paginator.pagers[0].pages.len(), 2); - assert_eq!(paginator.pagers[0].permalink, "https://vincent.is/tags/something/"); - assert_eq!(paginator.pagers[0].path, "/tags/something/"); - - assert_eq!(paginator.pagers[1].index, 2); - assert_eq!(paginator.pagers[1].pages.len(), 2); - assert_eq!(paginator.pagers[1].permalink, "https://vincent.is/tags/something/page/2/"); - assert_eq!(paginator.pagers[1].path, "/tags/something/page/2/"); - } - - #[test] - fn test_can_create_paginator_for_slugified_taxonomy() { let (_, library) = create_library(false, 3, false); let tera = Tera::default(); let taxonomy_def = TaxonomyConfig { @@ -459,18 +393,19 @@ mod tests { paginate_by: Some(2), ..TaxonomyConfig::default() }; - let taxonomy_item = TaxonomyItem { + let taxonomy_item = TaxonomyTerm { name: "Something".to_string(), slug: "something".to_string(), path: "/some-tags/something/".to_string(), permalink: "https://vincent.is/some-tags/something/".to_string(), - pages: library.pages().keys().collect(), + pages: library.pages.keys().cloned().collect(), }; let taxonomy = Taxonomy { kind: taxonomy_def, lang: "en".to_owned(), slug: "some-tags".to_string(), - permalink: "/some-tags/".to_string(), + path: "/some-tags/".to_string(), + permalink: "https://vincent.is/some-tags/".to_string(), items: vec![taxonomy_item.clone()], }; let paginator = Paginator::from_taxonomy(&taxonomy, &taxonomy_item, &library, &tera, &None); 
@@ -482,7 +417,7 @@ mod tests { assert_eq!(paginator.pagers[0].path, "/some-tags/something/"); assert_eq!(paginator.pagers[1].index, 2); - assert_eq!(paginator.pagers[1].pages.len(), 2); + assert_eq!(paginator.pagers[1].pages.len(), 1); assert_eq!(paginator.pagers[1].permalink, "https://vincent.is/some-tags/something/page/2/"); assert_eq!(paginator.pagers[1].path, "/some-tags/something/page/2/"); } @@ -501,7 +436,7 @@ mod tests { assert_eq!(paginator.pagers[0].path, "/posts/"); assert_eq!(paginator.pagers[1].index, 2); - assert_eq!(paginator.pagers[1].pages.len(), 2); + assert_eq!(paginator.pagers[1].pages.len(), 1); assert_eq!(paginator.pagers[1].permalink, "https://vincent.is/posts/2/"); assert_eq!(paginator.pagers[1].path, "/posts/2/"); diff --git a/components/library/src/content/section.rs b/components/content/src/section.rs similarity index 80% rename from components/library/src/content/section.rs rename to components/content/src/section.rs index d2fb636d80..4dadfc20c2 100644 --- a/components/library/src/content/section.rs +++ b/components/content/src/section.rs @@ -1,21 +1,20 @@ use std::collections::HashMap; use std::path::{Path, PathBuf}; -use slotmap::DefaultKey; -use tera::{Context as TeraContext, Tera}; +use libs::tera::{Context as TeraContext, Tera}; use config::Config; -use errors::{Error, Result}; -use front_matter::{split_section_content, SectionFrontMatter}; -use rendering::{render_content, Heading, RenderContext}; +use errors::{Context, Result}; +use markdown::{render_content, RenderContext}; use utils::fs::read_file; -use utils::site::get_reading_analytics; +use utils::table_of_contents::Heading; use utils::templates::{render_template, ShortcodeDefinition}; -use crate::content::file_info::FileInfo; -use crate::content::ser::SerializingSection; -use crate::content::{find_related_assets, has_anchor}; +use crate::file_info::FileInfo; +use crate::front_matter::{split_section_content, SectionFrontMatter}; use crate::library::Library; +use 
crate::ser::{SectionSerMode, SerializingSection}; +use crate::utils::{find_related_assets, get_reading_analytics, has_anchor}; // Default is used to create a default index section if there is no _index.md in the root content directory #[derive(Clone, Debug, Default, PartialEq)] @@ -39,13 +38,13 @@ pub struct Section { /// All the non-md files we found next to the .md file as string pub serialized_assets: Vec, /// All direct pages of that section - pub pages: Vec, + pub pages: Vec, /// All pages that cannot be sorted in this section - pub ignored_pages: Vec, - /// The list of parent sections - pub ancestors: Vec, + pub ignored_pages: Vec, + /// The list of parent sections relative paths + pub ancestors: Vec, /// All direct subsections - pub subsections: Vec, + pub subsections: Vec, /// Toc made from the headings of the markdown file pub toc: Vec, /// How many words in the raw content @@ -83,7 +82,9 @@ impl Section { ) -> Result
{ let (meta, content) = split_section_content(file_path, content)?; let mut section = Section::new(file_path, meta, base_path); - section.lang = section.file.find_language(config)?; + section.lang = section + .file + .find_language(&config.default_language, &config.other_languages_codes())?; section.raw_content = content.to_string(); let (word_count, reading_time) = get_reading_analytics(§ion.raw_content); section.word_count = Some(word_count); @@ -159,11 +160,12 @@ impl Section { ); context.set_shortcode_definitions(shortcode_definitions); context.set_current_page_path(&self.file.relative); - context.tera_context.insert("section", &SerializingSection::from_section_basic(self, None)); + context + .tera_context + .insert("section", &SerializingSection::new(self, SectionSerMode::ForMarkdown)); - let res = render_content(&self.raw_content, &context).map_err(|e| { - Error::chain(format!("Failed to render content of {}", self.file.path.display()), e) - })?; + let res = render_content(&self.raw_content, &context) + .with_context(|| format!("Failed to render content of {}", self.file.path.display()))?; self.content = res.body; self.toc = res.toc; self.external_links = res.external_links; @@ -180,12 +182,11 @@ impl Section { context.insert("config", &config.serialize(&self.lang)); context.insert("current_url", &self.permalink); context.insert("current_path", &self.path); - context.insert("section", &self.to_serialized(library)); + context.insert("section", &SerializingSection::new(self, SectionSerMode::Full(library))); context.insert("lang", &self.lang); - render_template(tpl_name, tera, context, &config.theme).map_err(|e| { - Error::chain(format!("Failed to render section '{}'", self.file.path.display()), e) - }) + render_template(tpl_name, tera, context, &config.theme) + .with_context(|| format!("Failed to render section '{}'", self.file.path.display())) } /// Is this the index section? 
@@ -207,14 +208,6 @@ impl Section { has_anchor(&self.toc, anchor) } - pub fn to_serialized<'a>(&'a self, library: &'a Library) -> SerializingSection<'a> { - SerializingSection::from_section(self, library) - } - - pub fn to_serialized_basic<'a>(&'a self, library: &'a Library) -> SerializingSection<'a> { - SerializingSection::from_section_basic(self, Some(library)) - } - pub fn paginate_by(&self) -> Option { match self.meta.paginate_by { None => None, @@ -224,15 +217,23 @@ impl Section { }, } } + + pub fn serialize<'a>(&'a self, library: &'a Library) -> SerializingSection<'a> { + SerializingSection::new(self, SectionSerMode::Full(library)) + } + + pub fn serialize_basic<'a>(&'a self, library: &'a Library) -> SerializingSection<'a> { + SerializingSection::new(self, SectionSerMode::MetadataOnly(library)) + } } #[cfg(test)] mod tests { - use std::fs::{create_dir, File}; + use std::fs::{create_dir, create_dir_all, File}; use std::io::Write; use std::path::{Path, PathBuf}; - use globset::{Glob, GlobSetBuilder}; + use libs::globset::{Glob, GlobSetBuilder}; use tempfile::tempdir; use super::Section; @@ -268,23 +269,27 @@ mod tests { fn section_with_ignored_assets_filters_out_correct_files() { let tmp_dir = tempdir().expect("create temp dir"); let path = tmp_dir.path(); - create_dir(&path.join("content")).expect("create content temp dir"); - create_dir(&path.join("content").join("posts")).expect("create posts temp dir"); - let nested_path = path.join("content").join("posts").join("with-assets"); - create_dir(&nested_path).expect("create nested temp dir"); - let mut f = File::create(nested_path.join("_index.md")).unwrap(); + let article_path = path.join("content/posts/with-assets"); + create_dir_all(path.join(&article_path).join("foo/bar/baz/quux")) + .expect("create nested temp dir"); + create_dir_all(path.join(&article_path).join("foo/baz/quux")) + .expect("create nested temp dir"); + let mut f = File::create(article_path.join("_index.md")).unwrap(); 
f.write_all(b"+++\nslug=\"hey\"\n+++\n").unwrap(); - File::create(nested_path.join("example.js")).unwrap(); - File::create(nested_path.join("graph.jpg")).unwrap(); - File::create(nested_path.join("fail.png")).unwrap(); + File::create(article_path.join("example.js")).unwrap(); + File::create(article_path.join("graph.jpg")).unwrap(); + File::create(article_path.join("fail.png")).unwrap(); + File::create(article_path.join("foo/bar/baz/quux/quo.xlsx")).unwrap(); + File::create(article_path.join("foo/bar/baz/quux/quo.docx")).unwrap(); let mut gsb = GlobSetBuilder::new(); gsb.add(Glob::new("*.{js,png}").unwrap()); + gsb.add(Glob::new("foo/**/baz").unwrap()); let mut config = Config::default(); config.ignored_content_globset = Some(gsb.build().unwrap()); let res = - Section::from_file(nested_path.join("_index.md").as_path(), &config, &PathBuf::new()); + Section::from_file(article_path.join("_index.md").as_path(), &config, &PathBuf::new()); assert!(res.is_ok()); let page = res.unwrap(); diff --git a/components/content/src/ser.rs b/components/content/src/ser.rs new file mode 100644 index 0000000000..e27a49fbc0 --- /dev/null +++ b/components/content/src/ser.rs @@ -0,0 +1,221 @@ +use std::collections::HashMap; +use std::path::Path; + +use serde::Serialize; + +use crate::library::Library; +use crate::{Page, Section}; +use libs::tera::{Map, Value}; +use utils::table_of_contents::Heading; + +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct BackLink<'a> { + pub permalink: &'a str, + pub title: &'a Option, +} + +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct TranslatedContent<'a> { + pub lang: &'a str, + pub permalink: &'a str, + pub title: &'a Option, + /// The path to the markdown file + pub path: &'a Path, +} + +fn find_backlinks<'a>(relative_path: &str, library: &'a Library) -> Vec> { + let mut backlinks = Vec::new(); + if let Some(b) = library.backlinks.get(relative_path) { + for backlink in b { + if let Some(p) = library.pages.get(backlink) { + 
backlinks.push(BackLink { permalink: &p.permalink, title: &p.meta.title }); + } + if let Some(s) = library.sections.get(backlink) { + backlinks.push(BackLink { permalink: &s.permalink, title: &s.meta.title }); + } + } + backlinks.sort_by_key(|b| b.permalink); + } + backlinks +} + +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct SerializingPage<'a> { + relative_path: &'a str, + content: &'a str, + permalink: &'a str, + slug: &'a str, + ancestors: &'a [String], + pub(crate) title: &'a Option, + description: &'a Option, + updated: &'a Option, + date: &'a Option, + year: Option, + month: Option, + day: Option, + taxonomies: &'a HashMap>, + extra: &'a Map, + path: &'a str, + components: &'a [String], + summary: &'a Option, + toc: &'a [Heading], + word_count: Option, + reading_time: Option, + assets: &'a [String], + draft: bool, + lang: &'a str, + lower: Option>>, + higher: Option>>, + translations: Vec>, + backlinks: Vec>, +} + +impl<'a> SerializingPage<'a> { + pub fn new(page: &'a Page, library: Option<&'a Library>, include_siblings: bool) -> Self { + let mut year = None; + let mut month = None; + let mut day = None; + if let Some(d) = page.meta.datetime_tuple { + year = Some(d.0); + month = Some(d.1); + day = Some(d.2); + } + let mut lower = None; + let mut higher = None; + let mut translations = vec![]; + let mut backlinks = vec![]; + + if let Some(lib) = library { + translations = lib.find_translations(&page.file.canonical); + + if include_siblings { + lower = page + .lower + .as_ref() + .map(|p| Box::new(Self::new(&lib.pages[p], Some(lib), false))); + higher = page + .higher + .as_ref() + .map(|p| Box::new(Self::new(&lib.pages[p], Some(lib), false))); + } + + backlinks = find_backlinks(&page.file.relative, lib); + } + + Self { + relative_path: &page.file.relative, + ancestors: &page.ancestors, + content: &page.content, + permalink: &page.permalink, + slug: &page.slug, + title: &page.meta.title, + description: &page.meta.description, + extra: 
&page.meta.extra, + updated: &page.meta.updated, + date: &page.meta.date, + year, + month, + day, + taxonomies: &page.meta.taxonomies, + path: &page.path, + components: &page.components, + summary: &page.summary, + toc: &page.toc, + word_count: page.word_count, + reading_time: page.reading_time, + assets: &page.serialized_assets, + draft: page.meta.draft, + lang: &page.lang, + lower, + higher, + translations, + backlinks, + } + } +} + +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct SerializingSection<'a> { + relative_path: &'a str, + content: &'a str, + permalink: &'a str, + draft: bool, + ancestors: &'a [String], + title: &'a Option, + description: &'a Option, + extra: &'a Map, + path: &'a str, + components: &'a [String], + toc: &'a [Heading], + word_count: Option, + reading_time: Option, + lang: &'a str, + assets: &'a [String], + pages: Vec>, + subsections: Vec<&'a str>, + translations: Vec>, + backlinks: Vec>, +} + +#[derive(Debug)] +pub enum SectionSerMode<'a> { + /// Just itself, no pages or subsections + /// TODO: I believe we can get rid of it? 
+ ForMarkdown, + /// Fetches subsections/ancestors/translations but not the pages + MetadataOnly(&'a Library), + /// Fetches everything + Full(&'a Library), +} + +impl<'a> SerializingSection<'a> { + pub fn new(section: &'a Section, mode: SectionSerMode<'a>) -> Self { + let mut pages = Vec::with_capacity(section.pages.len()); + let mut subsections = Vec::with_capacity(section.subsections.len()); + let mut translations = Vec::new(); + let mut backlinks = Vec::new(); + + match mode { + SectionSerMode::ForMarkdown => {} + SectionSerMode::MetadataOnly(lib) | SectionSerMode::Full(lib) => { + translations = lib.find_translations(§ion.file.canonical); + subsections = section + .subsections + .iter() + .map(|p| lib.sections[p].file.relative.as_str()) + .collect(); + + // Fetching pages on top + if let SectionSerMode::Full(_) = mode { + for p in §ion.pages { + pages.push(SerializingPage::new(&lib.pages[p], Some(lib), true)); + } + } + + backlinks = find_backlinks(§ion.file.relative, lib); + } + } + + Self { + relative_path: §ion.file.relative, + ancestors: §ion.ancestors, + draft: section.meta.draft, + content: §ion.content, + permalink: §ion.permalink, + title: §ion.meta.title, + description: §ion.meta.description, + extra: §ion.meta.extra, + path: §ion.path, + components: §ion.components, + toc: §ion.toc, + word_count: section.word_count, + reading_time: section.reading_time, + assets: §ion.serialized_assets, + lang: §ion.lang, + pages, + subsections, + translations, + backlinks, + } + } +} diff --git a/components/content/src/sorting.rs b/components/content/src/sorting.rs new file mode 100644 index 0000000000..313df3674c --- /dev/null +++ b/components/content/src/sorting.rs @@ -0,0 +1,197 @@ +use std::cmp::Ordering; +use std::path::PathBuf; + +use crate::{Page, SortBy}; +use libs::lexical_sort::natural_lexical_cmp; +use libs::rayon::prelude::*; + +/// Sort by the field picked by the function. 
+/// The pages permalinks are used to break the ties +pub fn sort_pages(pages: &[&Page], sort_by: SortBy) -> (Vec, Vec) { + let (mut can_be_sorted, cannot_be_sorted): (Vec<&Page>, Vec<_>) = + pages.par_iter().partition(|page| match sort_by { + SortBy::Date => page.meta.datetime.is_some(), + SortBy::UpdateDate => { + page.meta.datetime.is_some() || page.meta.updated_datetime.is_some() + } + SortBy::Title | SortBy::TitleBytes => page.meta.title.is_some(), + SortBy::Weight => page.meta.weight.is_some(), + SortBy::None => unreachable!(), + }); + + can_be_sorted.par_sort_unstable_by(|a, b| { + let ord = match sort_by { + SortBy::Date => b.meta.datetime.unwrap().cmp(&a.meta.datetime.unwrap()), + SortBy::UpdateDate => std::cmp::max(b.meta.datetime, b.meta.updated_datetime) + .unwrap() + .cmp(&std::cmp::max(a.meta.datetime, a.meta.updated_datetime).unwrap()), + SortBy::Title => { + natural_lexical_cmp(a.meta.title.as_ref().unwrap(), b.meta.title.as_ref().unwrap()) + } + SortBy::TitleBytes => { + a.meta.title.as_ref().unwrap().cmp(b.meta.title.as_ref().unwrap()) + } + SortBy::Weight => a.meta.weight.unwrap().cmp(&b.meta.weight.unwrap()), + SortBy::None => unreachable!(), + }; + + if ord == Ordering::Equal { + a.permalink.cmp(&b.permalink) + } else { + ord + } + }); + + ( + can_be_sorted.iter().map(|p| p.file.path.clone()).collect(), + cannot_be_sorted.iter().map(|p: &&Page| p.file.path.clone()).collect(), + ) +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::PageFrontMatter; + + fn create_page_with_date(date: &str, updated_date: Option<&str>) -> Page { + let mut front_matter = PageFrontMatter { + date: Some(date.to_string()), + updated: updated_date.map(|c| c.to_string()), + ..Default::default() + }; + front_matter.date_to_datetime(); + Page::new(format!("content/hello-{}.md", date), front_matter, &PathBuf::new()) + } + + fn create_page_with_title(title: &str) -> Page { + let front_matter = PageFrontMatter { title: Some(title.to_string()), ..Default::default() }; 
+ Page::new(format!("content/hello-{}.md", title), front_matter, &PathBuf::new()) + } + + fn create_page_with_weight(weight: usize) -> Page { + let front_matter = PageFrontMatter { weight: Some(weight), ..Default::default() }; + Page::new(format!("content/hello-{}.md", weight), front_matter, &PathBuf::new()) + } + + #[test] + fn can_sort_by_dates() { + let page1 = create_page_with_date("2018-01-01", None); + let page2 = create_page_with_date("2017-01-01", None); + let page3 = create_page_with_date("2019-01-01", None); + let (pages, ignored_pages) = sort_pages(&[&page1, &page2, &page3], SortBy::Date); + assert_eq!(pages[0], page3.file.path); + assert_eq!(pages[1], page1.file.path); + assert_eq!(pages[2], page2.file.path); + assert_eq!(ignored_pages.len(), 0); + } + + #[test] + fn can_sort_by_updated_dates() { + let page1 = create_page_with_date("2018-01-01", None); + let page2 = create_page_with_date("2017-01-01", Some("2022-02-01")); + let page3 = create_page_with_date("2019-01-01", None); + let (pages, ignored_pages) = sort_pages(&[&page1, &page2, &page3], SortBy::UpdateDate); + assert_eq!(pages[0], page2.file.path); + assert_eq!(pages[1], page3.file.path); + assert_eq!(pages[2], page1.file.path); + assert_eq!(ignored_pages.len(), 0); + } + + #[test] + fn can_sort_by_weight() { + let page1 = create_page_with_weight(2); + let page2 = create_page_with_weight(3); + let page3 = create_page_with_weight(1); + let (pages, ignored_pages) = sort_pages(&[&page1, &page2, &page3], SortBy::Weight); + // Should be sorted by weight + assert_eq!(pages[0], page3.file.path); + assert_eq!(pages[1], page1.file.path); + assert_eq!(pages[2], page2.file.path); + assert_eq!(ignored_pages.len(), 0); + } + + #[test] + fn can_sort_by_title() { + let titles = vec![ + "åland", + "bagel", + "track_3", + "microkernel", + "Österrike", + "métro", + "BART", + "Underground", + "track_13", + "μ-kernel", + "meter", + "track_1", + ]; + let pages: Vec = titles.iter().map(|title| 
create_page_with_title(title)).collect(); + let (sorted_pages, ignored_pages) = + sort_pages(&pages.iter().collect::>(), SortBy::Title); + // Should be sorted by title in lexical order + let sorted_titles: Vec<_> = sorted_pages + .iter() + .map(|key| { + pages.iter().find(|p| &p.file.path == key).unwrap().meta.title.as_ref().unwrap() + }) + .collect(); + assert_eq!(ignored_pages.len(), 0); + assert_eq!( + sorted_titles, + vec![ + "åland", + "bagel", + "BART", + "μ-kernel", + "meter", + "métro", + "microkernel", + "Österrike", + "track_1", + "track_3", + "track_13", + "Underground" + ] + ); + + let (sorted_pages, ignored_pages) = + sort_pages(&pages.iter().collect::>(), SortBy::TitleBytes); + // Should be sorted by title in bytes order + let sorted_titles: Vec<_> = sorted_pages + .iter() + .map(|key| { + pages.iter().find(|p| &p.file.path == key).unwrap().meta.title.as_ref().unwrap() + }) + .collect(); + assert_eq!(ignored_pages.len(), 0); + assert_eq!( + sorted_titles, + vec![ + "BART", + "Underground", + "bagel", + "meter", + "microkernel", + "métro", + "track_1", + "track_13", + "track_3", + // Non ASCII letters are not merged with the ASCII equivalent (o/a/m here) + "Österrike", + "åland", + "μ-kernel" + ] + ); + } + + #[test] + fn can_find_ignored_pages() { + let page1 = create_page_with_date("2018-01-01", None); + let page2 = create_page_with_weight(1); + let (pages, ignored_pages) = sort_pages(&[&page1, &page2], SortBy::Date); + assert_eq!(pages[0], page1.file.path); + assert_eq!(ignored_pages.len(), 1); + assert_eq!(ignored_pages[0], page2.file.path); + } +} diff --git a/components/content/src/taxonomies.rs b/components/content/src/taxonomies.rs new file mode 100644 index 0000000000..52c86e0c01 --- /dev/null +++ b/components/content/src/taxonomies.rs @@ -0,0 +1,244 @@ +use std::cmp::Ordering; +use std::path::PathBuf; + +use serde::Serialize; + +use config::{Config, TaxonomyConfig}; +use errors::{Context as ErrorContext, Result}; +use libs::ahash::AHashMap; 
+use libs::tera::{Context, Tera}; +use utils::slugs::slugify_paths; +use utils::templates::{check_template_fallbacks, render_template}; + +use crate::library::Library; +use crate::ser::SerializingPage; +use crate::{Page, SortBy}; + +use crate::sorting::sort_pages; + +#[derive(Debug, Clone, PartialEq, Serialize)] +pub struct SerializedTaxonomyTerm<'a> { + name: &'a str, + slug: &'a str, + path: &'a str, + permalink: &'a str, + pages: Vec>, +} + +impl<'a> SerializedTaxonomyTerm<'a> { + pub fn from_item(item: &'a TaxonomyTerm, library: &'a Library) -> Self { + let mut pages = vec![]; + + for p in &item.pages { + pages.push(SerializingPage::new(&library.pages[p], Some(library), false)); + } + + SerializedTaxonomyTerm { + name: &item.name, + slug: &item.slug, + path: &item.path, + permalink: &item.permalink, + pages, + } + } +} + +/// A taxonomy with all its pages +#[derive(Debug, Clone)] +pub struct TaxonomyTerm { + pub name: String, + pub slug: String, + pub path: String, + pub permalink: String, + pub pages: Vec, +} + +impl TaxonomyTerm { + pub fn new( + name: &str, + lang: &str, + taxo_slug: &str, + taxo_pages: &[&Page], + config: &Config, + ) -> Self { + let item_slug = slugify_paths(name, config.slugify.taxonomies); + let path = if lang != config.default_language { + format!("/{}/{}/{}/", lang, taxo_slug, item_slug) + } else { + format!("/{}/{}/", taxo_slug, item_slug) + }; + let permalink = config.make_permalink(&path); + + // Taxonomy are almost always used for blogs so we filter by dates + // and it's not like we can sort things across sections by anything other + // than dates + let (mut pages, ignored_pages) = sort_pages(taxo_pages, SortBy::Date); + // We still append pages without dates at the end + pages.extend(ignored_pages); + TaxonomyTerm { name: name.to_string(), permalink, path, slug: item_slug, pages } + } + + pub fn serialize<'a>(&'a self, library: &'a Library) -> SerializedTaxonomyTerm<'a> { + SerializedTaxonomyTerm::from_item(self, library) + } + + 
pub fn merge(&mut self, other: Self) { + self.pages.extend(other.pages); + } +} + +impl PartialEq for TaxonomyTerm { + fn eq(&self, other: &Self) -> bool { + self.permalink == other.permalink + } +} + +#[derive(Debug, Clone, PartialEq, Serialize)] +pub struct SerializedTaxonomy<'a> { + kind: &'a TaxonomyConfig, + lang: &'a str, + permalink: &'a str, + items: Vec>, +} + +impl<'a> SerializedTaxonomy<'a> { + pub fn from_taxonomy(taxonomy: &'a Taxonomy, library: &'a Library) -> Self { + let items: Vec = + taxonomy.items.iter().map(|i| SerializedTaxonomyTerm::from_item(i, library)).collect(); + SerializedTaxonomy { + kind: &taxonomy.kind, + lang: &taxonomy.lang, + permalink: &taxonomy.permalink, + items, + } + } +} +/// All different taxonomies we have and their content +#[derive(Debug, Clone, PartialEq)] +pub struct Taxonomy { + pub kind: TaxonomyConfig, + pub lang: String, + pub slug: String, + pub path: String, + pub permalink: String, + // this vec is sorted by the count of item + pub items: Vec, +} + +impl Taxonomy { + pub(crate) fn new(tax_found: TaxonomyFound, config: &Config) -> Self { + let mut sorted_items = vec![]; + let slug = tax_found.slug; + for (name, pages) in tax_found.terms { + sorted_items.push(TaxonomyTerm::new(name, tax_found.lang, &slug, &pages, config)); + } + + sorted_items.sort_by(|a, b| match a.slug.cmp(&b.slug) { + Ordering::Less => Ordering::Less, + Ordering::Greater => Ordering::Greater, + Ordering::Equal => a.name.cmp(&b.name), + }); + sorted_items.dedup_by(|a, b| { + // custom Eq impl checks for equal permalinks + // here we make sure all pages from a get copied to b + // before dedup gets rid of it + if a == b { + b.merge(a.to_owned()); + true + } else { + false + } + }); + let path = if tax_found.lang != config.default_language { + format!("/{}/{}/", tax_found.lang, slug) + } else { + format!("/{}/", slug) + }; + let permalink = config.make_permalink(&path); + + Taxonomy { + slug, + lang: tax_found.lang.to_owned(), + kind: 
tax_found.config.clone(), + path, + permalink, + items: sorted_items, + } + } + + pub fn render_term( + &self, + item: &TaxonomyTerm, + tera: &Tera, + config: &Config, + library: &Library, + ) -> Result { + let mut context = Context::new(); + context.insert("config", &config.serialize(&self.lang)); + context.insert("lang", &self.lang); + context.insert("term", &SerializedTaxonomyTerm::from_item(item, library)); + context.insert("taxonomy", &self.kind); + context.insert("current_url", &self.permalink); + context.insert("current_path", &self.path); + + // Check for taxon-specific template, or use generic as fallback. + let specific_template = format!("{}/single.html", self.kind.name); + let template = check_template_fallbacks(&specific_template, tera, &config.theme) + .unwrap_or("taxonomy_single.html"); + + render_template(template, tera, context, &config.theme) + .with_context(|| format!("Failed to render single term {} page.", self.kind.name)) + } + + pub fn render_all_terms( + &self, + tera: &Tera, + config: &Config, + library: &Library, + ) -> Result { + let mut context = Context::new(); + context.insert("config", &config.serialize(&self.lang)); + let terms: Vec = + self.items.iter().map(|i| SerializedTaxonomyTerm::from_item(i, library)).collect(); + context.insert("terms", &terms); + context.insert("lang", &self.lang); + context.insert("taxonomy", &self.kind); + context.insert("current_url", &self.permalink); + context.insert("current_path", &self.path); + + // Check for taxon-specific template, or use generic as fallback. 
+ let specific_template = format!("{}/list.html", self.kind.name); + let template = check_template_fallbacks(&specific_template, tera, &config.theme) + .unwrap_or("taxonomy_list.html"); + + render_template(template, tera, context, &config.theme) + .with_context(|| format!("Failed to render a list of {} page.", self.kind.name)) + } + + pub fn to_serialized<'a>(&'a self, library: &'a Library) -> SerializedTaxonomy<'a> { + SerializedTaxonomy::from_taxonomy(self, library) + } + + pub fn len(&self) -> usize { + self.items.len() + } + + pub fn is_empty(&self) -> bool { + self.len() == 0 + } +} + +/// Only used while building the taxonomies +#[derive(Debug, PartialEq)] +pub(crate) struct TaxonomyFound<'a> { + pub lang: &'a str, + pub slug: String, + pub config: &'a TaxonomyConfig, + pub terms: AHashMap<&'a str, Vec<&'a Page>>, +} + +impl<'a> TaxonomyFound<'a> { + pub fn new(slug: String, lang: &'a str, config: &'a TaxonomyConfig) -> Self { + Self { slug, lang, config, terms: AHashMap::new() } + } +} diff --git a/components/content/src/types.rs b/components/content/src/types.rs new file mode 100644 index 0000000000..e058dec9a3 --- /dev/null +++ b/components/content/src/types.rs @@ -0,0 +1,20 @@ +use serde::{Deserialize, Serialize}; + +#[derive(Debug, Copy, Clone, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum SortBy { + /// Most recent to oldest + Date, + /// Most recent to oldest + #[serde(rename = "update_date")] + UpdateDate, + /// Sort by title lexicographically + Title, + /// Sort by titles using the bytes directly + #[serde(rename = "title_bytes")] + TitleBytes, + /// Lower weight comes first + Weight, + /// No sorting + None, +} diff --git a/components/library/src/content/mod.rs b/components/content/src/utils.rs similarity index 75% rename from components/library/src/content/mod.rs rename to components/content/src/utils.rs index 73660cfe81..d44959c10d 100644 --- a/components/library/src/content/mod.rs +++ 
b/components/content/src/utils.rs @@ -1,19 +1,10 @@ -mod file_info; -mod page; -mod section; -mod ser; - use std::path::{Path, PathBuf}; -use walkdir::WalkDir; - -pub use self::file_info::FileInfo; -pub use self::page::Page; -pub use self::section::Section; -pub use self::ser::{SerializingPage, SerializingSection}; +use libs::unicode_segmentation::UnicodeSegmentation; +use libs::walkdir::WalkDir; use config::Config; -use rendering::Heading; +use utils::table_of_contents::Heading; pub fn has_anchor(headings: &[Heading], anchor: &str) -> bool { for heading in headings { @@ -36,7 +27,7 @@ pub fn has_anchor(headings: &[Heading], anchor: &str) -> bool { pub fn find_related_assets(path: &Path, config: &Config, recursive: bool) -> Vec { let mut assets = vec![]; - let mut builder = WalkDir::new(path); + let mut builder = WalkDir::new(path).follow_links(true); if !recursive { builder = builder.max_depth(1); } @@ -54,18 +45,21 @@ pub fn find_related_assets(path: &Path, config: &Config, recursive: bool) -> Vec } if let Some(ref globset) = config.ignored_content_globset { - assets = assets - .into_iter() - .filter(|p| match p.strip_prefix(path) { - Err(_) => false, - Ok(file) => !globset.is_match(file), - }) - .collect(); + assets = assets.into_iter().filter(|p| !globset.is_match(p)).collect(); } assets } +/// Get word count and estimated reading time +pub fn get_reading_analytics(content: &str) -> (usize, usize) { + let word_count: usize = content.unicode_words().count(); + + // https://help.medium.com/hc/en-us/articles/214991667-Read-time + // 275 seems a bit too high though + (word_count, ((word_count + 199) / 200)) +} + #[cfg(test)] mod tests { use super::*; @@ -89,13 +83,10 @@ mod tests { let assets = find_related_assets(path, &Config::default(), true); assert_eq!(assets.len(), 5); - assert_eq!(assets.iter().filter(|p| p.extension().unwrap_or("".as_ref()) != "md").count(), 5); + assert_eq!(assets.iter().filter(|p| p.extension().unwrap_or_default() != "md").count(), 5); - 
for asset in vec!["example.js", "graph.jpg", "fail.png", "subdir/example.js", "extensionless"] { - assert!(assets - .iter() - .find(|p| p.strip_prefix(path).unwrap() == Path::new(asset)) - .is_some()) + for asset in ["example.js", "graph.jpg", "fail.png", "subdir/example.js", "extensionless"] { + assert!(assets.iter().any(|p| p.strip_prefix(path).unwrap() == Path::new(asset))) } } @@ -113,13 +104,10 @@ mod tests { File::create(path.join("subdir").join("example.js")).unwrap(); let assets = find_related_assets(path, &Config::default(), false); assert_eq!(assets.len(), 4); - assert_eq!(assets.iter().filter(|p| p.extension().unwrap_or("".as_ref()) != "md").count(), 4); + assert_eq!(assets.iter().filter(|p| p.extension().unwrap_or_default() != "md").count(), 4); - for asset in vec!["example.js", "graph.jpg", "fail.png", "extensionless"] { - assert!(assets - .iter() - .find(|p| p.strip_prefix(path).unwrap() == Path::new(asset)) - .is_some()) + for asset in ["example.js", "graph.jpg", "fail.png", "extensionless"] { + assert!(assets.iter().any(|p| p.strip_prefix(path).unwrap() == Path::new(asset))) } } #[test] @@ -192,4 +180,29 @@ mod tests { assert!(has_anchor(&input, "1-2")); } + + #[test] + fn reading_analytics_empty_text() { + let (word_count, reading_time) = get_reading_analytics(" "); + assert_eq!(word_count, 0); + assert_eq!(reading_time, 0); + } + + #[test] + fn reading_analytics_short_text() { + let (word_count, reading_time) = get_reading_analytics("Hello World"); + assert_eq!(word_count, 2); + assert_eq!(reading_time, 1); + } + + #[test] + fn reading_analytics_long_text() { + let mut content = String::new(); + for _ in 0..1000 { + content.push_str(" Hello world"); + } + let (word_count, reading_time) = get_reading_analytics(&content); + assert_eq!(word_count, 2000); + assert_eq!(reading_time, 10); + } } diff --git a/components/errors/Cargo.toml b/components/errors/Cargo.toml index 48f746a7e1..9e7700dab2 100644 --- a/components/errors/Cargo.toml +++ 
b/components/errors/Cargo.toml @@ -1,11 +1,7 @@ [package] name = "errors" version = "0.1.0" -authors = ["Vincent Prouillet "] -edition = "2018" +edition = "2021" [dependencies] -tera = "1" -toml = "0.5" -image = "0.23" -syntect = "4" +anyhow = "1.0.56" diff --git a/components/errors/src/lib.rs b/components/errors/src/lib.rs index dcaee80f42..d597aff987 100644 --- a/components/errors/src/lib.rs +++ b/components/errors/src/lib.rs @@ -1,119 +1 @@ -use std::convert::Into; -use std::error::Error as StdError; -use std::fmt; - -#[derive(Debug)] -pub enum ErrorKind { - Msg(String), - Tera(tera::Error), - Io(::std::io::Error), - Toml(toml::de::Error), - Image(image::ImageError), - Syntect(syntect::LoadingError), -} - -/// The Error type -#[derive(Debug)] -pub struct Error { - /// Kind of error - pub kind: ErrorKind, - pub source: Option>, -} - -impl StdError for Error { - fn source(&self) -> Option<&(dyn StdError + 'static)> { - match self.source { - Some(ref err) => Some(&**err), - None => match self.kind { - ErrorKind::Tera(ref err) => err.source(), - _ => None, - }, - } - } -} - -impl fmt::Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self.kind { - ErrorKind::Msg(ref message) => write!(f, "{}", message), - ErrorKind::Tera(ref e) => write!(f, "{}", e), - ErrorKind::Io(ref e) => write!(f, "{}", e), - ErrorKind::Toml(ref e) => write!(f, "{}", e), - ErrorKind::Image(ref e) => write!(f, "{}", e), - ErrorKind::Syntect(ref e) => write!(f, "{}", e), - } - } -} - -impl Error { - /// Creates generic error - pub fn msg(value: impl ToString) -> Self { - Self { kind: ErrorKind::Msg(value.to_string()), source: None } - } - - /// Creates generic error with a cause - pub fn chain(value: impl ToString, source: impl Into>) -> Self { - Self { kind: ErrorKind::Msg(value.to_string()), source: Some(source.into()) } - } - - /// Create an error from a list of path collisions, formatting the output - pub fn from_collisions(collisions: Vec<(String, Vec)>) -> 
Self { - let mut msg = String::from("Found path collisions:\n"); - - for (path, filepaths) in collisions { - let row = format!("- `{}` from files {:?}\n", path, filepaths); - msg.push_str(&row); - } - - Self { kind: ErrorKind::Msg(msg), source: None } - } -} - -impl From<&str> for Error { - fn from(e: &str) -> Self { - Self::msg(e) - } -} -impl From for Error { - fn from(e: String) -> Self { - Self::msg(e) - } -} -impl From for Error { - fn from(e: toml::de::Error) -> Self { - Self { kind: ErrorKind::Toml(e), source: None } - } -} -impl From for Error { - fn from(e: syntect::LoadingError) -> Self { - Self { kind: ErrorKind::Syntect(e), source: None } - } -} -impl From for Error { - fn from(e: tera::Error) -> Self { - Self { kind: ErrorKind::Tera(e), source: None } - } -} -impl From<::std::io::Error> for Error { - fn from(e: ::std::io::Error) -> Self { - Self { kind: ErrorKind::Io(e), source: None } - } -} -impl From for Error { - fn from(e: image::ImageError) -> Self { - Self { kind: ErrorKind::Image(e), source: None } - } -} -/// Convenient wrapper around std::Result. -pub type Result = ::std::result::Result; - -// So we can use bail! in all other crates -#[macro_export] -macro_rules! 
bail { - ($e:expr) => { - return Err($e.into()); - }; - ($fmt:expr, $($arg:tt)+) => { - return Err(format!($fmt, $($arg)+).into()); - }; -} +pub use anyhow::*; diff --git a/components/front_matter/Cargo.toml b/components/front_matter/Cargo.toml deleted file mode 100644 index 15b8e65043..0000000000 --- a/components/front_matter/Cargo.toml +++ /dev/null @@ -1,21 +0,0 @@ -[package] -name = "front_matter" -version = "0.1.0" -authors = ["Vincent Prouillet "] -edition = "2018" - -[dependencies] -tera = "1" -chrono = "0.4" -serde = "1" -serde_derive = "1" -serde_yaml = "0.8" -toml = "0.5" -regex = "1" -lazy_static = "1" - -errors = { path = "../errors" } -utils = { path = "../utils" } - -[dev-dependencies] -test-case = "1" diff --git a/components/imageproc/Cargo.toml b/components/imageproc/Cargo.toml index 8b11f1e04b..7f05f975b1 100644 --- a/components/imageproc/Cargo.toml +++ b/components/imageproc/Cargo.toml @@ -1,24 +1,16 @@ [package] name = "imageproc" version = "0.1.0" -authors = ["Vojtěch Král "] -edition = "2018" +edition = "2021" [dependencies] -lazy_static = "1" -regex = "1.0" -tera = "1" -image = "0.23" -rayon = "1" -webp = "0.1.1" serde = { version = "1", features = ["derive"] } -svg_metadata = "0.4.1" +kamadak-exif = "0.5.4" errors = { path = "../errors" } utils = { path = "../utils" } config = { path = "../config" } +libs = { path = "../libs" } [dev-dependencies] -# TODO: prune -serde_json = "1" -site = { path = "../site" } +tempfile = "3" diff --git a/components/imageproc/src/lib.rs b/components/imageproc/src/lib.rs index fef75730c2..db56d4888e 100644 --- a/components/imageproc/src/lib.rs +++ b/components/imageproc/src/lib.rs @@ -1,6 +1,5 @@ use std::collections::hash_map::Entry as HEntry; use std::collections::HashMap; -use std::error::Error as StdError; use std::ffi::OsStr; use std::fs::{self, File}; use std::hash::{Hash, Hasher}; @@ -11,23 +10,23 @@ use image::error::ImageResult; use image::io::Reader as ImgReader; use image::{imageops::FilterType, 
EncodableLayout}; use image::{ImageFormat, ImageOutputFormat}; -use lazy_static::lazy_static; +use libs::image::DynamicImage; +use libs::{image, once_cell, rayon, regex, svg_metadata, webp}; +use once_cell::sync::Lazy; use rayon::prelude::*; use regex::Regex; use serde::{Deserialize, Serialize}; use svg_metadata::Metadata as SvgMetadata; use config::Config; -use errors::{Error, Result}; +use errors::{anyhow, Context, Error, Result}; use utils::fs as ufs; static RESIZED_SUBDIR: &str = "processed_images"; const DEFAULT_Q_JPG: u8 = 75; -lazy_static! { - pub static ref RESIZED_FILENAME: Regex = - Regex::new(r#"([0-9a-f]{16})([0-9a-f]{2})[.](jpg|png|webp)"#).unwrap(); -} +static RESIZED_FILENAME: Lazy = + Lazy::new(|| Regex::new(r#"([0-9a-f]{16})([0-9a-f]{2})[.](jpg|png|webp)"#).unwrap()); /// Size and format read cheaply with `image`'s `Reader`. #[derive(Debug)] @@ -84,22 +83,20 @@ impl ResizeArgs { match op { "fit_width" => { if width.is_none() { - return Err("op=\"fit_width\" requires a `width` argument".into()); + return Err(anyhow!("op=\"fit_width\" requires a `width` argument")); } } "fit_height" => { if height.is_none() { - return Err("op=\"fit_height\" requires a `height` argument".into()); + return Err(anyhow!("op=\"fit_height\" requires a `height` argument")); } } "scale" | "fit" | "fill" => { if width.is_none() || height.is_none() { - return Err( - format!("op={} requires a `width` and `height` argument", op).into() - ); + return Err(anyhow!("op={} requires a `width` and `height` argument", op)); } } - _ => return Err(format!("Invalid image resize operation: {}", op).into()), + _ => return Err(anyhow!("Invalid image resize operation: {}", op)), }; Ok(match op { @@ -225,7 +222,7 @@ impl Format { "jpeg" | "jpg" => Ok(Jpeg(jpg_quality)), "png" => Ok(Png), "webp" => Ok(WebP(quality)), - _ => Err(format!("Invalid image format: {}", format).into()), + _ => Err(anyhow!("Invalid image format: {}", format)), } } @@ -323,6 +320,8 @@ impl ImageOp { None => img, }; + let 
img = fix_orientation(&img, &self.input_path).unwrap_or(img); + let mut f = File::create(target_path)?; match self.format { @@ -333,7 +332,8 @@ impl ImageOp { img.write_to(&mut f, ImageOutputFormat::Jpeg(q))?; } Format::WebP(q) => { - let encoder = webp::Encoder::from_image(&img); + let encoder = webp::Encoder::from_image(&img) + .map_err(|_| anyhow!("Unable to load this kind of image with webp"))?; let memory = match q { Some(q) => encoder.encode(q as f32), None => encoder.encode_lossless(), @@ -346,6 +346,31 @@ impl ImageOp { } } +/// Apply image rotation based on EXIF data +/// Returns `None` if no transformation is needed +pub fn fix_orientation(img: &DynamicImage, path: &Path) -> Option { + let file = std::fs::File::open(path).ok()?; + let mut buf_reader = std::io::BufReader::new(&file); + let exif_reader = exif::Reader::new(); + let exif = exif_reader.read_from_container(&mut buf_reader).ok()?; + let orientation = + exif.get_field(exif::Tag::Orientation, exif::In::PRIMARY)?.value.get_uint(0)?; + match orientation { + // Values are taken from the page 30 of + // https://www.cipa.jp/std/documents/e/DC-008-2012_E.pdf + // For more details check http://sylvana.net/jpegcrop/exif_orientation.html + 1 => None, + 2 => Some(img.fliph()), + 3 => Some(img.rotate180()), + 4 => Some(img.flipv()), + 5 => Some(img.fliph().rotate270()), + 6 => Some(img.rotate90()), + 7 => Some(img.fliph().rotate90()), + 8 => Some(img.rotate270()), + _ => None, + } +} + #[derive(Debug, Serialize, Deserialize, PartialEq, Eq)] pub struct EnqueueResponse { /// The final URL for that asset @@ -416,9 +441,8 @@ impl Processor { format: &str, quality: Option, ) -> Result { - let meta = ImageMeta::read(&input_path).map_err(|e| { - Error::chain(format!("Failed to read image: {}", input_path.display()), e) - })?; + let meta = ImageMeta::read(&input_path) + .with_context(|| format!("Failed to read image: {}", input_path.display()))?; let args = ResizeArgs::from_args(op, width, height)?; let op = 
ResizeOp::new(args, meta.size); @@ -530,8 +554,9 @@ impl Processor { .map(|(hash, op)| { let target = self.output_dir.join(Self::op_filename(*hash, op.collision_id, op.format)); - op.perform(&target).map_err(|e| { - Error::chain(format!("Failed to process image: {}", op.input_path.display()), e) + + op.perform(&target).with_context(|| { + format!("Failed to process image: {}", op.input_path.display()) }) }) .collect::>() @@ -572,29 +597,28 @@ pub fn read_image_metadata>(path: P) -> Result let path = path.as_ref(); let ext = path.extension().and_then(OsStr::to_str).unwrap_or("").to_lowercase(); - let error = |e: Box| { - Error::chain(format!("Failed to read image: {}", path.display()), e) - }; + let err_context = || format!("Failed to read image: {}", path.display()); match ext.as_str() { "svg" => { - let img = SvgMetadata::parse_file(&path).map_err(|e| error(e.into()))?; + let img = SvgMetadata::parse_file(&path).with_context(err_context)?; match (img.height(), img.width(), img.view_box()) { (Some(h), Some(w), _) => Ok((h, w)), (_, _, Some(view_box)) => Ok((view_box.height, view_box.width)), - _ => Err("Invalid dimensions: SVG width/height and viewbox not set.".into()), + _ => Err(anyhow!("Invalid dimensions: SVG width/height and viewbox not set.")), } - .map(|(h, w)| ImageMetaResponse::new_svg(h as u32, w as u32)) + //this is not a typo, this returns the correct values for width and height. 
+ .map(|(h, w)| ImageMetaResponse::new_svg(w as u32, h as u32)) } "webp" => { - // Unfortunatelly we have to load the entire image here, unlike with the others :| - let data = fs::read(path).map_err(|e| error(e.into()))?; + // Unfortunately we have to load the entire image here, unlike with the others :| + let data = fs::read(path).with_context(err_context)?; let decoder = webp::Decoder::new(&data[..]); decoder.decode().map(ImageMetaResponse::from).ok_or_else(|| { Error::msg(format!("Failed to decode WebP image: {}", path.display())) }) } - _ => ImageMeta::read(path).map(ImageMetaResponse::from).map_err(|e| error(e.into())), + _ => ImageMeta::read(path).map(ImageMetaResponse::from).with_context(err_context), } } diff --git a/components/imageproc/tests/resize_image.rs b/components/imageproc/tests/resize_image.rs index e4e376b9f6..507de5e242 100644 --- a/components/imageproc/tests/resize_image.rs +++ b/components/imageproc/tests/resize_image.rs @@ -1,11 +1,10 @@ use std::env; use std::path::{PathBuf, MAIN_SEPARATOR as SLASH}; -use lazy_static::lazy_static; - use config::Config; -use imageproc::{assert_processed_path_matches, ImageMetaResponse, Processor}; -use utils::fs as ufs; +use imageproc::{assert_processed_path_matches, fix_orientation, ImageMetaResponse, Processor}; +use libs::image::{self, DynamicImage, GenericImageView, Pixel}; +use libs::once_cell::sync::Lazy; static CONFIG: &str = r#" title = "imageproc integration tests" @@ -17,18 +16,10 @@ build_search_index = false highlight_code = false "#; -lazy_static! 
{ - static ref TEST_IMGS: PathBuf = - [env!("CARGO_MANIFEST_DIR"), "tests", "test_imgs"].iter().collect(); - static ref TMPDIR: PathBuf = { - let tmpdir = option_env!("CARGO_TARGET_TMPDIR").map(PathBuf::from).unwrap_or_else(|| { - env::current_exe().unwrap().parent().unwrap().parent().unwrap().join("tmpdir") - }); - ufs::ensure_directory_exists(&tmpdir).unwrap(); - tmpdir - }; - static ref PROCESSED_PREFIX: String = format!("static{0}processed_images{0}", SLASH); -} +static TEST_IMGS: Lazy = + Lazy::new(|| [env!("CARGO_MANIFEST_DIR"), "tests", "test_imgs"].iter().collect()); +static PROCESSED_PREFIX: Lazy = + Lazy::new(|| format!("static{0}processed_images{0}", SLASH)); #[allow(clippy::too_many_arguments)] fn image_op_test( @@ -44,9 +35,9 @@ fn image_op_test( orig_height: u32, ) { let source_path = TEST_IMGS.join(source_img); - + let tmpdir = tempfile::tempdir().unwrap().into_path(); let config = Config::parse(CONFIG).unwrap(); - let mut proc = Processor::new(TMPDIR.clone(), &config); + let mut proc = Processor::new(tmpdir.clone(), &config); let resp = proc.enqueue(source_img.into(), source_path, op, width, height, format, None).unwrap(); @@ -60,7 +51,7 @@ fn image_op_test( proc.do_process().unwrap(); let processed_path = PathBuf::from(&resp.static_path); - let processed_size = imageproc::read_image_metadata(&TMPDIR.join(processed_path)) + let processed_size = imageproc::read_image_metadata(&tmpdir.join(processed_path)) .map(|meta| (meta.width, meta.height)) .unwrap(); assert_eq!(processed_size, (expect_width, expect_height)); @@ -163,4 +154,75 @@ fn read_image_metadata_webp() { ); } +#[test] +fn fix_orientation_test() { + fn load_img_and_fix_orientation(img_name: &str) -> DynamicImage { + let path = TEST_IMGS.join(img_name); + let img = image::open(&path).unwrap(); + fix_orientation(&img, &path).unwrap_or(img) + } + + let img = image::open(TEST_IMGS.join("exif_1.jpg")).unwrap(); + assert!(check_img(img)); + 
assert!(check_img(load_img_and_fix_orientation("exif_0.jpg"))); + assert!(check_img(load_img_and_fix_orientation("exif_1.jpg"))); + assert!(check_img(load_img_and_fix_orientation("exif_2.jpg"))); + assert!(check_img(load_img_and_fix_orientation("exif_3.jpg"))); + assert!(check_img(load_img_and_fix_orientation("exif_4.jpg"))); + assert!(check_img(load_img_and_fix_orientation("exif_5.jpg"))); + assert!(check_img(load_img_and_fix_orientation("exif_6.jpg"))); + assert!(check_img(load_img_and_fix_orientation("exif_7.jpg"))); + assert!(check_img(load_img_and_fix_orientation("exif_8.jpg"))); +} + +#[test] +fn resize_image_applies_exif_rotation() { + // No exif metadata + assert!(resize_and_check("exif_0.jpg")); + // 1: Horizontal (normal) + assert!(resize_and_check("exif_1.jpg")); + // 2: Mirror horizontal + assert!(resize_and_check("exif_2.jpg")); + // 3: Rotate 180 + assert!(resize_and_check("exif_3.jpg")); + // 4: Mirror vertical + assert!(resize_and_check("exif_4.jpg")); + // 5: Mirror horizontal and rotate 270 CW + assert!(resize_and_check("exif_5.jpg")); + // 6: Rotate 90 CW + assert!(resize_and_check("exif_6.jpg")); + // 7: Mirror horizontal and rotate 90 CW + assert!(resize_and_check("exif_7.jpg")); + // 8: Rotate 270 CW + assert!(resize_and_check("exif_8.jpg")); +} + +fn resize_and_check(source_img: &str) -> bool { + let source_path = TEST_IMGS.join(source_img); + let tmpdir = tempfile::tempdir().unwrap().into_path(); + let config = Config::parse(CONFIG).unwrap(); + let mut proc = Processor::new(tmpdir.clone(), &config); + + let resp = proc + .enqueue(source_img.into(), source_path, "scale", Some(16), Some(16), "jpg", None) + .unwrap(); + + proc.do_process().unwrap(); + let processed_path = PathBuf::from(&resp.static_path); + let img = image::open(&tmpdir.join(processed_path)).unwrap(); + check_img(img) +} + +// Checks that an image has the correct orientation +fn check_img(img: DynamicImage) -> bool { + // top left is red + img.get_pixel(0, 0)[0] > 250 // 
because of the jpeg compression some colors are a bit less than 255 + // top right is green + && img.get_pixel(15, 0)[1] > 250 + // bottom left is blue + && img.get_pixel(0, 15)[2] > 250 + // bottom right is white + && img.get_pixel(15, 15).channels() == [255, 255, 255, 255] +} + // TODO: Test that hash remains the same if physical path is changed diff --git a/components/imageproc/tests/test_imgs/exif_0.jpg b/components/imageproc/tests/test_imgs/exif_0.jpg new file mode 100644 index 0000000000..36beb6863c Binary files /dev/null and b/components/imageproc/tests/test_imgs/exif_0.jpg differ diff --git a/components/imageproc/tests/test_imgs/exif_1.jpg b/components/imageproc/tests/test_imgs/exif_1.jpg new file mode 100644 index 0000000000..c0bb69bf4d Binary files /dev/null and b/components/imageproc/tests/test_imgs/exif_1.jpg differ diff --git a/components/imageproc/tests/test_imgs/exif_2.jpg b/components/imageproc/tests/test_imgs/exif_2.jpg new file mode 100644 index 0000000000..e6f3ef37f4 Binary files /dev/null and b/components/imageproc/tests/test_imgs/exif_2.jpg differ diff --git a/components/imageproc/tests/test_imgs/exif_3.jpg b/components/imageproc/tests/test_imgs/exif_3.jpg new file mode 100644 index 0000000000..e5012999fa Binary files /dev/null and b/components/imageproc/tests/test_imgs/exif_3.jpg differ diff --git a/components/imageproc/tests/test_imgs/exif_4.jpg b/components/imageproc/tests/test_imgs/exif_4.jpg new file mode 100644 index 0000000000..807020eb05 Binary files /dev/null and b/components/imageproc/tests/test_imgs/exif_4.jpg differ diff --git a/components/imageproc/tests/test_imgs/exif_5.jpg b/components/imageproc/tests/test_imgs/exif_5.jpg new file mode 100644 index 0000000000..eb1495ed47 Binary files /dev/null and b/components/imageproc/tests/test_imgs/exif_5.jpg differ diff --git a/components/imageproc/tests/test_imgs/exif_6.jpg b/components/imageproc/tests/test_imgs/exif_6.jpg new file mode 100644 index 0000000000..64a4ec5f49 Binary files 
/dev/null and b/components/imageproc/tests/test_imgs/exif_6.jpg differ diff --git a/components/imageproc/tests/test_imgs/exif_7.jpg b/components/imageproc/tests/test_imgs/exif_7.jpg new file mode 100644 index 0000000000..a2acb70484 Binary files /dev/null and b/components/imageproc/tests/test_imgs/exif_7.jpg differ diff --git a/components/imageproc/tests/test_imgs/exif_8.jpg b/components/imageproc/tests/test_imgs/exif_8.jpg new file mode 100644 index 0000000000..9bebefa1a3 Binary files /dev/null and b/components/imageproc/tests/test_imgs/exif_8.jpg differ diff --git a/components/library/Cargo.toml b/components/library/Cargo.toml deleted file mode 100644 index d7122e0e2d..0000000000 --- a/components/library/Cargo.toml +++ /dev/null @@ -1,28 +0,0 @@ -[package] -name = "library" -version = "0.1.0" -authors = ["Vincent Prouillet "] -edition = "2018" - -[dependencies] -slotmap = "1" -rayon = "1" -chrono = { version = "0.4", features = ["serde"] } -tera = "1" -serde = "1" -serde_derive = "1" -regex = "1" -lazy_static = "1" -lexical-sort = "0.3" -walkdir = "2" - -front_matter = { path = "../front_matter" } -config = { path = "../config" } -utils = { path = "../utils" } -rendering = { path = "../rendering" } -errors = { path = "../errors" } - -[dev-dependencies] -tempfile = "3" -toml = "0.5" -globset = "0.4" diff --git a/components/library/src/content/ser.rs b/components/library/src/content/ser.rs deleted file mode 100644 index b506a6776f..0000000000 --- a/components/library/src/content/ser.rs +++ /dev/null @@ -1,352 +0,0 @@ -//! 
What we are sending to the templates when rendering them -use std::collections::HashMap; -use std::collections::HashSet; -use std::path::Path; - -use serde_derive::Serialize; -use tera::{Map, Value}; - -use crate::content::{Page, Section}; -use crate::library::Library; -use rendering::Heading; - -#[derive(Clone, Debug, PartialEq, Serialize)] -pub struct TranslatedContent<'a> { - lang: &'a str, - permalink: &'a str, - title: &'a Option, - /// The path to the markdown file; useful for retrieving the full page through - /// the `get_page` function. - path: &'a Path, -} - -impl<'a> TranslatedContent<'a> { - // copypaste eh, not worth creating an enum imo - pub fn find_all_sections(section: &'a Section, library: &'a Library) -> Vec { - let mut translations = vec![]; - - #[allow(clippy::or_fun_call)] - for key in library - .translations - .get(§ion.file.canonical) - .or(Some(&HashSet::new())) - .unwrap() - .iter() - { - let other = library.get_section_by_key(*key); - translations.push(TranslatedContent { - lang: &other.lang, - permalink: &other.permalink, - title: &other.meta.title, - path: &other.file.path, - }); - } - - translations - } - - pub fn find_all_pages(page: &'a Page, library: &'a Library) -> Vec { - let mut translations = vec![]; - - #[allow(clippy::or_fun_call)] - for key in - library.translations.get(&page.file.canonical).or(Some(&HashSet::new())).unwrap().iter() - { - let other = library.get_page_by_key(*key); - translations.push(TranslatedContent { - lang: &other.lang, - permalink: &other.permalink, - title: &other.meta.title, - path: &other.file.path, - }); - } - - translations - } -} - -#[derive(Clone, Debug, PartialEq, Serialize)] -pub struct SerializingPage<'a> { - relative_path: &'a str, - content: &'a str, - permalink: &'a str, - slug: &'a str, - ancestors: Vec<&'a str>, - title: &'a Option, - description: &'a Option, - updated: &'a Option, - date: &'a Option, - year: Option, - month: Option, - day: Option, - taxonomies: &'a HashMap>, - extra: &'a 
Map, - path: &'a str, - components: &'a [String], - summary: &'a Option, - toc: &'a [Heading], - word_count: Option, - reading_time: Option, - assets: &'a [String], - draft: bool, - lang: &'a str, - lighter: Option>>, - heavier: Option>>, - earlier_updated: Option>>, - later_updated: Option>>, - earlier: Option>>, - later: Option>>, - title_prev: Option>>, - title_next: Option>>, - translations: Vec>, -} - -impl<'a> SerializingPage<'a> { - /// Grabs all the data from a page, including sibling pages - pub fn from_page(page: &'a Page, library: &'a Library) -> Self { - let mut year = None; - let mut month = None; - let mut day = None; - if let Some(d) = page.meta.datetime_tuple { - year = Some(d.0); - month = Some(d.1); - day = Some(d.2); - } - let pages = library.pages(); - let lighter = page - .lighter - .map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library)))); - let heavier = page - .heavier - .map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library)))); - let earlier_updated = page - .earlier_updated - .map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library)))); - let later_updated = page - .later_updated - .map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library)))); - let earlier = page - .earlier - .map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library)))); - let later = page - .later - .map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library)))); - let title_prev = page - .title_prev - .map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library)))); - let title_next = page - .title_next - .map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library)))); - let ancestors = page - .ancestors - .iter() - .map(|k| library.get_section_by_key(*k).file.relative.as_str()) - .collect(); - - let translations = TranslatedContent::find_all_pages(page, library); - - SerializingPage { - relative_path: &page.file.relative, - 
ancestors, - content: &page.content, - permalink: &page.permalink, - slug: &page.slug, - title: &page.meta.title, - description: &page.meta.description, - extra: &page.meta.extra, - updated: &page.meta.updated, - date: &page.meta.date, - year, - month, - day, - taxonomies: &page.meta.taxonomies, - path: &page.path, - components: &page.components, - summary: &page.summary, - toc: &page.toc, - word_count: page.word_count, - reading_time: page.reading_time, - assets: &page.serialized_assets, - draft: page.meta.draft, - lang: &page.lang, - lighter, - heavier, - earlier_updated, - later_updated, - earlier, - later, - title_prev, - title_next, - translations, - } - } - - /// currently only used in testing - pub fn get_title(&'a self) -> &'a Option { - self.title - } - - /// Same as from_page but does not fill sibling pages - pub fn from_page_basic(page: &'a Page, library: Option<&'a Library>) -> Self { - let mut year = None; - let mut month = None; - let mut day = None; - if let Some(d) = page.meta.datetime_tuple { - year = Some(d.0); - month = Some(d.1); - day = Some(d.2); - } - let ancestors = if let Some(lib) = library { - page.ancestors - .iter() - .map(|k| lib.get_section_by_key(*k).file.relative.as_str()) - .collect() - } else { - vec![] - }; - - let translations = if let Some(lib) = library { - TranslatedContent::find_all_pages(page, lib) - } else { - vec![] - }; - - SerializingPage { - relative_path: &page.file.relative, - ancestors, - content: &page.content, - permalink: &page.permalink, - slug: &page.slug, - title: &page.meta.title, - description: &page.meta.description, - extra: &page.meta.extra, - updated: &page.meta.updated, - date: &page.meta.date, - year, - month, - day, - taxonomies: &page.meta.taxonomies, - path: &page.path, - components: &page.components, - summary: &page.summary, - toc: &page.toc, - word_count: page.word_count, - reading_time: page.reading_time, - assets: &page.serialized_assets, - draft: page.meta.draft, - lang: &page.lang, - lighter: 
None, - heavier: None, - earlier_updated: None, - later_updated: None, - earlier: None, - later: None, - title_prev: None, - title_next: None, - translations, - } - } -} - -#[derive(Clone, Debug, PartialEq, Serialize)] -pub struct SerializingSection<'a> { - relative_path: &'a str, - content: &'a str, - permalink: &'a str, - draft: bool, - ancestors: Vec<&'a str>, - title: &'a Option, - description: &'a Option, - extra: &'a Map, - path: &'a str, - components: &'a [String], - toc: &'a [Heading], - word_count: Option, - reading_time: Option, - lang: &'a str, - assets: &'a [String], - pages: Vec>, - subsections: Vec<&'a str>, - translations: Vec>, -} - -impl<'a> SerializingSection<'a> { - pub fn from_section(section: &'a Section, library: &'a Library) -> Self { - let mut pages = Vec::with_capacity(section.pages.len()); - let mut subsections = Vec::with_capacity(section.subsections.len()); - - for k in §ion.pages { - pages.push(library.get_page_by_key(*k).to_serialized_basic(library)); - } - - for k in §ion.subsections { - subsections.push(library.get_section_path_by_key(*k)); - } - - let ancestors = section - .ancestors - .iter() - .map(|k| library.get_section_by_key(*k).file.relative.as_str()) - .collect(); - let translations = TranslatedContent::find_all_sections(section, library); - - SerializingSection { - relative_path: §ion.file.relative, - ancestors, - draft: section.meta.draft, - content: §ion.content, - permalink: §ion.permalink, - title: §ion.meta.title, - description: §ion.meta.description, - extra: §ion.meta.extra, - path: §ion.path, - components: §ion.components, - toc: §ion.toc, - word_count: section.word_count, - reading_time: section.reading_time, - assets: §ion.serialized_assets, - lang: §ion.lang, - pages, - subsections, - translations, - } - } - - /// Same as from_section but doesn't fetch pages - pub fn from_section_basic(section: &'a Section, library: Option<&'a Library>) -> Self { - let mut ancestors = vec![]; - let mut translations = vec![]; - 
let mut subsections = vec![]; - if let Some(lib) = library { - ancestors = section - .ancestors - .iter() - .map(|k| lib.get_section_by_key(*k).file.relative.as_str()) - .collect(); - translations = TranslatedContent::find_all_sections(section, lib); - subsections = - section.subsections.iter().map(|k| lib.get_section_path_by_key(*k)).collect(); - } - - SerializingSection { - relative_path: §ion.file.relative, - ancestors, - draft: section.meta.draft, - content: §ion.content, - permalink: §ion.permalink, - title: §ion.meta.title, - description: §ion.meta.description, - extra: §ion.meta.extra, - path: §ion.path, - components: §ion.components, - toc: §ion.toc, - word_count: section.word_count, - reading_time: section.reading_time, - assets: §ion.serialized_assets, - lang: §ion.lang, - pages: vec![], - subsections, - translations, - } - } -} diff --git a/components/library/src/lib.rs b/components/library/src/lib.rs deleted file mode 100644 index 71c4b41ec1..0000000000 --- a/components/library/src/lib.rs +++ /dev/null @@ -1,13 +0,0 @@ -mod content; -mod library; -mod pagination; -mod sorting; -mod taxonomies; - -pub use slotmap::{DenseSlotMap, Key}; - -pub use crate::library::Library; -pub use content::{Page, Section, SerializingPage, SerializingSection}; -pub use pagination::Paginator; -pub use sorting::sort_actual_pages_by_date; -pub use taxonomies::{find_taxonomies, Taxonomy, TaxonomyItem}; diff --git a/components/library/src/library.rs b/components/library/src/library.rs deleted file mode 100644 index baa9209a31..0000000000 --- a/components/library/src/library.rs +++ /dev/null @@ -1,500 +0,0 @@ -use std::collections::{HashMap, HashSet}; -use std::path::{Path, PathBuf}; - -use slotmap::{DefaultKey, DenseSlotMap}; - -use crate::content::{Page, Section}; -use crate::sorting::{ - find_siblings, sort_pages_by_date, sort_pages_by_title, sort_pages_by_weight, -}; -use config::Config; -use front_matter::{PageFrontMatter, SortBy}; - -// Like vec! 
but for HashSet -macro_rules! set { - ( $( $x:expr ),* ) => { - { - let mut s = HashSet::new(); - $( - s.insert($x); - )* - s - } - }; -} - -/// Houses everything about pages and sections -/// Think of it as a database where each page and section has an id (Key here) -/// that can be used to find the actual value -/// Sections and pages can then refer to other elements by those keys, which are very cheap to -/// copy. -/// We can assume the keys are always existing as removing a page/section deletes all references -/// to that key. -#[derive(Debug)] -pub struct Library { - /// All the pages of the site - pages: DenseSlotMap, - /// All the sections of the site - sections: DenseSlotMap, - /// A mapping path -> key for pages so we can easily get their key - pub paths_to_pages: HashMap, - /// A mapping path -> key for sections so we can easily get their key - pub paths_to_sections: HashMap, - /// Whether we need to look for translations - is_multilingual: bool, - - // aliases -> files, - // so we can easily check for conflicts - pub reverse_aliases: HashMap>, - - pub translations: HashMap>, -} - -impl Library { - pub fn new(cap_pages: usize, cap_sections: usize, is_multilingual: bool) -> Self { - Library { - pages: DenseSlotMap::with_capacity(cap_pages), - sections: DenseSlotMap::with_capacity(cap_sections), - paths_to_pages: HashMap::with_capacity(cap_pages), - paths_to_sections: HashMap::with_capacity(cap_sections), - is_multilingual, - reverse_aliases: HashMap::new(), - translations: HashMap::new(), - } - } - - fn insert_reverse_aliases(&mut self, entries: Vec, file_rel_path: &str) { - for entry in entries { - self.reverse_aliases - .entry(entry) - .and_modify(|s| { - s.insert(file_rel_path.to_owned()); - }) - .or_insert_with(|| { - let mut s = HashSet::new(); - s.insert(file_rel_path.to_owned()); - s - }); - } - } - - /// Add a section and return its Key - pub fn insert_section(&mut self, section: Section) -> DefaultKey { - let file_path = 
section.file.path.clone(); - let rel_path = section.path.clone(); - - let mut entries = vec![rel_path]; - entries.extend(section.meta.aliases.to_vec()); - self.insert_reverse_aliases(entries, §ion.file.relative); - - let key = self.sections.insert(section); - self.paths_to_sections.insert(file_path, key); - key - } - - /// Add a page and return its Key - pub fn insert_page(&mut self, page: Page) -> DefaultKey { - let file_path = page.file.path.clone(); - let rel_path = page.path.clone(); - - let mut entries = vec![rel_path]; - entries.extend(page.meta.aliases.to_vec()); - self.insert_reverse_aliases(entries, &page.file.relative); - - let key = self.pages.insert(page); - - self.paths_to_pages.insert(file_path, key); - key - } - - pub fn pages(&self) -> &DenseSlotMap { - &self.pages - } - - pub fn pages_mut(&mut self) -> &mut DenseSlotMap { - &mut self.pages - } - - pub fn pages_values(&self) -> Vec<&Page> { - self.pages.values().collect::>() - } - - pub fn sections(&self) -> &DenseSlotMap { - &self.sections - } - - pub fn sections_mut(&mut self) -> &mut DenseSlotMap { - &mut self.sections - } - - pub fn sections_values(&self) -> Vec<&Section> { - self.sections.values().collect::>() - } - - /// Find out the direct subsections of each subsection if there are some - /// as well as the pages for each section - pub fn populate_sections(&mut self, config: &Config) { - let root_path = - self.sections.values().find(|s| s.is_index()).map(|s| s.file.parent.clone()).unwrap(); - // We are going to get both the ancestors and grandparents for each section in one go - let mut ancestors: HashMap> = HashMap::new(); - let mut subsections: HashMap> = HashMap::new(); - - for (key, section) in self.sections.iter_mut() { - // Make sure the pages of a section are empty since we can call that many times on `serve` - section.pages = vec![]; - section.ignored_pages = vec![]; - - if let Some(ref grand_parent) = section.file.grand_parent { - subsections - // Using the original filename to work 
for multi-lingual sections - .entry(grand_parent.join(§ion.file.filename)) - .or_insert_with(Vec::new) - .push(section.file.path.clone()); - } - - // populate translations if necessary - if self.is_multilingual { - self.translations - .entry(section.file.canonical.clone()) - .and_modify(|trans| { - trans.insert(key); - }) - .or_insert(set![key]); - }; - - // Index has no ancestors, no need to go through it - if section.is_index() { - ancestors.insert(section.file.path.clone(), vec![]); - continue; - } - - let mut path = root_path.clone(); - let root_key = self.paths_to_sections[&root_path.join(§ion.file.filename)]; - // Index section is the first ancestor of every single section - let mut parents = vec![root_key]; - for component in §ion.file.components { - path = path.join(component); - // Skip itself - if path == section.file.parent { - continue; - } - if let Some(section_key) = - self.paths_to_sections.get(&path.join(§ion.file.filename)) - { - parents.push(*section_key); - } - } - ancestors.insert(section.file.path.clone(), parents); - } - - for (key, page) in &mut self.pages { - let parent_filename = if page.lang != config.default_language { - format!("_index.{}.md", page.lang) - } else { - "_index.md".to_string() - }; - let mut parent_section_path = page.file.parent.join(&parent_filename); - while let Some(section_key) = self.paths_to_sections.get(&parent_section_path) { - let parent_is_transparent; - // We need to get a reference to a section later so keep the scope of borrowing small - { - let section = self.sections.get_mut(*section_key).unwrap(); - section.pages.push(key); - parent_is_transparent = section.meta.transparent; - } - page.ancestors = - ancestors.get(&parent_section_path).cloned().unwrap_or_else(Vec::new); - // Don't forget to push the actual parent - page.ancestors.push(*section_key); - - // Find the page template if one of a parent has page_template set - // Stops after the first one found, keep in mind page.ancestors - // is [index, ..., 
parent] so we need to reverse it first - if page.meta.template.is_none() { - for ancestor in page.ancestors.iter().rev() { - let s = self.sections.get(*ancestor).unwrap(); - if s.meta.page_template.is_some() { - page.meta.template = s.meta.page_template.clone(); - break; - } - } - } - - if !parent_is_transparent { - break; - } - - // We've added `_index(.{LANG})?.md` so if we are here so we need to go up twice - match parent_section_path.clone().parent().unwrap().parent() { - Some(parent) => parent_section_path = parent.join(&parent_filename), - None => break, - } - } - - // populate translations if necessary - if self.is_multilingual { - self.translations - .entry(page.file.canonical.clone()) - .and_modify(|trans| { - trans.insert(key); - }) - .or_insert(set![key]); - }; - } - - self.sort_sections_pages(); - - let sections = self.paths_to_sections.clone(); - let mut sections_weight = HashMap::new(); - for (key, section) in &self.sections { - sections_weight.insert(key, section.meta.weight); - } - - for section in self.sections.values_mut() { - if let Some(children) = subsections.get(§ion.file.path) { - let mut children: Vec<_> = children.iter().map(|p| sections[p]).collect(); - children.sort_by(|a, b| sections_weight[a].cmp(§ions_weight[b])); - section.subsections = children; - } - section.ancestors = ancestors.get(§ion.file.path).cloned().unwrap_or_else(Vec::new); - } - } - - /// Sort all sections pages according to sorting method given - /// Pages that cannot be sorted are set to the section.ignored_pages instead - pub fn sort_sections_pages(&mut self) { - fn get_data<'a, T>( - section: &'a Section, - pages: &'a DenseSlotMap, - field: impl Fn(&'a PageFrontMatter) -> Option, - ) -> Vec<(&'a DefaultKey, Option, &'a str)> { - section - .pages - .iter() - .map(|k| { - if let Some(page) = pages.get(*k) { - (k, field(&page.meta), page.permalink.as_ref()) - } else { - unreachable!("Sorting got an unknown page") - } - }) - .collect() - } - - let mut updates = 
HashMap::new(); - for (key, section) in &self.sections { - let (sorted_pages, cannot_be_sorted_pages) = match section.meta.sort_by { - SortBy::None => continue, - SortBy::Date => { - let data = get_data(section, &self.pages, |meta| meta.datetime); - - sort_pages_by_date(data) - } - SortBy::UpdateDate => { - let data = get_data(section, &self.pages, |meta| { - std::cmp::max(meta.datetime, meta.updated_datetime) - }); - - sort_pages_by_date(data) - } - SortBy::Title => { - let data = get_data(section, &self.pages, |meta| meta.title.as_deref()); - - sort_pages_by_title(data) - } - SortBy::Weight => { - let data = get_data(section, &self.pages, |meta| meta.weight); - - sort_pages_by_weight(data) - } - }; - updates.insert(key, (sorted_pages, cannot_be_sorted_pages, section.meta.sort_by)); - } - - for (key, (sorted, cannot_be_sorted, sort_by)) in updates { - let section_is_transparent = if let Some(section) = self.sections.get(key) { - section.meta.transparent - } else { - false - }; - - if !section_is_transparent { - // Find sibling between sorted pages first - let with_siblings = find_siblings(&sorted); - - for (k2, val1, val2) in with_siblings { - if let Some(page) = self.pages.get_mut(k2) { - match sort_by { - SortBy::Date => { - page.earlier = val2; - page.later = val1; - } - SortBy::UpdateDate => { - page.earlier_updated = val2; - page.later_updated = val1; - } - SortBy::Title => { - page.title_prev = val1; - page.title_next = val2; - } - SortBy::Weight => { - page.lighter = val1; - page.heavier = val2; - } - SortBy::None => { - unreachable!("Impossible to find siblings in SortBy::None") - } - } - } else { - unreachable!("Sorting got an unknown page") - } - } - } - - if let Some(s) = self.sections.get_mut(key) { - s.pages = sorted; - s.ignored_pages = cannot_be_sorted; - } - } - } - - /// Find all the orphan pages: pages that are in a folder without an `_index.md` - pub fn get_all_orphan_pages(&self) -> Vec<&Page> { - let pages_in_sections = - 
self.sections.values().flat_map(|s| &s.pages).collect::>(); - - self.pages - .iter() - .filter(|(key, _)| !pages_in_sections.contains(&key)) - .map(|(_, page)| page) - .collect() - } - - /// Used in integration tests - pub fn get_section_key>(&self, path: P) -> Option<&DefaultKey> { - self.paths_to_sections.get(path.as_ref()) - } - - pub fn get_section>(&self, path: P) -> Option<&Section> { - self.sections.get(self.paths_to_sections.get(path.as_ref()).cloned().unwrap_or_default()) - } - - /// Used in integration tests - pub fn get_section_mut>(&mut self, path: P) -> Option<&mut Section> { - self.sections - .get_mut(self.paths_to_sections.get(path.as_ref()).cloned().unwrap_or_default()) - } - - pub fn get_section_by_key(&self, key: DefaultKey) -> &Section { - self.sections.get(key).unwrap() - } - - pub fn get_section_path_by_key(&self, key: DefaultKey) -> &str { - &self.get_section_by_key(key).file.relative - } - - pub fn get_page>(&self, path: P) -> Option<&Page> { - self.pages.get(self.paths_to_pages.get(path.as_ref()).cloned().unwrap_or_default()) - } - - pub fn get_page_by_key(&self, key: DefaultKey) -> &Page { - self.pages.get(key).unwrap() - } - - pub fn remove_section>(&mut self, path: P) -> Option
{ - if let Some(k) = self.paths_to_sections.remove(path.as_ref()) { - self.sections.remove(k) - } else { - None - } - } - - pub fn remove_page>(&mut self, path: P) -> Option { - if let Some(k) = self.paths_to_pages.remove(path.as_ref()) { - self.pages.remove(k) - } else { - None - } - } - - pub fn contains_section>(&self, path: P) -> bool { - self.paths_to_sections.contains_key(path.as_ref()) - } - - /// This will check every section/page paths + the aliases and ensure none of them - /// are colliding. - /// Returns (path colliding, [list of files causing that collision]) - pub fn check_for_path_collisions(&self) -> Vec<(String, Vec)> { - self.reverse_aliases - .iter() - .filter_map(|(alias, files)| { - if files.len() > 1 { - Some((alias.clone(), files.clone().into_iter().collect::>())) - } else { - None - } - }) - .collect() - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn can_find_no_collisions() { - let mut library = Library::new(10, 10, false); - let page = Page { path: "hello".to_string(), ..Default::default() }; - let page2 = Page { path: "hello-world".to_string(), ..Default::default() }; - let section = Section { path: "blog".to_string(), ..Default::default() }; - library.insert_page(page); - library.insert_page(page2); - library.insert_section(section); - - let collisions = library.check_for_path_collisions(); - assert_eq!(collisions.len(), 0); - } - - #[test] - fn can_find_collisions_between_pages() { - let mut library = Library::new(10, 10, false); - let mut page = Page { path: "hello".to_string(), ..Default::default() }; - page.file.relative = "hello".to_string(); - let mut page2 = Page { path: "hello".to_string(), ..Default::default() }; - page2.file.relative = "hello-world".to_string(); - let mut section = Section { path: "blog".to_string(), ..Default::default() }; - section.file.relative = "hello-world".to_string(); - library.insert_page(page.clone()); - library.insert_page(page2.clone()); - library.insert_section(section); - - let 
collisions = library.check_for_path_collisions(); - assert_eq!(collisions.len(), 1); - assert_eq!(collisions[0].0, page.path); - assert!(collisions[0].1.contains(&page.file.relative)); - assert!(collisions[0].1.contains(&page2.file.relative)); - } - - #[test] - fn can_find_collisions_with_an_alias() { - let mut library = Library::new(10, 10, false); - let mut page = Page { path: "hello".to_string(), ..Default::default() }; - page.file.relative = "hello".to_string(); - let mut page2 = Page { path: "hello".to_string(), ..Default::default() }; - page2.file.relative = "hello-world".to_string(); - page2.meta.aliases = vec!["hello".to_string()]; - let mut section = Section { path: "blog".to_string(), ..Default::default() }; - section.file.relative = "hello-world".to_string(); - library.insert_page(page.clone()); - library.insert_page(page2.clone()); - library.insert_section(section); - - let collisions = library.check_for_path_collisions(); - assert_eq!(collisions.len(), 1); - assert_eq!(collisions[0].0, page.path); - assert!(collisions[0].1.contains(&page.file.relative)); - assert!(collisions[0].1.contains(&page2.file.relative)); - } -} diff --git a/components/library/src/sorting.rs b/components/library/src/sorting.rs deleted file mode 100644 index f0b8f9b3f4..0000000000 --- a/components/library/src/sorting.rs +++ /dev/null @@ -1,271 +0,0 @@ -use std::cmp::Ordering; - -use chrono::NaiveDateTime; -use lexical_sort::natural_lexical_cmp; -use rayon::prelude::*; -use slotmap::DefaultKey; - -use crate::content::Page; - -/// Used by the feed -/// There to not have to import sorting stuff in the site crate -#[allow(clippy::trivially_copy_pass_by_ref)] -pub fn sort_actual_pages_by_date(a: &&Page, b: &&Page) -> Ordering { - let ord = b.meta.datetime.unwrap().cmp(&a.meta.datetime.unwrap()); - if ord == Ordering::Equal { - a.permalink.cmp(&b.permalink) - } else { - ord - } -} - -/// Takes a list of (page key, date, permalink) and sort them by dates if possible -/// Pages without 
date will be put in the unsortable bucket -/// The permalink is used to break ties -pub fn sort_pages_by_date( - pages: Vec<(&DefaultKey, Option, &str)>, -) -> (Vec, Vec) { - let (mut can_be_sorted, cannot_be_sorted): (Vec<_>, Vec<_>) = - pages.into_par_iter().partition(|page| page.1.is_some()); - - can_be_sorted.par_sort_unstable_by(|a, b| { - let ord = b.1.unwrap().cmp(&a.1.unwrap()); - if ord == Ordering::Equal { - a.2.cmp(b.2) - } else { - ord - } - }); - - (can_be_sorted.iter().map(|p| *p.0).collect(), cannot_be_sorted.iter().map(|p| *p.0).collect()) -} - -/// Takes a list of (page key, title, permalink) and sort them by title if possible. -/// Uses the a natural lexical comparison as defined by the lexical_sort crate. -/// Pages without title will be put in the unsortable bucket. -/// The permalink is used to break ties. -pub fn sort_pages_by_title( - pages: Vec<(&DefaultKey, Option<&str>, &str)>, -) -> (Vec, Vec) { - let (mut can_be_sorted, cannot_be_sorted): (Vec<_>, Vec<_>) = - pages.into_par_iter().partition(|page| page.1.is_some()); - - can_be_sorted.par_sort_unstable_by(|a, b| { - let ord = natural_lexical_cmp(a.1.unwrap(), b.1.unwrap()); - if ord == Ordering::Equal { - a.2.cmp(b.2) - } else { - ord - } - }); - - (can_be_sorted.iter().map(|p| *p.0).collect(), cannot_be_sorted.iter().map(|p| *p.0).collect()) -} - -/// Takes a list of (page key, weight, permalink) and sort them by weight if possible -/// Pages without weight will be put in the unsortable bucket -/// The permalink is used to break ties -pub fn sort_pages_by_weight( - pages: Vec<(&DefaultKey, Option, &str)>, -) -> (Vec, Vec) { - let (mut can_be_sorted, cannot_be_sorted): (Vec<_>, Vec<_>) = - pages.into_par_iter().partition(|page| page.1.is_some()); - - can_be_sorted.par_sort_unstable_by(|a, b| { - let ord = a.1.unwrap().cmp(&b.1.unwrap()); - if ord == Ordering::Equal { - a.2.cmp(b.2) - } else { - ord - } - }); - - (can_be_sorted.iter().map(|p| *p.0).collect(), 
cannot_be_sorted.iter().map(|p| *p.0).collect()) -} - -/// Find the lighter/heavier, earlier/later, and title_prev/title_next -/// pages for all pages having a date/weight/title -pub fn find_siblings( - sorted: &[DefaultKey], -) -> Vec<(DefaultKey, Option, Option)> { - let mut res = Vec::with_capacity(sorted.len()); - let length = sorted.len(); - - for (i, key) in sorted.iter().enumerate() { - let mut with_siblings = (*key, None, None); - - if i > 0 { - // lighter / later / title_prev - with_siblings.1 = Some(sorted[i - 1]); - } - - if i < length - 1 { - // heavier / earlier / title_next - with_siblings.2 = Some(sorted[i + 1]); - } - res.push(with_siblings); - } - - res -} - -#[cfg(test)] -mod tests { - use slotmap::DenseSlotMap; - use std::path::PathBuf; - - use super::{find_siblings, sort_pages_by_date, sort_pages_by_title, sort_pages_by_weight}; - use crate::content::Page; - use front_matter::PageFrontMatter; - - fn create_page_with_date(date: &str) -> Page { - let mut front_matter = - PageFrontMatter { date: Some(date.to_string()), ..Default::default() }; - front_matter.date_to_datetime(); - Page::new("content/hello.md", front_matter, &PathBuf::new()) - } - - fn create_page_with_title(title: &str) -> Page { - let front_matter = PageFrontMatter { title: Some(title.to_string()), ..Default::default() }; - Page::new("content/hello.md", front_matter, &PathBuf::new()) - } - - fn create_page_with_weight(weight: usize) -> Page { - let front_matter = PageFrontMatter { weight: Some(weight), ..Default::default() }; - Page::new("content/hello.md", front_matter, &PathBuf::new()) - } - - #[test] - fn can_sort_by_dates() { - let mut dense = DenseSlotMap::new(); - let page1 = create_page_with_date("2018-01-01"); - let key1 = dense.insert(page1.clone()); - let page2 = create_page_with_date("2017-01-01"); - let key2 = dense.insert(page2.clone()); - let page3 = create_page_with_date("2019-01-01"); - let key3 = dense.insert(page3.clone()); - - let input = vec![ - (&key1, 
page1.meta.datetime, page1.permalink.as_ref()), - (&key2, page2.meta.datetime, page2.permalink.as_ref()), - (&key3, page3.meta.datetime, page3.permalink.as_ref()), - ]; - let (pages, _) = sort_pages_by_date(input); - // Should be sorted by date - assert_eq!(pages[0], key3); - assert_eq!(pages[1], key1); - assert_eq!(pages[2], key2); - } - - #[test] - fn can_sort_by_titles() { - let titles = vec![ - "bagel", - "track_3", - "microkernel", - "métro", - "BART", - "Underground", - "track_13", - "μ-kernel", - "meter", - "track_1", - ]; - let pages: Vec = titles.iter().map(|title| create_page_with_title(title)).collect(); - let mut dense = DenseSlotMap::new(); - let keys: Vec<_> = pages.iter().map(|p| dense.insert(p)).collect(); - let input: Vec<_> = pages - .iter() - .enumerate() - .map(|(i, page)| (&keys[i], page.meta.title.as_deref(), page.permalink.as_ref())) - .collect(); - let (sorted, _) = sort_pages_by_title(input); - // Should be sorted by title - let sorted_titles: Vec<_> = sorted - .iter() - .map(|key| dense.get(*key).unwrap().meta.title.as_ref().unwrap()) - .collect(); - assert_eq!( - sorted_titles, - vec![ - "bagel", - "BART", - "μ-kernel", - "meter", - "métro", - "microkernel", - "track_1", - "track_3", - "track_13", - "Underground", - ] - ); - } - - #[test] - fn can_sort_by_weight() { - let mut dense = DenseSlotMap::new(); - let page1 = create_page_with_weight(2); - let key1 = dense.insert(page1.clone()); - let page2 = create_page_with_weight(3); - let key2 = dense.insert(page2.clone()); - let page3 = create_page_with_weight(1); - let key3 = dense.insert(page3.clone()); - - let input = vec![ - (&key1, page1.meta.weight, page1.permalink.as_ref()), - (&key2, page2.meta.weight, page2.permalink.as_ref()), - (&key3, page3.meta.weight, page3.permalink.as_ref()), - ]; - let (pages, _) = sort_pages_by_weight(input); - // Should be sorted by weight - assert_eq!(pages[0], key3); - assert_eq!(pages[1], key1); - assert_eq!(pages[2], key2); - } - - #[test] - fn 
ignore_page_with_missing_field() { - let mut dense = DenseSlotMap::new(); - let page1 = create_page_with_weight(2); - let key1 = dense.insert(page1.clone()); - let page2 = create_page_with_weight(3); - let key2 = dense.insert(page2.clone()); - let page3 = create_page_with_date("2019-01-01"); - let key3 = dense.insert(page3.clone()); - - let input = vec![ - (&key1, page1.meta.weight, page1.permalink.as_ref()), - (&key2, page2.meta.weight, page2.permalink.as_ref()), - (&key3, page3.meta.weight, page3.permalink.as_ref()), - ]; - - let (pages, unsorted) = sort_pages_by_weight(input); - assert_eq!(pages.len(), 2); - assert_eq!(unsorted.len(), 1); - } - - #[test] - fn can_find_siblings() { - let mut dense = DenseSlotMap::new(); - let page1 = create_page_with_weight(1); - let key1 = dense.insert(page1); - let page2 = create_page_with_weight(2); - let key2 = dense.insert(page2); - let page3 = create_page_with_weight(3); - let key3 = dense.insert(page3); - - let input = vec![key1, key2, key3]; - - let pages = find_siblings(&input); - - assert_eq!(pages[0].1, None); - assert_eq!(pages[0].2, Some(key2)); - - assert_eq!(pages[1].1, Some(key1)); - assert_eq!(pages[1].2, Some(key3)); - - assert_eq!(pages[2].1, Some(key2)); - assert_eq!(pages[2].2, None); - } -} diff --git a/components/library/src/taxonomies/mod.rs b/components/library/src/taxonomies/mod.rs deleted file mode 100644 index 99ed14c57b..0000000000 --- a/components/library/src/taxonomies/mod.rs +++ /dev/null @@ -1,930 +0,0 @@ -use std::cmp::Ordering; -use std::collections::HashMap; - -use serde_derive::Serialize; -use slotmap::DefaultKey; -use tera::{Context, Tera}; - -use config::{Config, Taxonomy as TaxonomyConfig}; -use errors::{bail, Error, Result}; -use utils::templates::{check_template_fallbacks, render_template}; - -use crate::content::SerializingPage; -use crate::library::Library; -use crate::sorting::sort_pages_by_date; -use utils::slugs::slugify_paths; - -#[derive(Debug, Clone, PartialEq, Serialize)] -pub 
struct SerializedTaxonomyItem<'a> { - name: &'a str, - slug: &'a str, - path: &'a str, - permalink: &'a str, - pages: Vec>, -} - -impl<'a> SerializedTaxonomyItem<'a> { - pub fn from_item(item: &'a TaxonomyItem, library: &'a Library) -> Self { - let mut pages = vec![]; - - for key in &item.pages { - let page = library.get_page_by_key(*key); - pages.push(page.to_serialized_basic(library)); - } - - SerializedTaxonomyItem { - name: &item.name, - slug: &item.slug, - path: &item.path, - permalink: &item.permalink, - pages, - } - } -} - -/// A taxonomy with all its pages -#[derive(Debug, Clone)] -pub struct TaxonomyItem { - pub name: String, - pub slug: String, - pub path: String, - pub permalink: String, - pub pages: Vec, -} - -impl TaxonomyItem { - pub fn new( - name: &str, - lang: &str, - taxo_slug: &str, - config: &Config, - keys: Vec, - library: &Library, - ) -> Self { - // Taxonomy are almost always used for blogs so we filter by dates - // and it's not like we can sort things across sections by anything other - // than dates - let data = keys - .iter() - .map(|k| { - if let Some(page) = library.pages().get(*k) { - (k, page.meta.datetime, page.permalink.as_ref()) - } else { - unreachable!("Sorting got an unknown page") - } - }) - .collect(); - let (mut pages, ignored_pages) = sort_pages_by_date(data); - let item_slug = slugify_paths(name, config.slugify.taxonomies); - let path = if lang != config.default_language { - format!("/{}/{}/{}/", lang, taxo_slug, item_slug) - } else { - format!("/{}/{}/", taxo_slug, item_slug) - }; - let permalink = config.make_permalink(&path); - - // We still append pages without dates at the end - pages.extend(ignored_pages); - - TaxonomyItem { name: name.to_string(), permalink, path, slug: item_slug, pages } - } - - pub fn serialize<'a>(&'a self, library: &'a Library) -> SerializedTaxonomyItem<'a> { - SerializedTaxonomyItem::from_item(self, library) - } - - pub fn merge(&mut self, other: Self) { - self.pages.extend(other.pages); - } -} 
- -impl PartialEq for TaxonomyItem { - fn eq(&self, other: &Self) -> bool { - self.permalink == other.permalink - } -} - -#[derive(Debug, Clone, PartialEq, Serialize)] -pub struct SerializedTaxonomy<'a> { - kind: &'a TaxonomyConfig, - lang: &'a str, - permalink: &'a str, - items: Vec>, -} - -impl<'a> SerializedTaxonomy<'a> { - pub fn from_taxonomy(taxonomy: &'a Taxonomy, library: &'a Library) -> Self { - let items: Vec = - taxonomy.items.iter().map(|i| SerializedTaxonomyItem::from_item(i, library)).collect(); - SerializedTaxonomy { - kind: &taxonomy.kind, - lang: &taxonomy.lang, - permalink: &taxonomy.permalink, - items, - } - } -} - -/// All different taxonomies we have and their content -#[derive(Debug, Clone, PartialEq)] -pub struct Taxonomy { - pub kind: TaxonomyConfig, - pub lang: String, - pub slug: String, - pub permalink: String, - // this vec is sorted by the count of item - pub items: Vec, -} - -impl Taxonomy { - fn new( - kind: TaxonomyConfig, - lang: &str, - config: &Config, - items: HashMap>, - library: &Library, - ) -> Taxonomy { - let mut sorted_items = vec![]; - let slug = slugify_paths(&kind.name, config.slugify.taxonomies); - for (name, pages) in items { - sorted_items.push(TaxonomyItem::new(&name, lang, &slug, config, pages, library)); - } - //sorted_items.sort_by(|a, b| a.name.cmp(&b.name)); - sorted_items.sort_by(|a, b| match a.slug.cmp(&b.slug) { - Ordering::Less => Ordering::Less, - Ordering::Greater => Ordering::Greater, - Ordering::Equal => a.name.cmp(&b.name), - }); - sorted_items.dedup_by(|a, b| { - // custom Eq impl checks for equal permalinks - // here we make sure all pages from a get copied to b - // before dedup gets rid of it - if a == b { - b.merge(a.to_owned()); - true - } else { - false - } - }); - let path = if lang != config.default_language { - format!("/{}/{}/", lang, slug) - } else { - format!("/{}/", slug) - }; - let permalink = config.make_permalink(&path); - - Taxonomy { kind, slug, lang: lang.to_owned(), permalink, 
items: sorted_items } - } - - pub fn len(&self) -> usize { - self.items.len() - } - - pub fn is_empty(&self) -> bool { - self.len() == 0 - } - - pub fn render_term( - &self, - item: &TaxonomyItem, - tera: &Tera, - config: &Config, - library: &Library, - ) -> Result { - let mut context = Context::new(); - context.insert("config", &config.serialize(&self.lang)); - context.insert("lang", &self.lang); - context.insert("term", &SerializedTaxonomyItem::from_item(item, library)); - context.insert("taxonomy", &self.kind); - context.insert( - "current_url", - &config.make_permalink(&format!("{}/{}", self.kind.name, item.slug)), - ); - context.insert("current_path", &format!("/{}/{}/", self.kind.name, item.slug)); - - // Check for taxon-specific template, or use generic as fallback. - let specific_template = format!("{}/single.html", self.kind.name); - let template = match check_template_fallbacks(&specific_template, tera, &config.theme) { - Some(template) => template, - None => "taxonomy_single.html", - }; - - render_template(&template, tera, context, &config.theme).map_err(|e| { - Error::chain(format!("Failed to render single term {} page.", self.kind.name), e) - }) - } - - pub fn render_all_terms( - &self, - tera: &Tera, - config: &Config, - library: &Library, - ) -> Result { - let mut context = Context::new(); - context.insert("config", &config.serialize(&self.lang)); - let terms: Vec = - self.items.iter().map(|i| SerializedTaxonomyItem::from_item(i, library)).collect(); - context.insert("terms", &terms); - context.insert("lang", &self.lang); - context.insert("taxonomy", &self.kind); - context.insert("current_url", &config.make_permalink(&self.kind.name)); - context.insert("current_path", &format!("/{}/", self.kind.name)); - - // Check for taxon-specific template, or use generic as fallback. 
- let specific_template = format!("{}/list.html", self.kind.name); - let template = match check_template_fallbacks(&specific_template, tera, &config.theme) { - Some(template) => template, - None => "taxonomy_list.html", - }; - - render_template(&template, tera, context, &config.theme).map_err(|e| { - Error::chain(format!("Failed to render a list of {} page.", self.kind.name), e) - }) - } - - pub fn to_serialized<'a>(&'a self, library: &'a Library) -> SerializedTaxonomy<'a> { - SerializedTaxonomy::from_taxonomy(self, library) - } -} - -pub fn find_taxonomies(config: &Config, library: &Library) -> Result> { - let mut slugs_to_lang = HashMap::new(); - - let taxonomies_def = { - let mut m = HashMap::new(); - // the default language taxonomies - for t in &config.taxonomies { - let slug = slugify_paths(&t.name, config.slugify.taxonomies); - let key = format!("{}-{}", slug, config.default_language); - slugs_to_lang.insert(key.clone(), config.default_language.as_str()); - m.insert(key, t); - } - - // other languages taxonomies - for (code, options) in config.other_languages() { - for t in &options.taxonomies { - let slug = slugify_paths(&t.name, config.slugify.taxonomies); - let key = format!("{}-{}", slug, code); - slugs_to_lang.insert(key.clone(), code); - m.insert(key, t); - } - } - m - }; - - let mut all_taxonomies = HashMap::new(); - for (key, page) in library.pages() { - for (name, taxo_term) in &page.meta.taxonomies { - let taxo_slug = slugify_paths(name, config.slugify.taxonomies); - let taxo_key = format!("{}-{}", &taxo_slug, page.lang); - if taxonomies_def.contains_key(&taxo_key) { - all_taxonomies.entry(taxo_key.clone()).or_insert_with(HashMap::new); - - for term in taxo_term { - all_taxonomies - .get_mut(&taxo_key) - .unwrap() - .entry(term.to_string()) - .or_insert_with(Vec::new) - .push(key); - } - } else { - bail!( - "Page `{}` has taxonomy `{}` which is not defined in config.toml", - page.file.path.display(), - name - ); - } - } - } - - let mut taxonomies = 
vec![]; - - for (name, taxo) in all_taxonomies { - taxonomies.push(Taxonomy::new( - taxonomies_def[&name].clone(), - slugs_to_lang[&name], - config, - taxo, - library, - )); - } - - Ok(taxonomies) -} - -#[cfg(test)] -mod tests { - use super::*; - use std::collections::HashMap; - - use crate::content::Page; - use crate::library::Library; - use config::{Config, LanguageOptions, Slugify, Taxonomy as TaxonomyConfig}; - use utils::slugs::SlugifyStrategy; - - #[test] - fn can_make_taxonomies() { - let mut config = Config::default(); - let mut library = Library::new(2, 0, false); - - config.taxonomies = vec![ - TaxonomyConfig { name: "categories".to_string(), ..TaxonomyConfig::default() }, - TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() }, - TaxonomyConfig { name: "authors".to_string(), ..TaxonomyConfig::default() }, - ]; - - let mut page1 = Page::default(); - let mut taxo_page1 = HashMap::new(); - taxo_page1.insert("tags".to_string(), vec!["rust".to_string(), "db".to_string()]); - taxo_page1.insert("categories".to_string(), vec!["Programming tutorials".to_string()]); - page1.meta.taxonomies = taxo_page1; - page1.lang = config.default_language.clone(); - library.insert_page(page1); - - let mut page2 = Page::default(); - let mut taxo_page2 = HashMap::new(); - taxo_page2.insert("tags".to_string(), vec!["rust".to_string(), "js".to_string()]); - taxo_page2.insert("categories".to_string(), vec!["Other".to_string()]); - page2.meta.taxonomies = taxo_page2; - page2.lang = config.default_language.clone(); - library.insert_page(page2); - - let mut page3 = Page::default(); - let mut taxo_page3 = HashMap::new(); - taxo_page3.insert("tags".to_string(), vec!["js".to_string()]); - taxo_page3.insert("authors".to_string(), vec!["Vincent Prouillet".to_string()]); - page3.meta.taxonomies = taxo_page3; - page3.lang = config.default_language.clone(); - library.insert_page(page3); - - let taxonomies = find_taxonomies(&config, &library).unwrap(); - let (tags, 
categories, authors) = { - let mut t = None; - let mut c = None; - let mut a = None; - for x in taxonomies { - match x.kind.name.as_ref() { - "tags" => t = Some(x), - "categories" => c = Some(x), - "authors" => a = Some(x), - _ => unreachable!(), - } - } - (t.unwrap(), c.unwrap(), a.unwrap()) - }; - assert_eq!(tags.items.len(), 3); - assert_eq!(categories.items.len(), 2); - assert_eq!(authors.items.len(), 1); - - assert_eq!(tags.items[0].name, "db"); - assert_eq!(tags.items[0].slug, "db"); - assert_eq!(tags.items[0].permalink, "http://a-website.com/tags/db/"); - assert_eq!(tags.items[0].path, "/tags/db/"); - assert_eq!(tags.items[0].pages.len(), 1); - - assert_eq!(tags.items[1].name, "js"); - assert_eq!(tags.items[1].slug, "js"); - assert_eq!(tags.items[1].permalink, "http://a-website.com/tags/js/"); - assert_eq!(tags.items[1].pages.len(), 2); - - assert_eq!(tags.items[2].name, "rust"); - assert_eq!(tags.items[2].slug, "rust"); - assert_eq!(tags.items[2].permalink, "http://a-website.com/tags/rust/"); - assert_eq!(tags.items[2].pages.len(), 2); - - assert_eq!(categories.items[0].name, "Other"); - assert_eq!(categories.items[0].slug, "other"); - assert_eq!(categories.items[0].permalink, "http://a-website.com/categories/other/"); - assert_eq!(categories.items[0].pages.len(), 1); - - assert_eq!(categories.items[1].name, "Programming tutorials"); - assert_eq!(categories.items[1].slug, "programming-tutorials"); - assert_eq!( - categories.items[1].permalink, - "http://a-website.com/categories/programming-tutorials/" - ); - assert_eq!(categories.items[1].pages.len(), 1); - } - - #[test] - fn can_make_slugified_taxonomies() { - let mut config = Config::default(); - let mut library = Library::new(2, 0, false); - - config.taxonomies = vec![ - TaxonomyConfig { name: "categories".to_string(), ..TaxonomyConfig::default() }, - TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() }, - TaxonomyConfig { name: "authors".to_string(), ..TaxonomyConfig::default() }, - 
]; - - let mut page1 = Page::default(); - let mut taxo_page1 = HashMap::new(); - taxo_page1.insert("tags".to_string(), vec!["rust".to_string(), "db".to_string()]); - taxo_page1.insert("categories".to_string(), vec!["Programming tutorials".to_string()]); - page1.meta.taxonomies = taxo_page1; - page1.lang = config.default_language.clone(); - library.insert_page(page1); - - let mut page2 = Page::default(); - let mut taxo_page2 = HashMap::new(); - taxo_page2.insert("tags".to_string(), vec!["rust".to_string(), "js".to_string()]); - taxo_page2.insert("categories".to_string(), vec!["Other".to_string()]); - page2.meta.taxonomies = taxo_page2; - page2.lang = config.default_language.clone(); - library.insert_page(page2); - - let mut page3 = Page::default(); - let mut taxo_page3 = HashMap::new(); - taxo_page3.insert("tags".to_string(), vec!["js".to_string()]); - taxo_page3.insert("authors".to_string(), vec!["Vincent Prouillet".to_string()]); - page3.meta.taxonomies = taxo_page3; - page3.lang = config.default_language.clone(); - library.insert_page(page3); - - let taxonomies = find_taxonomies(&config, &library).unwrap(); - let (tags, categories, authors) = { - let mut t = None; - let mut c = None; - let mut a = None; - for x in taxonomies { - match x.kind.name.as_ref() { - "tags" => t = Some(x), - "categories" => c = Some(x), - "authors" => a = Some(x), - _ => unreachable!(), - } - } - (t.unwrap(), c.unwrap(), a.unwrap()) - }; - assert_eq!(tags.items.len(), 3); - assert_eq!(categories.items.len(), 2); - assert_eq!(authors.items.len(), 1); - - assert_eq!(tags.items[0].name, "db"); - assert_eq!(tags.items[0].slug, "db"); - assert_eq!(tags.items[0].permalink, "http://a-website.com/tags/db/"); - assert_eq!(tags.items[0].pages.len(), 1); - - assert_eq!(tags.items[1].name, "js"); - assert_eq!(tags.items[1].slug, "js"); - assert_eq!(tags.items[1].permalink, "http://a-website.com/tags/js/"); - assert_eq!(tags.items[1].path, "/tags/js/"); - assert_eq!(tags.items[1].pages.len(), 2); - - 
assert_eq!(tags.items[2].name, "rust"); - assert_eq!(tags.items[2].slug, "rust"); - assert_eq!(tags.items[2].permalink, "http://a-website.com/tags/rust/"); - assert_eq!(tags.items[2].pages.len(), 2); - - assert_eq!(categories.items[0].name, "Other"); - assert_eq!(categories.items[0].slug, "other"); - assert_eq!(categories.items[0].permalink, "http://a-website.com/categories/other/"); - assert_eq!(categories.items[0].pages.len(), 1); - - assert_eq!(categories.items[1].name, "Programming tutorials"); - assert_eq!(categories.items[1].slug, "programming-tutorials"); - assert_eq!( - categories.items[1].permalink, - "http://a-website.com/categories/programming-tutorials/" - ); - assert_eq!(categories.items[1].pages.len(), 1); - } - - #[test] - fn errors_on_unknown_taxonomy() { - let mut config = Config::default(); - let mut library = Library::new(2, 0, false); - - config.taxonomies = - vec![TaxonomyConfig { name: "authors".to_string(), ..TaxonomyConfig::default() }]; - let mut page1 = Page::default(); - let mut taxo_page1 = HashMap::new(); - taxo_page1.insert("tags".to_string(), vec!["rust".to_string(), "db".to_string()]); - page1.meta.taxonomies = taxo_page1; - page1.lang = config.default_language.clone(); - library.insert_page(page1); - - let taxonomies = find_taxonomies(&config, &library); - assert!(taxonomies.is_err()); - let err = taxonomies.unwrap_err(); - // no path as this is created by Default - assert_eq!( - format!("{}", err), - "Page `` has taxonomy `tags` which is not defined in config.toml" - ); - } - - #[test] - fn can_make_taxonomies_in_multiple_languages() { - let mut config = Config::default(); - config.languages.insert("fr".to_owned(), LanguageOptions::default()); - let mut library = Library::new(2, 0, true); - - config.taxonomies = vec![ - TaxonomyConfig { name: "categories".to_string(), ..TaxonomyConfig::default() }, - TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() }, - ]; - let french_taxo = vec![ - TaxonomyConfig { name: 
"auteurs".to_string(), ..TaxonomyConfig::default() }, - TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() }, - ]; - let lang_options = config::LanguageOptions { - taxonomies: french_taxo, - ..config::LanguageOptions::default() - }; - config.languages.insert("fr".to_owned(), lang_options); - - let mut page1 = Page::default(); - let mut taxo_page1 = HashMap::new(); - taxo_page1.insert("tags".to_string(), vec!["rust".to_string(), "db".to_string()]); - taxo_page1.insert("categories".to_string(), vec!["Programming tutorials".to_string()]); - page1.meta.taxonomies = taxo_page1; - page1.lang = config.default_language.clone(); - library.insert_page(page1); - - let mut page2 = Page::default(); - let mut taxo_page2 = HashMap::new(); - taxo_page2.insert("tags".to_string(), vec!["rust".to_string()]); - taxo_page2.insert("categories".to_string(), vec!["Other".to_string()]); - page2.meta.taxonomies = taxo_page2; - page2.lang = config.default_language.clone(); - library.insert_page(page2); - - let mut page3 = Page { lang: "fr".to_string(), ..Default::default() }; - let mut taxo_page3 = HashMap::new(); - taxo_page3.insert("tags".to_string(), vec!["rust".to_string()]); - taxo_page3.insert("auteurs".to_string(), vec!["Vincent Prouillet".to_string()]); - page3.meta.taxonomies = taxo_page3; - library.insert_page(page3); - - let taxonomies = find_taxonomies(&config, &library).unwrap(); - let (tags, categories, authors) = { - let mut t = None; - let mut c = None; - let mut a = None; - for x in taxonomies { - match x.kind.name.as_ref() { - "tags" => { - if x.lang == "en" { - t = Some(x) - } - } - "categories" => c = Some(x), - "auteurs" => a = Some(x), - _ => unreachable!(), - } - } - (t.unwrap(), c.unwrap(), a.unwrap()) - }; - - assert_eq!(tags.items.len(), 2); - assert_eq!(categories.items.len(), 2); - assert_eq!(authors.items.len(), 1); - - assert_eq!(tags.items[0].name, "db"); - assert_eq!(tags.items[0].slug, "db"); - assert_eq!(tags.items[0].permalink, 
"http://a-website.com/tags/db/"); - assert_eq!(tags.items[0].pages.len(), 1); - - assert_eq!(tags.items[1].name, "rust"); - assert_eq!(tags.items[1].slug, "rust"); - assert_eq!(tags.items[1].permalink, "http://a-website.com/tags/rust/"); - assert_eq!(tags.items[1].pages.len(), 2); - - assert_eq!(authors.items[0].name, "Vincent Prouillet"); - assert_eq!(authors.items[0].slug, "vincent-prouillet"); - assert_eq!( - authors.items[0].permalink, - "http://a-website.com/fr/auteurs/vincent-prouillet/" - ); - assert_eq!(authors.items[0].pages.len(), 1); - - assert_eq!(categories.items[0].name, "Other"); - assert_eq!(categories.items[0].slug, "other"); - assert_eq!(categories.items[0].permalink, "http://a-website.com/categories/other/"); - assert_eq!(categories.items[0].pages.len(), 1); - - assert_eq!(categories.items[1].name, "Programming tutorials"); - assert_eq!(categories.items[1].slug, "programming-tutorials"); - assert_eq!( - categories.items[1].permalink, - "http://a-website.com/categories/programming-tutorials/" - ); - assert_eq!(categories.items[1].pages.len(), 1); - } - - #[test] - fn can_make_utf8_taxonomies() { - let mut config = Config::default(); - config.slugify.taxonomies = SlugifyStrategy::Safe; - let mut library = Library::new(2, 0, true); - - let french_taxo = - vec![TaxonomyConfig { name: "catégories".to_string(), ..TaxonomyConfig::default() }]; - let lang_options = config::LanguageOptions { - taxonomies: french_taxo, - ..config::LanguageOptions::default() - }; - config.languages.insert("fr".to_owned(), lang_options); - - let mut page = Page { lang: "fr".to_string(), ..Default::default() }; - let mut taxo_page = HashMap::new(); - taxo_page.insert("catégories".to_string(), vec!["Écologie".to_string()]); - page.meta.taxonomies = taxo_page; - library.insert_page(page); - - let taxonomies = find_taxonomies(&config, &library).unwrap(); - let categories = &taxonomies[0]; - - assert_eq!(categories.items.len(), 1); - assert_eq!(categories.items[0].name, 
"Écologie"); - assert_eq!(categories.items[0].permalink, "http://a-website.com/fr/catégories/Écologie/"); - assert_eq!(categories.items[0].pages.len(), 1); - } - - #[test] - fn can_make_slugified_taxonomies_in_multiple_languages() { - let mut config = Config::default(); - config.slugify.taxonomies = SlugifyStrategy::On; - let mut library = Library::new(2, 0, true); - - config.taxonomies = vec![ - TaxonomyConfig { name: "categories".to_string(), ..TaxonomyConfig::default() }, - TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() }, - ]; - let french_taxo = vec![ - TaxonomyConfig { name: "auteurs".to_string(), ..TaxonomyConfig::default() }, - TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() }, - ]; - let lang_options = config::LanguageOptions { - taxonomies: french_taxo, - ..config::LanguageOptions::default() - }; - config.languages.insert("fr".to_owned(), lang_options); - let mut page1 = Page::default(); - let mut taxo_page1 = HashMap::new(); - taxo_page1.insert("tags".to_string(), vec!["rust".to_string(), "db".to_string()]); - taxo_page1.insert("categories".to_string(), vec!["Programming tutorials".to_string()]); - page1.meta.taxonomies = taxo_page1; - page1.lang = config.default_language.clone(); - library.insert_page(page1); - - let mut page2 = Page::default(); - let mut taxo_page2 = HashMap::new(); - taxo_page2.insert("tags".to_string(), vec!["rust".to_string()]); - taxo_page2.insert("categories".to_string(), vec!["Other".to_string()]); - page2.meta.taxonomies = taxo_page2; - page2.lang = config.default_language.clone(); - library.insert_page(page2); - - let mut page3 = Page { lang: "fr".to_string(), ..Default::default() }; - let mut taxo_page3 = HashMap::new(); - taxo_page3.insert("tags".to_string(), vec!["rust".to_string()]); - taxo_page3.insert("auteurs".to_string(), vec!["Vincent Prouillet".to_string()]); - page3.meta.taxonomies = taxo_page3; - library.insert_page(page3); - - let taxonomies = find_taxonomies(&config, 
&library).unwrap(); - let (tags, categories, authors) = { - let mut t = None; - let mut c = None; - let mut a = None; - for x in taxonomies { - match x.kind.name.as_ref() { - "tags" => { - if x.lang == "en" { - t = Some(x) - } - } - "categories" => c = Some(x), - "auteurs" => a = Some(x), - _ => unreachable!(), - } - } - (t.unwrap(), c.unwrap(), a.unwrap()) - }; - - assert_eq!(tags.items.len(), 2); - assert_eq!(categories.items.len(), 2); - assert_eq!(authors.items.len(), 1); - - assert_eq!(tags.items[0].name, "db"); - assert_eq!(tags.items[0].slug, "db"); - assert_eq!(tags.items[0].permalink, "http://a-website.com/tags/db/"); - assert_eq!(tags.items[0].pages.len(), 1); - - assert_eq!(tags.items[1].name, "rust"); - assert_eq!(tags.items[1].slug, "rust"); - assert_eq!(tags.items[1].permalink, "http://a-website.com/tags/rust/"); - assert_eq!(tags.items[1].pages.len(), 2); - - assert_eq!(authors.items[0].name, "Vincent Prouillet"); - assert_eq!(authors.items[0].slug, "vincent-prouillet"); - assert_eq!( - authors.items[0].permalink, - "http://a-website.com/fr/auteurs/vincent-prouillet/" - ); - assert_eq!(authors.items[0].pages.len(), 1); - - assert_eq!(categories.items[0].name, "Other"); - assert_eq!(categories.items[0].slug, "other"); - assert_eq!(categories.items[0].permalink, "http://a-website.com/categories/other/"); - assert_eq!(categories.items[0].pages.len(), 1); - - assert_eq!(categories.items[1].name, "Programming tutorials"); - assert_eq!(categories.items[1].slug, "programming-tutorials"); - assert_eq!( - categories.items[1].permalink, - "http://a-website.com/categories/programming-tutorials/" - ); - assert_eq!(categories.items[1].pages.len(), 1); - } - - #[test] - fn taxonomies_are_groupted_by_permalink() { - let mut config = Config::default(); - let mut library = Library::new(2, 0, false); - - config.taxonomies = vec![ - TaxonomyConfig { name: "test-taxonomy".to_string(), ..TaxonomyConfig::default() }, - TaxonomyConfig { name: "test taxonomy".to_string(), 
..TaxonomyConfig::default() }, - TaxonomyConfig { name: "test-taxonomy ".to_string(), ..TaxonomyConfig::default() }, - TaxonomyConfig { name: "Test-Taxonomy ".to_string(), ..TaxonomyConfig::default() }, - ]; - - let mut page1 = Page::default(); - let mut taxo_page1 = HashMap::new(); - taxo_page1.insert( - "test-taxonomy".to_string(), - vec!["term one".to_string(), "term two".to_string()], - ); - page1.meta.taxonomies = taxo_page1; - page1.lang = config.default_language.clone(); - library.insert_page(page1); - - let mut page2 = Page::default(); - let mut taxo_page2 = HashMap::new(); - taxo_page2.insert( - "test taxonomy".to_string(), - vec!["Term Two".to_string(), "term-one".to_string()], - ); - page2.meta.taxonomies = taxo_page2; - page2.lang = config.default_language.clone(); - library.insert_page(page2); - - let mut page3 = Page::default(); - let mut taxo_page3 = HashMap::new(); - taxo_page3.insert("test-taxonomy ".to_string(), vec!["term one ".to_string()]); - page3.meta.taxonomies = taxo_page3; - page3.lang = config.default_language.clone(); - library.insert_page(page3); - - let mut page4 = Page::default(); - let mut taxo_page4 = HashMap::new(); - taxo_page4.insert("Test-Taxonomy ".to_string(), vec!["Term-Two ".to_string()]); - page4.meta.taxonomies = taxo_page4; - page4.lang = config.default_language.clone(); - library.insert_page(page4); - - // taxonomies should all be the same - let taxonomies = find_taxonomies(&config, &library).unwrap(); - assert_eq!(taxonomies.len(), 1); - - let tax = &taxonomies[0]; - - // terms should be "term one", "term two" - assert_eq!(tax.items.len(), 2); - - let term1 = &tax.items[0]; - let term2 = &tax.items[1]; - - assert_eq!(term1.name, "term one"); - assert_eq!(term1.slug, "term-one"); - assert_eq!(term1.permalink, "http://a-website.com/test-taxonomy/term-one/"); - assert_eq!(term1.pages.len(), 3); - - assert_eq!(term2.name, "Term Two"); - assert_eq!(term2.slug, "term-two"); - assert_eq!(term2.permalink, 
"http://a-website.com/test-taxonomy/term-two/"); - assert_eq!(term2.pages.len(), 3); - } - - #[test] - fn taxonomies_with_unic_are_grouped_with_default_slugify_strategy() { - let mut config = Config::default(); - let mut library = Library::new(2, 0, false); - - config.taxonomies = vec![ - TaxonomyConfig { name: "test-taxonomy".to_string(), ..TaxonomyConfig::default() }, - TaxonomyConfig { name: "test taxonomy".to_string(), ..TaxonomyConfig::default() }, - TaxonomyConfig { name: "test-taxonomy ".to_string(), ..TaxonomyConfig::default() }, - TaxonomyConfig { name: "Test-Taxonomy ".to_string(), ..TaxonomyConfig::default() }, - ]; - - let mut page1 = Page::default(); - let mut taxo_page1 = HashMap::new(); - taxo_page1.insert("test-taxonomy".to_string(), vec!["Ecole".to_string()]); - page1.meta.taxonomies = taxo_page1; - page1.lang = config.default_language.clone(); - library.insert_page(page1); - - let mut page2 = Page::default(); - let mut taxo_page2 = HashMap::new(); - taxo_page2.insert("test taxonomy".to_string(), vec!["École".to_string()]); - page2.meta.taxonomies = taxo_page2; - page2.lang = config.default_language.clone(); - library.insert_page(page2); - - let mut page3 = Page::default(); - let mut taxo_page3 = HashMap::new(); - taxo_page3.insert("test-taxonomy ".to_string(), vec!["ecole".to_string()]); - page3.meta.taxonomies = taxo_page3; - page3.lang = config.default_language.clone(); - library.insert_page(page3); - - let mut page4 = Page::default(); - let mut taxo_page4 = HashMap::new(); - taxo_page4.insert("Test-Taxonomy ".to_string(), vec!["école".to_string()]); - page4.meta.taxonomies = taxo_page4; - page4.lang = config.default_language.clone(); - library.insert_page(page4); - - // taxonomies should all be the same - let taxonomies = find_taxonomies(&config, &library).unwrap(); - assert_eq!(taxonomies.len(), 1); - - let tax = &taxonomies[0]; - - // under the default slugify stratagy all of the provided terms should be the same - assert_eq!(tax.items.len(), 
1); - - let term1 = &tax.items[0]; - - assert_eq!(term1.name, "Ecole"); - assert_eq!(term1.slug, "ecole"); - assert_eq!(term1.permalink, "http://a-website.com/test-taxonomy/ecole/"); - assert_eq!(term1.pages.len(), 4); - } - - #[test] - fn taxonomies_with_unic_are_not_grouped_with_safe_slugify_strategy() { - let mut config = Config::default(); - config.slugify = Slugify { - paths: SlugifyStrategy::Safe, - taxonomies: SlugifyStrategy::Safe, - anchors: SlugifyStrategy::Safe, - }; - let mut library = Library::new(2, 0, false); - - config.taxonomies = vec![ - TaxonomyConfig { name: "test-taxonomy".to_string(), ..TaxonomyConfig::default() }, - TaxonomyConfig { name: "test taxonomy".to_string(), ..TaxonomyConfig::default() }, - TaxonomyConfig { name: "test-taxonomy ".to_string(), ..TaxonomyConfig::default() }, - TaxonomyConfig { name: "Test-Taxonomy ".to_string(), ..TaxonomyConfig::default() }, - ]; - - let mut page1 = Page::default(); - let mut taxo_page1 = HashMap::new(); - taxo_page1.insert("test-taxonomy".to_string(), vec!["Ecole".to_string()]); - page1.meta.taxonomies = taxo_page1; - page1.lang = config.default_language.clone(); - library.insert_page(page1); - - let mut page2 = Page::default(); - let mut taxo_page2 = HashMap::new(); - taxo_page2.insert("test-taxonomy".to_string(), vec!["École".to_string()]); - page2.meta.taxonomies = taxo_page2; - page2.lang = config.default_language.clone(); - library.insert_page(page2); - - let mut page3 = Page::default(); - let mut taxo_page3 = HashMap::new(); - taxo_page3.insert("test-taxonomy".to_string(), vec!["ecole".to_string()]); - page3.meta.taxonomies = taxo_page3; - page3.lang = config.default_language.clone(); - library.insert_page(page3); - - let mut page4 = Page::default(); - let mut taxo_page4 = HashMap::new(); - taxo_page4.insert("test-taxonomy".to_string(), vec!["école".to_string()]); - page4.meta.taxonomies = taxo_page4; - page4.lang = config.default_language.clone(); - library.insert_page(page4); - - // 
taxonomies should all be the same - let taxonomies = find_taxonomies(&config, &library).unwrap(); - let tax = &taxonomies[0]; - - // if names are different permalinks should also be different so - // the items are still accessible - for term1 in tax.items.iter() { - for term2 in tax.items.iter() { - assert!(term1.name == term2.name || term1.permalink != term2.permalink); - } - } - - // under the safe slugify strategy all terms should be distinct - assert_eq!(tax.items.len(), 4); - } -} diff --git a/components/libs/Cargo.toml b/components/libs/Cargo.toml new file mode 100644 index 0000000000..cb83dc6562 --- /dev/null +++ b/components/libs/Cargo.toml @@ -0,0 +1,54 @@ +[package] +name = "libs" +version = "0.1.0" +edition = "2021" + +[dependencies] +ahash = "0.7.6" +ammonia = "3" +atty = "0.2.11" +base64 = "0.13" +csv = "1" +elasticlunr-rs = { version = "3.0.0", features = ["da", "no", "de", "du", "es", "fi", "fr", "it", "pt", "ro", "ru", "sv", "tr"] } +filetime = "0.2" +gh-emoji = "1" +glob = "0.3" +globset = "0.4" +image = "0.24" +lexical-sort = "0.3" +minify-html = "0.9" +nom-bibtex = "0.3" +num-format = "0.4" +once_cell = "1" +percent-encoding = "2" +pulldown-cmark = { version = "0.9", default-features = false, features = ["simd"] } +quickxml_to_serde = "0.5" +rayon = "1" +regex = "1" +relative-path = "1" +reqwest = { version = "0.11", default-features = false, features = ["blocking"] } +sass-rs = "0.2" +serde_json = "1" +serde_yaml = "0.8" +sha2 = "0.10" +slug = "0.1" +svg_metadata = "0.4" +syntect = "5" +tera = { version = "1", features = ["preserve_order"] } +termcolor = "1.0.4" +time = "0.3" +toml = "0.5" +unic-langid = "0.9" +unicode-segmentation = "1.2" +url = "2" +walkdir = "2" +webp = "0.2" + + +[features] +# TODO: fix me, it doesn't pick up the reqwuest feature if not set as default +default = ["rust-tls"] +rust-tls = ["reqwest/rustls-tls"] +native-tls = ["reqwest/default-tls"] +indexing-zh = ["elasticlunr-rs/zh"] +indexing-ja = ["elasticlunr-rs/ja"] diff 
--git a/components/libs/src/lib.rs b/components/libs/src/lib.rs new file mode 100644 index 0000000000..a1afcbb425 --- /dev/null +++ b/components/libs/src/lib.rs @@ -0,0 +1,45 @@ +//! This component is only there to re-export libraries used in the rest of the sub-crates +//! without having to add them to each `Cargo.toml`. This way, updating a library version only requires +//! modifying one crate instead of eg updating Tera in 5 sub crates using it. It also means if you want +//! to define features, it is done in a single place. +//! It doesn't work for crates exporting macros like `serde` or dev deps but that's ok for most. + +pub use ahash; +pub use ammonia; +pub use atty; +pub use base64; +pub use csv; +pub use elasticlunr; +pub use filetime; +pub use gh_emoji; +pub use glob; +pub use globset; +pub use image; +pub use lexical_sort; +pub use minify_html; +pub use nom_bibtex; +pub use num_format; +pub use once_cell; +pub use percent_encoding; +pub use pulldown_cmark; +pub use quickxml_to_serde; +pub use rayon; +pub use regex; +pub use relative_path; +pub use reqwest; +pub use sass_rs; +pub use serde_json; +pub use serde_yaml; +pub use sha2; +pub use slug; +pub use svg_metadata; +pub use syntect; +pub use tera; +pub use termcolor; +pub use time; +pub use toml; +pub use unic_langid; +pub use unicode_segmentation; +pub use url; +pub use walkdir; +pub use webp; diff --git a/components/link_checker/Cargo.toml b/components/link_checker/Cargo.toml index 4b4bca42d4..8a5e9720ae 100644 --- a/components/link_checker/Cargo.toml +++ b/components/link_checker/Cargo.toml @@ -1,24 +1,13 @@ [package] name = "link_checker" version = "0.1.0" -authors = ["Vincent Prouillet "] -edition = "2018" +edition = "2021" [dependencies] -lazy_static = "1" - config = { path = "../config" } errors = { path = "../errors" } utils = { path = "../utils" } - -[dependencies.reqwest] -version = "0.11" -default-features = false -features = ["blocking"] +libs = { path = "../libs" } [dev-dependencies] 
-mockito = "0.30" - -[features] -rust-tls = ["reqwest/rustls-tls"] -native-tls = ["reqwest/default-tls"] +mockito = "0.31" diff --git a/components/link_checker/src/lib.rs b/components/link_checker/src/lib.rs index 48502d7b3a..3b3551afd6 100644 --- a/components/link_checker/src/lib.rs +++ b/components/link_checker/src/lib.rs @@ -1,13 +1,15 @@ -use lazy_static::lazy_static; -use reqwest::header::{HeaderMap, ACCEPT}; -use reqwest::{blocking::Client, StatusCode}; - -use config::LinkChecker; - use std::collections::HashMap; use std::result; use std::sync::{Arc, RwLock}; -use utils::links::has_anchor_id; + +use libs::once_cell::sync::Lazy; +use libs::reqwest::header::{HeaderMap, ACCEPT}; +use libs::reqwest::{blocking::Client, StatusCode}; + +use config::LinkChecker; +use errors::anyhow; + +use utils::anchors::has_anchor_id; pub type Result = result::Result; @@ -25,10 +27,9 @@ pub fn message(res: &Result) -> String { } } -lazy_static! { - // Keep history of link checks so a rebuild doesn't have to check again - static ref LINKS: Arc>> = Arc::new(RwLock::new(HashMap::new())); -} +// Keep history of link checks so a rebuild doesn't have to check again +static LINKS: Lazy>>> = + Lazy::new(|| Arc::new(RwLock::new(HashMap::new()))); pub fn check_url(url: &str, config: &LinkChecker) -> Result { { @@ -42,6 +43,7 @@ pub fn check_url(url: &str, config: &LinkChecker) -> Result { headers.insert(ACCEPT, "text/html".parse().unwrap()); headers.append(ACCEPT, "*/*".parse().unwrap()); + // TODO: pass the client to the check_url, do not pass the config let client = Client::builder() .user_agent(concat!(env!("CARGO_PKG_NAME"), "/", env!("CARGO_PKG_VERSION"))) .build() @@ -106,11 +108,10 @@ fn check_page_for_anchor(url: &str, body: String) -> errors::Result<()> { let index = url.find('#').unwrap(); let anchor = url.get(index + 1..).unwrap(); - - if has_anchor_id(&body, &anchor){ + if has_anchor_id(&body, anchor) { Ok(()) } else { - Err(errors::Error::from(format!("Anchor `#{}` not found on 
page", anchor))) + Err(anyhow!("Anchor `#{}` not found on page", anchor)) } } @@ -119,8 +120,8 @@ mod tests { use super::{ check_page_for_anchor, check_url, has_anchor, is_valid, message, LinkChecker, LINKS, }; + use libs::reqwest::StatusCode; use mockito::mock; - use reqwest::StatusCode; // NOTE: HTTP mock paths below are randomly generated to avoid name // collisions. Mocks with the same path can sometimes bleed between tests diff --git a/components/markdown/Cargo.toml b/components/markdown/Cargo.toml new file mode 100644 index 0000000000..d1226ee89e --- /dev/null +++ b/components/markdown/Cargo.toml @@ -0,0 +1,19 @@ +[package] +name = "markdown" +version = "0.1.0" +edition = "2021" +include = ["src/**/*"] + +[dependencies] +pest = "2" +pest_derive = "2" + +errors = { path = "../errors" } +utils = { path = "../utils" } +config = { path = "../config" } +console = { path = "../console" } +libs = { path = "../libs" } + +[dev-dependencies] +templates = { path = "../templates" } +insta = "1.12.0" diff --git a/components/rendering/benches/all.rs b/components/markdown/benches/all.rs similarity index 87% rename from components/rendering/benches/all.rs rename to components/markdown/benches/all.rs index 2c30bfb3f9..5b4f915792 100644 --- a/components/rendering/benches/all.rs +++ b/components/markdown/benches/all.rs @@ -4,9 +4,9 @@ extern crate test; use std::collections::HashMap; use config::Config; -use front_matter::InsertAnchor; -use rendering::{render_content, RenderContext}; -use tera::Tera; +use libs::tera::Tera; +use markdown::{render_content, RenderContext}; +use utils::types::InsertAnchor; static CONTENT: &str = r#" # Modus cognitius profanam ne duae virtutis mundi @@ -85,10 +85,10 @@ fn bench_render_content_with_highlighting(b: &mut test::Bencher) { let mut tera = Tera::default(); tera.add_raw_template("shortcodes/youtube.html", "{{id}}").unwrap(); let permalinks_ctx = HashMap::new(); - let mut config = Config::default(); + let mut config = 
Config::default_for_test(); config.markdown.highlight_code = true; let current_page_permalink = ""; - let context = RenderContext::new( + let mut context = RenderContext::new( &tera, &config, &config.default_language, @@ -96,6 +96,8 @@ fn bench_render_content_with_highlighting(b: &mut test::Bencher) { &permalinks_ctx, InsertAnchor::None, ); + let shortcode_def = utils::templates::get_shortcodes(&tera); + context.set_shortcode_definitions(&shortcode_def); b.iter(|| render_content(CONTENT, &context).unwrap()); } @@ -104,10 +106,10 @@ fn bench_render_content_without_highlighting(b: &mut test::Bencher) { let mut tera = Tera::default(); tera.add_raw_template("shortcodes/youtube.html", "{{id}}").unwrap(); let permalinks_ctx = HashMap::new(); - let mut config = Config::default(); + let mut config = Config::default_for_test(); config.markdown.highlight_code = false; let current_page_permalink = ""; - let context = RenderContext::new( + let mut context = RenderContext::new( &tera, &config, &config.default_language, @@ -115,13 +117,16 @@ fn bench_render_content_without_highlighting(b: &mut test::Bencher) { &permalinks_ctx, InsertAnchor::None, ); + let shortcode_def = utils::templates::get_shortcodes(&tera); + context.set_shortcode_definitions(&shortcode_def); b.iter(|| render_content(CONTENT, &context).unwrap()); } +#[bench] fn bench_render_content_no_shortcode(b: &mut test::Bencher) { let tera = Tera::default(); let content2 = CONTENT.replace(r#"{{ youtube(id="my_youtube_id") }}"#, ""); - let mut config = Config::default(); + let mut config = Config::default_for_test(); config.markdown.highlight_code = false; let permalinks_ctx = HashMap::new(); let current_page_permalink = ""; @@ -141,7 +146,7 @@ fn bench_render_content_no_shortcode(b: &mut test::Bencher) { fn bench_render_content_with_emoji(b: &mut test::Bencher) { let tera = Tera::default(); let content2 = CONTENT.replace(r#"{{ youtube(id="my_youtube_id") }}"#, ""); - let mut config = Config::default(); + let mut config 
= Config::default_for_test(); config.markdown.highlight_code = false; config.markdown.render_emoji = true; let permalinks_ctx = HashMap::new(); diff --git a/components/rendering/src/codeblock/fence.rs b/components/markdown/src/codeblock/fence.rs similarity index 100% rename from components/rendering/src/codeblock/fence.rs rename to components/markdown/src/codeblock/fence.rs diff --git a/components/rendering/src/codeblock/highlight.rs b/components/markdown/src/codeblock/highlight.rs similarity index 92% rename from components/rendering/src/codeblock/highlight.rs rename to components/markdown/src/codeblock/highlight.rs index d14aec29e0..04d4dddd5a 100644 --- a/components/rendering/src/codeblock/highlight.rs +++ b/components/markdown/src/codeblock/highlight.rs @@ -1,13 +1,13 @@ use std::fmt::Write; use config::highlighting::{SyntaxAndTheme, CLASS_STYLE}; -use syntect::easy::HighlightLines; -use syntect::highlighting::{Color, Theme}; -use syntect::html::{ +use libs::syntect::easy::HighlightLines; +use libs::syntect::highlighting::{Color, Theme}; +use libs::syntect::html::{ line_tokens_to_classed_spans, styled_line_to_highlighted_html, ClassStyle, IncludeBackground, }; -use syntect::parsing::{ParseState, ScopeStack, SyntaxReference, SyntaxSet}; -use tera::escape_html; +use libs::syntect::parsing::{ParseState, ScopeStack, SyntaxReference, SyntaxSet}; +use libs::tera::escape_html; /// Not public, but from syntect::html fn write_css_color(s: &mut String, c: Color) { @@ -37,13 +37,15 @@ impl<'config> ClassHighlighter<'config> { /// also use of the `load_defaults_newlines` version of the syntaxes. 
pub fn highlight_line(&mut self, line: &str) -> String { debug_assert!(line.ends_with('\n')); - let parsed_line = self.parse_state.parse_line(line, self.syntax_set); + let parsed_line = + self.parse_state.parse_line(line, self.syntax_set).expect("failed to parse line"); let (formatted_line, delta) = line_tokens_to_classed_spans( line, parsed_line.as_slice(), CLASS_STYLE, &mut self.scope_stack, - ); + ) + .expect("line_tokens_to_classed_spans should not fail"); self.open_spans += delta; formatted_line } @@ -81,12 +83,14 @@ impl<'config> InlineHighlighter<'config> { } pub fn highlight_line(&mut self, line: &str) -> String { - let regions = self.h.highlight(line, self.syntax_set); + let regions = + self.h.highlight_line(line, self.syntax_set).expect("failed to highlight line"); // TODO: add a param like `IncludeBackground` for `IncludeForeground` in syntect let highlighted = styled_line_to_highlighted_html( ®ions, IncludeBackground::IfDifferent(self.bg_color), - ); + ) + .expect("styled_line_to_highlighted_html should not error"); // Spans don't get nested even if the scopes generated by the syntax highlighting do, // so this is safe even when some internal scope happens to have the same color // as the default foreground color. 
Also note that `"`s in the original source @@ -192,7 +196,7 @@ mod tests { use super::*; use config::highlighting::resolve_syntax_and_theme; use config::Config; - use syntect::util::LinesWithEndings; + use libs::syntect::util::LinesWithEndings; #[test] fn can_highlight_with_classes() { diff --git a/components/rendering/src/codeblock/mod.rs b/components/markdown/src/codeblock/mod.rs similarity index 96% rename from components/rendering/src/codeblock/mod.rs rename to components/markdown/src/codeblock/mod.rs index 1e8d1eb09b..5c9a463143 100644 --- a/components/rendering/src/codeblock/mod.rs +++ b/components/markdown/src/codeblock/mod.rs @@ -3,7 +3,7 @@ mod highlight; use std::ops::RangeInclusive; -use syntect::util::LinesWithEndings; +use libs::syntect::util::LinesWithEndings; use crate::codeblock::highlight::SyntaxHighlighter; use config::highlighting::{resolve_syntax_and_theme, HighlightSource}; @@ -162,6 +162,10 @@ impl<'config> CodeBlock<'config> { let highlighted_line = self.highlighter.highlight_line(line); maybe_mark(&mut buffer, &highlighted_line); + + if self.line_numbers { + buffer.push_str(""); + } } if let Some(rest) = self.highlighter.finalize() { @@ -169,7 +173,7 @@ impl<'config> CodeBlock<'config> { } if self.line_numbers { - buffer.push_str(""); + buffer.push_str(""); } buffer diff --git a/components/rendering/src/content.pest b/components/markdown/src/content.pest similarity index 100% rename from components/rendering/src/content.pest rename to components/markdown/src/content.pest diff --git a/components/rendering/src/context.rs b/components/markdown/src/context.rs similarity index 97% rename from components/rendering/src/context.rs rename to components/markdown/src/context.rs index 1bc23c6388..cefce7021a 100644 --- a/components/rendering/src/context.rs +++ b/components/markdown/src/context.rs @@ -2,9 +2,9 @@ use std::borrow::Cow; use std::collections::HashMap; use config::Config; -use front_matter::InsertAnchor; -use tera::{Context, Tera}; +use 
libs::tera::{Context, Tera}; use utils::templates::ShortcodeDefinition; +use utils::types::InsertAnchor; /// All the information from the zola site that is needed to render HTML from markdown #[derive(Debug)] diff --git a/components/rendering/src/lib.rs b/components/markdown/src/lib.rs similarity index 90% rename from components/rendering/src/lib.rs rename to components/markdown/src/lib.rs index 1193610215..f374442312 100644 --- a/components/rendering/src/lib.rs +++ b/components/markdown/src/lib.rs @@ -2,16 +2,14 @@ mod codeblock; mod context; mod markdown; mod shortcode; -mod table_of_contents; use shortcode::{extract_shortcodes, insert_md_shortcodes}; use errors::Result; +use crate::markdown::markdown_to_html; +pub use crate::markdown::Rendered; pub use context::RenderContext; -use markdown::markdown_to_html; -pub use markdown::Rendered; -pub use table_of_contents::Heading; pub fn render_content(content: &str, context: &RenderContext) -> Result { // avoid parsing the content if needed diff --git a/components/rendering/src/markdown.rs b/components/markdown/src/markdown.rs similarity index 69% rename from components/rendering/src/markdown.rs rename to components/markdown/src/markdown.rs index 4181c2dcb3..63007e12b5 100644 --- a/components/rendering/src/markdown.rs +++ b/components/markdown/src/markdown.rs @@ -1,13 +1,19 @@ -use lazy_static::lazy_static; -use pulldown_cmark as cmark; +use std::fmt::Write; + +use errors::bail; +use libs::gh_emoji::Replacer as EmojiReplacer; +use libs::once_cell::sync::Lazy; +use libs::pulldown_cmark as cmark; +use libs::tera; use crate::context::RenderContext; -use crate::table_of_contents::{make_table_of_contents, Heading}; -use errors::{Error, Result}; -use front_matter::InsertAnchor; +use errors::{Context, Error, Result}; +use libs::pulldown_cmark::escape::escape_html; +use libs::regex::Regex; use utils::site::resolve_internal_link; use utils::slugs::slugify_anchors; -use utils::vec::InsertMany; +use 
utils::table_of_contents::{make_table_of_contents, Heading}; +use utils::types::InsertAnchor; use self::cmark::{Event, LinkType, Options, Parser, Tag}; use crate::codeblock::{CodeBlock, FenceSettings}; @@ -15,6 +21,43 @@ use crate::shortcode::{Shortcode, SHORTCODE_PLACEHOLDER}; const CONTINUE_READING: &str = ""; const ANCHOR_LINK_TEMPLATE: &str = "anchor-link.html"; +static EMOJI_REPLACER: Lazy = Lazy::new(EmojiReplacer::new); + +/// Although there exists [a list of registered URI schemes][uri-schemes], a link may use arbitrary, +/// private schemes. This regex checks if the given string starts with something that just looks +/// like a scheme, i.e., a case-insensitive identifier followed by a colon. +/// +/// [uri-schemes]: https://www.iana.org/assignments/uri-schemes/uri-schemes.xhtml +static STARTS_WITH_SCHEMA_RE: Lazy = Lazy::new(|| Regex::new(r"^[0-9A-Za-z\-]+:").unwrap()); + +/// Matches a .. tag, getting the opening tag in a capture group. +/// Used only with AnchorInsert::Heading to grab it from the template +static A_HTML_TAG: Lazy = Lazy::new(|| Regex::new(r"(<\s*a[^>]*>).*?<\s*/\s*a>").unwrap()); + +/// Efficiently insert multiple element in their specified index. +/// The elements should sorted in ascending order by their index. +/// +/// This is done in O(n) time. +fn insert_many(input: &mut Vec, elem_to_insert: Vec<(usize, T)>) { + let mut inserted = vec![]; + let mut last_idx = 0; + + for (idx, elem) in elem_to_insert.into_iter() { + let head_len = idx - last_idx; + inserted.extend(input.splice(0..head_len, std::iter::empty())); + inserted.push(elem); + last_idx = idx; + } + let len = input.len(); + inserted.extend(input.drain(0..len)); + + *input = inserted; +} + +/// Colocated asset links refers to the files in the same directory. 
+fn is_colocated_asset_link(link: &str) -> bool { + !link.starts_with('/') && !link.starts_with('#') && !STARTS_WITH_SCHEMA_RE.is_match(link) +} #[derive(Debug)] pub struct Rendered { @@ -34,11 +77,39 @@ struct HeadingRef { end_idx: usize, level: u32, id: Option, + classes: Vec, } impl HeadingRef { - fn new(start: usize, level: u32) -> HeadingRef { - HeadingRef { start_idx: start, end_idx: 0, level, id: None } + fn new(start: usize, level: u32, anchor: Option, classes: &[String]) -> HeadingRef { + HeadingRef { start_idx: start, end_idx: 0, level, id: anchor, classes: classes.to_vec() } + } + + fn to_html(&self, id: &str) -> String { + let mut buffer = String::with_capacity(100); + buffer.write_str("").unwrap(); + buffer } } @@ -86,24 +157,35 @@ fn fix_link( resolved.permalink } Err(_) => { - return Err(format!("Relative link {} not found.", link).into()); + let msg = format!( + "Broken relative link `{}` in {}", + link, + context.current_page_path.unwrap_or("unknown"), + ); + match context.config.link_checker.internal_level { + config::LinkCheckerLevel::Error => bail!(msg), + config::LinkCheckerLevel::Warn => { + console::warn(&msg); + link.to_string() + } + } } } - } else { - if is_external_link(link) { - external_links.push(link.to_owned()); - link.to_owned() - } else if link.starts_with("#") { - // local anchor without the internal zola path - if let Some(current_path) = context.current_page_path { - internal_links.push((current_path.to_owned(), Some(link[1..].to_owned()))); - format!("{}{}", context.current_page_permalink, &link) - } else { - link.to_string() - } + } else if is_external_link(link) { + external_links.push(link.to_owned()); + link.to_owned() + } else if link == "#" { + link.to_string() + } else if let Some(stripped_link) = link.strip_prefix('#') { + // local anchor without the internal zola path + if let Some(current_path) = context.current_page_path { + internal_links.push((current_path.to_owned(), Some(stripped_link.to_owned()))); + 
format!("{}{}", context.current_page_permalink, &link) } else { link.to_string() } + } else { + link.to_string() }; Ok(result) @@ -128,10 +210,15 @@ fn get_heading_refs(events: &[Event]) -> Vec { for (i, event) in events.iter().enumerate() { match event { - Event::Start(Tag::Heading(level)) => { - heading_refs.push(HeadingRef::new(i, *level)); + Event::Start(Tag::Heading(level, anchor, classes)) => { + heading_refs.push(HeadingRef::new( + i, + *level as u32, + anchor.map(|a| a.to_owned()), + &classes.iter().map(|x| x.to_string()).collect::>(), + )); } - Event::End(Tag::Heading(_)) => { + Event::End(Tag::Heading(_, _, _)) => { heading_refs.last_mut().expect("Heading end before start?").end_idx = i; } _ => (), @@ -146,10 +233,6 @@ pub fn markdown_to_html( context: &RenderContext, html_shortcodes: Vec, ) -> Result { - lazy_static! { - static ref EMOJI_REPLACER: gh_emoji::Replacer = gh_emoji::Replacer::new(); - } - let path = context .tera_context .get("page") @@ -162,7 +245,6 @@ pub fn markdown_to_html( let mut code_block: Option = None; - let mut inserted_anchors: Vec = vec![]; let mut headings: Vec = vec![]; let mut internal_links = Vec::new(); let mut external_links = Vec::new(); @@ -175,6 +257,7 @@ pub fn markdown_to_html( opts.insert(Options::ENABLE_FOOTNOTES); opts.insert(Options::ENABLE_STRIKETHROUGH); opts.insert(Options::ENABLE_TASKLISTS); + opts.insert(Options::ENABLE_HEADING_ATTRIBUTES); if context.config.markdown.smart_punctuation { opts.insert(Options::ENABLE_SMART_PUNCTUATION); @@ -238,13 +321,12 @@ pub fn markdown_to_html( }; } + let mut accumulated_block = String::new(); for (event, mut range) in Parser::new_ext(content, opts).into_offset_iter() { match event { Event::Text(text) => { - if let Some(ref mut code_block) = code_block { - let html; + if let Some(ref mut _code_block) = code_block { if contains_shortcode(text.as_ref()) { - let mut accumulated_block = String::new(); // mark the start of the code block events let stack_start = events.len(); 
render_shortcodes!(true, text, range); @@ -262,13 +344,12 @@ pub fn markdown_to_html( } } } - html = code_block.highlight(&accumulated_block); + // remove all the original events from shortcode rendering events.truncate(stack_start); } else { - html = code_block.highlight(&text); + accumulated_block += &text; } - events.push(Event::Html(html.into())); } else { let text = if context.config.markdown.render_emoji { EMOJI_REPLACER.replace_all(&text).to_string().into() @@ -294,10 +375,24 @@ pub fn markdown_to_html( events.push(Event::Html(begin.into())); } Event::End(Tag::CodeBlock(_)) => { + if let Some(ref mut code_block) = code_block { + let html = code_block.highlight(&accumulated_block); + events.push(Event::Html(html.into())); + accumulated_block.clear(); + } + // reset highlight and close the code block code_block = None; events.push(Event::Html("\n".into())); } + Event::Start(Tag::Image(link_type, src, title)) => { + if is_colocated_asset_link(&src) { + let link = format!("{}{}", context.current_page_permalink, &*src); + events.push(Event::Start(Tag::Image(link_type, link.into(), title))); + } else { + events.push(Event::Start(Tag::Image(link_type, src, title))); + } + } Event::Start(Tag::Link(link_type, link, title)) if link.is_empty() => { error = Some(Error::msg("There is a link that is missing a URL")); events.push(Event::Start(Tag::Link(link_type, "#".into(), title))); @@ -390,45 +485,34 @@ pub fn markdown_to_html( }) .collect(); - let mut heading_refs = get_heading_refs(&events); + let heading_refs = get_heading_refs(&events); let mut anchors_to_insert = vec![]; - - // First heading pass: look for a manually-specified IDs, e.g. `# Heading text {#hash}` - // (This is a separate first pass so that auto IDs can avoid collisions with manual IDs.) 
- for heading_ref in heading_refs.iter_mut() { - let end_idx = heading_ref.end_idx; - if let Event::Text(ref mut text) = events[end_idx - 1] { - if text.as_bytes().last() == Some(&b'}') { - if let Some(mut i) = text.find("{#") { - let id = text[i + 2..text.len() - 1].to_owned(); - inserted_anchors.push(id.clone()); - while i > 0 && text.as_bytes()[i - 1] == b' ' { - i -= 1; - } - heading_ref.id = Some(id); - *text = text[..i].to_owned().into(); - } - } + let mut inserted_anchors = vec![]; + for heading in &heading_refs { + if let Some(s) = &heading.id { + inserted_anchors.push(s.to_owned()); } } // Second heading pass: auto-generate remaining IDs, and emit HTML - for heading_ref in heading_refs { + for mut heading_ref in heading_refs { let start_idx = heading_ref.start_idx; let end_idx = heading_ref.end_idx; let title = get_text(&events[start_idx + 1..end_idx]); - let id = heading_ref.id.unwrap_or_else(|| { - find_anchor( + + if heading_ref.id.is_none() { + heading_ref.id = Some(find_anchor( &inserted_anchors, slugify_anchors(&title, context.config.slugify.anchors), 0, - ) - }); - inserted_anchors.push(id.clone()); + )); + } + + inserted_anchors.push(heading_ref.id.clone().unwrap()); + let id = inserted_anchors.last().unwrap(); - // insert `id` to the tag - let html = format!("", lvl = heading_ref.level, id = id); + let html = heading_ref.to_html(id); events[start_idx] = Event::Html(html.into()); // generate anchors and places to insert them @@ -436,7 +520,8 @@ pub fn markdown_to_html( let anchor_idx = match context.insert_anchor { InsertAnchor::Left => start_idx + 1, InsertAnchor::Right => end_idx, - InsertAnchor::None => 0, // Not important + InsertAnchor::Heading => 0, // modified later to the correct value + InsertAnchor::None => unreachable!(), }; let mut c = tera::Context::new(); c.insert("id", &id); @@ -449,19 +534,32 @@ pub fn markdown_to_html( c, &None, ) - .map_err(|e| Error::chain("Failed to render anchor link template", e))?; - 
anchors_to_insert.push((anchor_idx, Event::Html(anchor_link.into()))); + .context("Failed to render anchor link template")?; + if context.insert_anchor != InsertAnchor::Heading { + anchors_to_insert.push((anchor_idx, Event::Html(anchor_link.into()))); + } else { + if let Some(captures) = A_HTML_TAG.captures(&anchor_link) { + let opening_tag = captures.get(1).map_or("", |m| m.as_str()).to_string(); + anchors_to_insert.push((start_idx + 1, Event::Html(opening_tag.into()))); + anchors_to_insert.push((end_idx, Event::Html("".into()))); + } + } } // record heading to make table of contents let permalink = format!("{}#{}", context.current_page_permalink, id); - let h = - Heading { level: heading_ref.level, id, permalink, title, children: Vec::new() }; + let h = Heading { + level: heading_ref.level, + id: id.to_owned(), + permalink, + title, + children: Vec::new(), + }; headings.push(h); } if context.insert_anchor != InsertAnchor::None { - events.insert_many(anchors_to_insert); + insert_many(&mut events, anchors_to_insert); } cmark::html::push_html(&mut html, events.into_iter()); @@ -483,6 +581,17 @@ pub fn markdown_to_html( #[cfg(test)] mod tests { use super::*; + #[test] + + fn insert_many_works() { + let mut v = vec![1, 2, 3, 4, 5]; + insert_many(&mut v, vec![(0, 0), (2, -1), (5, 6)]); + assert_eq!(v, &[0, 1, 2, -1, 3, 4, 5, 6]); + + let mut v2 = vec![1, 2, 3, 4, 5]; + insert_many(&mut v2, vec![(0, 0), (2, -1)]); + assert_eq!(v2, &[0, 1, 2, -1, 3, 4, 5]); + } #[test] fn test_is_external_link() { diff --git a/components/rendering/src/shortcode/mod.rs b/components/markdown/src/shortcode/mod.rs similarity index 99% rename from components/rendering/src/shortcode/mod.rs rename to components/markdown/src/shortcode/mod.rs index 7c142f8927..4239ed4d37 100644 --- a/components/rendering/src/shortcode/mod.rs +++ b/components/markdown/src/shortcode/mod.rs @@ -1,6 +1,7 @@ use std::collections::HashMap; use errors::{Error, Result}; +use libs::tera; use 
utils::templates::{ShortcodeDefinition, ShortcodeFileType}; mod parser; diff --git a/components/rendering/src/shortcode/parser.rs b/components/markdown/src/shortcode/parser.rs similarity index 98% rename from components/rendering/src/shortcode/parser.rs rename to components/markdown/src/shortcode/parser.rs index bcb14116bc..230c36cf93 100644 --- a/components/rendering/src/shortcode/parser.rs +++ b/components/markdown/src/shortcode/parser.rs @@ -1,11 +1,11 @@ use std::ops::Range; -use errors::{bail, Result}; +use errors::{bail, Context as ErrorContext, Result}; +use libs::tera::{to_value, Context, Map, Tera, Value}; use pest::iterators::Pair; use pest::Parser; use pest_derive::Parser; use std::collections::HashMap; -use tera::{to_value, Context, Map, Tera, Value}; use utils::templates::ShortcodeFileType; pub const SHORTCODE_PLACEHOLDER: &str = "@@ZOLA_SC_PLACEHOLDER@@"; @@ -43,7 +43,7 @@ impl Shortcode { new_context.extend(context.clone()); let res = utils::templates::render_template(&tpl_name, tera, new_context, &None) - .map_err(|e| errors::Error::chain(format!("Failed to render {} shortcode", name), e))? + .with_context(|| format!("Failed to render {} shortcode", name))? 
.replace("\r\n", "\n"); Ok(res) @@ -481,10 +481,10 @@ mod tests { fn can_handle_multiple_shortcodes() { let (_, shortcodes) = parse_for_shortcodes( r#" - {{ youtube(id="ub36ffWAqgQ") }} + {{ youtube(id="ub36ffWAqgQ_hey_") }} {{ youtube(id="ub36ffWAqgQ", autoplay=true) }} - {{ vimeo(id="210073083") }} - {{ streamable(id="c0ic") }} + {{ vimeo(id="210073083#hello", n_a_me="hello") }} + {{ streamable(id="c0ic", n1=true) }} {{ gist(url="https://gist.github.com/Keats/32d26f699dcc13ebd41b") }}"#, ) .unwrap(); diff --git a/components/markdown/tests/codeblocks.rs b/components/markdown/tests/codeblocks.rs new file mode 100644 index 0000000000..4b27d29663 --- /dev/null +++ b/components/markdown/tests/codeblocks.rs @@ -0,0 +1,343 @@ +use config::Config; + +mod common; + +fn render_codeblock(content: &str, highlight_code: bool) -> String { + let mut config = Config::default_for_test(); + config.markdown.highlight_code = highlight_code; + common::render_with_config(content, config).unwrap().body +} + +#[test] +fn does_nothing_with_highlighting_disabled() { + let body = render_codeblock( + r#" +``` +foo +bar +``` + "#, + false, + ); + insta::assert_snapshot!(body); +} + +#[test] +fn can_hide_lines() { + let body = render_codeblock( + r#" +```hide_lines=2 +foo +bar +baz +bat +``` + "#, + true, + ); + insta::assert_snapshot!(body); +} + +#[test] +fn can_highlight_single_line() { + let body = render_codeblock( + r#" +```hl_lines=2 +foo +bar +bar +baz +``` + "#, + true, + ); + insta::assert_snapshot!(body); +} + +#[test] +fn can_highlight_line_range() { + let body = render_codeblock( + r#" +```hl_lines=2-3 +foo +bar +bar +baz +``` + "#, + true, + ); + insta::assert_snapshot!(body); +} + +#[test] +fn can_highlight_all_lines() { + let body = render_codeblock( + r#" +```hl_lines=1-4 +foo +bar +bar +baz +``` + "#, + true, + ); + insta::assert_snapshot!(body); +} + +#[test] +fn can_highlight_zero_start_same_as_one() { + let body = render_codeblock( + r#" +```hl_lines=0-3 +foo +bar +bar 
+baz +``` + "#, + true, + ); + let body2 = render_codeblock( + r#" +```hl_lines=1-3 +foo +bar +bar +baz +``` + "#, + true, + ); + assert_eq!(body, body2); +} + +#[test] +fn can_highlight_at_end() { + let body = render_codeblock( + r#" +```hl_lines=3-4 +foo +bar +bar +baz +``` + "#, + true, + ); + insta::assert_snapshot!(body); +} + +#[test] +fn can_highlight_out_of_bounds() { + let body = render_codeblock( + r#" +```hl_lines=3-4567898765 +foo +bar +bar +baz +``` + "#, + true, + ); + insta::assert_snapshot!(body); +} + +#[test] +fn can_highlight_ranges_overlap() { + let body = render_codeblock( + r#" +```hl_lines=2-3 1-2 +foo +bar +bar +baz +``` + "#, + true, + ); + insta::assert_snapshot!(body); +} + +#[test] +fn can_highlight_weird_fence_tokens() { + let body = render_codeblock( + r#" +```hl_lines=2-3, hl_lines = 1 - 2 +foo +bar +bar +baz +``` + "#, + true, + ); + insta::assert_snapshot!(body); +} + +#[test] +fn can_highlight_mix_line_ranges() { + let body = render_codeblock( + r#" +```hl_lines=1 3-4 +foo +bar +bar +baz +``` + "#, + true, + ); + insta::assert_snapshot!(body); +} + +#[test] +fn can_highlight_single_line_range() { + let body = render_codeblock( + r#" +```hl_lines=2-2 +foo +bar +bar +baz +``` + "#, + true, + ); + insta::assert_snapshot!(body); +} + +#[test] +fn can_highlight_reversed_range() { + let body = render_codeblock( + r#" +```hl_lines=3-2 +foo +bar +bar +baz +``` + "#, + true, + ); + insta::assert_snapshot!(body); +} + +#[test] +fn can_add_line_numbers() { + let body = render_codeblock( + r#" +```linenos +foo +bar +``` + "#, + true, + ); + insta::assert_snapshot!(body); +} + +#[test] +fn can_add_line_numbers_windows_eol() { + let body = render_codeblock("```linenos\r\nfoo\r\nbar\r\n```\r\n", true); + insta::assert_snapshot!(body); +} + +#[test] +fn can_add_line_numbers_with_lineno_start() { + let body = render_codeblock( + r#" +```linenos, linenostart=40 +foo +bar +``` + "#, + true, + ); + insta::assert_snapshot!(body); +} + +#[test] +fn 
can_add_line_numbers_with_highlight() { + let body = render_codeblock( + r#" +```linenos, hl_lines=2 +foo +bar +``` + "#, + true, + ); + insta::assert_snapshot!(body); +} + +#[test] +fn can_render_shortcode_in_codeblock() { + let body = render_codeblock( + r#" +```html,linenos +
+{{ out_put_id(id="dQw4w9WgXcQ") }} +
+``` + "#, + true, + ); + insta::assert_snapshot!(body); +} + +#[test] +fn can_render_multiple_shortcodes_in_codeblock() { + let body = render_codeblock( + r#" +```linenos +text1 +{{ out_put_id(id="first") }} +text2 +{{ out_put_id(id="second") }} +text3 +``` + "#, + true, + ); + insta::assert_snapshot!(body); +} + +#[test] +fn can_render_completely_mixed_codeblock() { + let body = render_codeblock( + r#" +```html,linenos +{{/* before(texts="1") */}} +Normally people would not write something & like <> this: +
+An inline {{ out_put_id(id="dQw4w9WgXcQ") }} shortcode +
+Plain text in-between +{%/* quote(author="Vincent") */%} +A quote +{%/* end */%} +{# A Tera comment, you should see it #} + +``` + "#, + true, + ); + insta::assert_snapshot!(body); +} + +#[test] +fn can_highlight_unknown_lang() { + let body = render_codeblock( + r#" +```rustscript +foo +bar +``` + "#, + true, + ); + insta::assert_snapshot!(body); +} diff --git a/components/markdown/tests/common.rs b/components/markdown/tests/common.rs new file mode 100644 index 0000000000..aaef9784cc --- /dev/null +++ b/components/markdown/tests/common.rs @@ -0,0 +1,87 @@ +#![allow(dead_code)] + +use std::collections::HashMap; + +use libs::tera::Tera; + +use config::Config; +use errors::Result; +use markdown::{render_content, RenderContext, Rendered}; +use templates::ZOLA_TERA; +use utils::types::InsertAnchor; + +fn configurable_render( + content: &str, + config: Config, + insert_anchor: InsertAnchor, +) -> Result { + let mut tera = Tera::default(); + tera.extend(&ZOLA_TERA).unwrap(); + + // out_put_id looks like a markdown string + tera.add_raw_template("shortcodes/out_put_id.html", "{{id}}").unwrap(); + tera.add_raw_template( + "shortcodes/image.html", + "{{alt}}", + ) + .unwrap(); + tera.add_raw_template("shortcodes/split_lines.html", r#"{{ body | split(pat="\n") }}"#) + .unwrap(); + tera.add_raw_template("shortcodes/ex1.html", "1").unwrap(); + tera.add_raw_template("shortcodes/ex2.html", "2").unwrap(); + tera.add_raw_template("shortcodes/ex3.html", "3").unwrap(); + tera.add_raw_template("shortcodes/with_tabs.html", "
\n\tHello World!\n
") + .unwrap(); + tera.add_raw_template( + "shortcodes/web_component.html", + "{{ body | safe}}", + ) + .unwrap(); + tera.add_raw_template("shortcodes/render_md.html", "
{{ body | markdown | safe}}
") + .unwrap(); + tera.add_raw_template("shortcodes/a.html", "

a: {{ nth }}

").unwrap(); + tera.add_raw_template("shortcodes/b.html", "

b: {{ nth }}

").unwrap(); + tera.add_raw_template("shortcodes/quote.html", "{{body}}").unwrap(); + tera.add_raw_template("shortcodes/pre.html", "
{{body}}
").unwrap(); + tera.add_raw_template("shortcodes/four_spaces.html", " no highlight\n or there").unwrap(); + tera.add_raw_template("shortcodes/i18n.html", "{{lang}}").unwrap(); + tera.add_raw_template( + "shortcodes/book.md", + "![Book cover in {{ lang }}](cover.{{ lang }}.png)", + ) + .unwrap(); + tera.add_raw_template("shortcodes/md_passthrough.md", "{{body}}").unwrap(); + + let mut permalinks = HashMap::new(); + permalinks.insert("pages/about.md".to_owned(), "https://getzola.org/about/".to_owned()); + + tera.register_filter( + "markdown", + templates::filters::MarkdownFilter::new(config.clone(), permalinks.clone(), tera.clone()), + ); + let mut context = RenderContext::new( + &tera, + &config, + &config.default_language, + "https://www.getzola.org/test/", + &permalinks, + insert_anchor, + ); + let shortcode_def = utils::templates::get_shortcodes(&tera); + context.set_shortcode_definitions(&shortcode_def); + context.set_current_page_path("my_page.md"); + + render_content(content, &context) +} + +pub fn render(content: &str) -> Result { + configurable_render(content, Config::default_for_test(), InsertAnchor::None) +} + +pub fn render_with_config(content: &str, config: Config) -> Result { + configurable_render(content, config, InsertAnchor::None) +} + +pub fn render_with_insert_anchor(content: &str, insert_anchor: InsertAnchor) -> Result { + configurable_render(content, Config::default_for_test(), insert_anchor) +} diff --git a/components/markdown/tests/links.rs b/components/markdown/tests/links.rs new file mode 100644 index 0000000000..9a166988cc --- /dev/null +++ b/components/markdown/tests/links.rs @@ -0,0 +1,55 @@ +mod common; + +#[test] +fn can_detect_links() { + // no links + let rendered = common::render("Hello World!").unwrap(); + assert_eq!(rendered.internal_links.len(), 0); + assert_eq!(rendered.external_links.len(), 0); + + // external + let rendered = common::render("[abc](https://google.com/)").unwrap(); + assert_eq!(rendered.internal_links.len(), 0); + 
assert_eq!(rendered.external_links.len(), 1); + assert_eq!(rendered.external_links[0], "https://google.com/"); + + // internal + let rendered = common::render("[abc](@/pages/about.md)").unwrap(); + assert_eq!(rendered.internal_links, vec![("pages/about.md".to_owned(), None)]); + assert_eq!(rendered.external_links.len(), 0); + + // internal with anchors + let rendered = common::render("[abc](@/pages/about.md#hello)").unwrap(); + assert_eq!(rendered.internal_links[0], ("pages/about.md".to_owned(), Some("hello".to_owned()))); + assert_eq!(rendered.external_links.len(), 0); + + // internal link referring to self + let rendered = common::render("[abc](#hello)").unwrap(); + assert_eq!(rendered.internal_links.len(), 1); + assert_eq!(rendered.internal_links[0], ("my_page.md".to_owned(), Some("hello".to_owned()))); + assert_eq!(rendered.external_links.len(), 0); + + // Mixed with various protocols + let rendered = common::render( + " +[a link](http://google.com) +[a link](http://google.fr) +Email: [foo@bar.baz](mailto:foo@bar.baz) +Email: ", + ) + .unwrap(); + assert_eq!(rendered.internal_links.len(), 0); + assert_eq!( + rendered.external_links, + &["http://google.com".to_owned(), "http://google.fr".to_owned()] + ); + + // Not pointing to anything known so that's an error + let res = common::render("[abc](@/def/123.md)"); + assert!(res.is_err()); + + // Empty link is an error as well + let res = common::render("[abc]()"); + assert!(res.is_err()); + assert_eq!(res.unwrap_err().to_string(), "There is a link that is missing a URL"); +} diff --git a/components/markdown/tests/markdown.rs b/components/markdown/tests/markdown.rs new file mode 100644 index 0000000000..a12246837b --- /dev/null +++ b/components/markdown/tests/markdown.rs @@ -0,0 +1,357 @@ +use std::collections::HashMap; + +use libs::tera::Tera; + +use config::Config; +use markdown::{render_content, RenderContext}; +use templates::ZOLA_TERA; +use utils::slugs::SlugifyStrategy; +use utils::types::InsertAnchor; + +mod 
common; + +#[test] +fn can_render_basic_markdown() { + let cases = vec![ + "Hello world", + "# Hello world", + "Hello *world*", + "Hello\n\tworld", + "Non rendered emoji :smile:", + "[a link](image.jpg)", + "![alt text](image.jpg)", + "

some html

", + ]; + + let body = common::render(&cases.join("\n")).unwrap().body; + insta::assert_snapshot!(body); +} + +#[test] +fn can_make_zola_internal_links() { + let body = common::render( + r#" +[rel link](@/pages/about.md) +[rel link with anchor](@/pages/about.md#cv) +[abs link](https://getzola.org/about/) + "#, + ) + .unwrap() + .body; + insta::assert_snapshot!(body); +} + +#[test] +fn can_handle_heading_ids() { + let mut config = Config::default_for_test(); + + let cases = vec![ + // Basic + "# Hello", + // Same slug as first + "# Hello", + // not a slug because of the slugify strategy chosen + "# L'écologie et vous", + // Chosen slug that already exists without space + "# Hello{#hello}", + // Chosen slug that already exists with space + "# Hello {#hello}", + "# Hello {#Something_else}", + "# Workaround for literal {#…}", + "# Auto {#*matic*}", + // and now some empty heading + "# ", + "# ", + // zola internal links + "# [About](@/pages/about.md)", + // https://github.com/Keats/gutenberg/issues/297 + "# [Rust](https://rust-lang.org \"Rust homepage\")", + // and then some markdown in them + "# `hi`", + "# *hi*", + "# **hi**", + // See https://github.com/getzola/zola/issues/569 + "# text [^1] there\n[^1]: footnote", + // Chosen slug that already exists with space + "# Classes {#classes .bold .another}", + ]; + let body = common::render_with_config(&cases.join("\n"), config.clone()).unwrap().body; + insta::assert_snapshot!(body); + + // And now test without slugifying everything + config.slugify.anchors = SlugifyStrategy::Safe; + let body = common::render_with_config(&cases.join("\n"), config).unwrap().body; + insta::assert_snapshot!(body); +} + +#[test] +fn can_insert_anchors() { + let cases = vec![ + // Basic + "# Hello\n# World", + // https://github.com/Keats/gutenberg/issues/42 + "# Hello!", + // https://github.com/Keats/gutenberg/issues/53 + "## [Rust](https://rust-lang.org)", + "# Hello*_()", + ]; + let body = + 
common::render_with_insert_anchor(&cases.join("\n"), InsertAnchor::Left).unwrap().body; + insta::assert_snapshot!(body); + let body = + common::render_with_insert_anchor(&cases.join("\n"), InsertAnchor::Right).unwrap().body; + insta::assert_snapshot!(body); + let body = + common::render_with_insert_anchor(&cases.join("\n"), InsertAnchor::Heading).unwrap().body; + insta::assert_snapshot!(body); +} + +#[test] +fn can_customise_anchor_template() { + let mut tera = Tera::default(); + tera.extend(&ZOLA_TERA).unwrap(); + tera.add_raw_template("anchor-link.html", " (in {{ lang }})").unwrap(); + let permalinks_ctx = HashMap::new(); + let config = Config::default_for_test(); + let context = RenderContext::new( + &tera, + &config, + &config.default_language, + "", + &permalinks_ctx, + InsertAnchor::Right, + ); + let body = render_content("# Hello", &context).unwrap().body; + insta::assert_snapshot!(body); +} + +#[test] +fn can_use_smart_punctuation() { + let mut config = Config::default_for_test(); + config.markdown.smart_punctuation = true; + let body = common::render_with_config(r#"This -- is "it"..."#, config).unwrap().body; + insta::assert_snapshot!(body); +} + +#[test] +fn can_use_external_links_options() { + let mut config = Config::default_for_test(); + + // no options + let body = common::render("").unwrap().body; + insta::assert_snapshot!(body); + + // target blank + config.markdown.external_links_target_blank = true; + let body = common::render_with_config("", config.clone()).unwrap().body; + insta::assert_snapshot!(body); + + // no follow + config.markdown.external_links_target_blank = false; + config.markdown.external_links_no_follow = true; + let body = common::render_with_config("", config.clone()).unwrap().body; + insta::assert_snapshot!(body); + + // no referrer + config.markdown.external_links_no_follow = false; + config.markdown.external_links_no_referrer = true; + let body = common::render_with_config("", config.clone()).unwrap().body; + 
insta::assert_snapshot!(body); + + // all of them + config.markdown.external_links_no_follow = true; + config.markdown.external_links_target_blank = true; + config.markdown.external_links_no_referrer = true; + let body = common::render_with_config("", config).unwrap().body; + insta::assert_snapshot!(body); +} + +#[test] +fn can_render_emojis() { + let mut config = Config::default_for_test(); + config.markdown.render_emoji = true; + let body = common::render_with_config("Hello, World! :smile:", config).unwrap().body; + assert_eq!(body, "

Hello, World! 😄

\n"); +} + +// https://github.com/getzola/zola/issues/747 +// https://github.com/getzola/zola/issues/816 +#[test] +fn custom_url_schemes_are_untouched() { + let body = common::render( + r#" +[foo@bar.tld](xmpp:foo@bar.tld) + +[(123) 456-7890](tel:+11234567890) + +[blank page](about:blank) + "#, + ) + .unwrap() + .body; + insta::assert_snapshot!(body); +} + +#[test] +fn all_markdown_features_integration() { + let body = common::render( + r#" + + +# h1 Heading + +## h2 Heading + +### h3 Heading + +#### h4 Heading + +##### h5 Heading + +###### h6 Heading + +## Horizontal Rules + +___ + +--- + +*** + +## Emphasis + +**This is bold text** + +__This is bold text__ + +*This is italic text* + +_This is italic text_ + +~~Strikethrough~~ + + +## Blockquotes + + +> Blockquotes can also be nested... +>> ...by using additional greater-than signs right next to each other... +> > > ...or with spaces between arrows. + + +## Lists + +Unordered + ++ Create a list by starting a line with `+`, `-`, or `*` ++ Sub-lists are made by indenting 2 spaces: + - Marker character change forces new list start: + * Ac tristique libero volutpat at + + Facilisis in pretium nisl aliquet + - Nulla volutpat aliquam velit ++ Very easy! + +Ordered + +1. Lorem ipsum dolor sit amet +2. Consectetur adipiscing elit +3. Integer molestie lorem at massa + + +1. You can use sequential numbers... +1. ...or keep all the numbers as `1.` + +Start numbering with offset: + +57. foo +1. bar + + +## Code + +Inline `code` + +Indented code + + // Some comments + line 1 of code + line 2 of code + line 3 of code + + +Block code "fences" + +``` +Sample text here... +``` + +Syntax highlighting + +``` js +var foo = function (bar) { + return bar++; +}; + +console.log(foo(5)); +``` + +## Shortcodes + +## Tables + +| Option | Description | +| ------ | ----------- | +| data | path to data files to supply the data that will be passed into templates. | +| engine | engine to be used for processing templates. 
Handlebars is the default. | +| ext | extension to be used for dest files. | + +Right aligned columns + +| Option | Description | +| ------:| -----------:| +| data | path to data files to supply the data that will be passed into templates. | +| engine | engine to be used for processing templates. Handlebars is the default. | +| ext | extension to be used for dest files. | + + +## Links + +[link text](http://duckduckgo.com) + +[link with title](http://duckduckgo.com/ "Duck duck go") + +## Images + +![Minion](https://octodex.github.com/images/minion.png) +![Stormtroopocat](https://octodex.github.com/images/stormtroopocat.jpg "The Stormtroopocat") + +Like links, Images also have a footnote style syntax + +![Alt text][id] + +With a reference later in the document defining the URL location: + +[id]: https://octodex.github.com/images/dojocat.jpg "The Dojocat" + +## Smileys + +Like :smile:, :cry: + +### Footnotes + +Footnote 1 link[^first]. + +Footnote 2 link[^second]. + +Duplicated footnote reference[^second]. + +[^first]: Footnote **can have markup** +and multiple paragraphs. + +[^second]: Footnote text. 
+ "#, + ) + .unwrap() + .body; + insta::assert_snapshot!(body); +} diff --git a/components/markdown/tests/shortcodes.rs b/components/markdown/tests/shortcodes.rs new file mode 100644 index 0000000000..a99a199d6d --- /dev/null +++ b/components/markdown/tests/shortcodes.rs @@ -0,0 +1,313 @@ +use config::Config; + +mod common; + +#[test] +fn can_render_simple_text_with_shortcodes() { + let body = common::render( + r#" +hello {{ out_put_id(id="shortcode-id") }} + +{% quote() %} +A quote +{% end %} + +{{ out_put_id(id="shortcode-id2") }} + +{{ out_put_id(id="shortcode-id3") }} + "#, + ) + .unwrap() + .body; + insta::assert_snapshot!(body); +} + +#[test] +fn can_grab_lang_in_html_shortcodes() { + let body = common::render( + r#" +hello in {{ i18n() }} + "#, + ) + .unwrap() + .body; + insta::assert_snapshot!(body); +} + +#[test] +fn can_grab_lang_in_md_shortcodes() { + let body = common::render( + r#" +{{ book() }} + "#, + ) + .unwrap() + .body; + insta::assert_snapshot!(body); +} + +#[test] +fn can_render_body_shortcode_and_paragraph_after() { + let body = common::render( + r#" +{% quote() %} +This is a quote +{% end %} + +Here is another paragraph. + "#, + ) + .unwrap() + .body; + insta::assert_snapshot!(body); +} + +#[test] +fn can_render_two_body_shortcode_and_paragraph_after_with_line_break_between() { + let body = common::render( + r#" +{% quote() %} +This is a quote +{% end %} + +{% quote() %} +This is a quote +{% end %} + +Here is another paragraph. 
+ "#, + ) + .unwrap() + .body; + insta::assert_snapshot!(body); +} + +#[test] +fn doesnt_render_ignored_shortcodes() { + let body = common::render( + r#" +{{/* youtube(id="w7Ft2ymGmfc") */}} + "#, + ) + .unwrap() + .body; + insta::assert_snapshot!(body); +} + +// https://github.com/Keats/gutenberg/issues/522 +#[test] +fn doesnt_try_to_highlight_content_from_shortcode() { + let body = common::render( + r#" +{{ four_spaces() }} + "#, + ) + .unwrap() + .body; + insta::assert_snapshot!(body); +} + +#[test] +fn can_emit_newlines_and_whitespace_with_shortcode() { + let body = common::render( + r#" +{% pre() %} +Hello + +Zola + +! + +{% end %} + "#, + ) + .unwrap() + .body; + insta::assert_snapshot!(body); +} + +#[test] +fn can_passthrough_markdown_from_shortcode() { + let body = common::render( + r#" +Hello + +{% md_passthrough() %} +# Passing through + +*to* **the** document +{% end %} + "#, + ) + .unwrap() + .body; + insta::assert_snapshot!(body); +} + +// https://github.com/getzola/zola/issues/1172 +#[test] +fn doesnt_escape_html_shortcodes() { + let body = common::render( + r#" +{{ image(alt="something") }} + "#, + ) + .unwrap() + .body; + insta::assert_snapshot!(body); +} + +#[test] +fn errors_on_unknown_shortcodes() { + let body = common::render( + r#" +{{ unknown() }} + "#, + ); + assert!(body.is_err()); +} + +// https://github.com/getzola/zola/issues/1172 +#[test] +fn can_render_commented_out_shortcodes() { + let body = common::render( + r#" + + "#, + ) + .unwrap() + .body; + insta::assert_snapshot!(body); +} + +#[test] +fn invocation_count_increments_in_shortcode() { + let body = common::render( + r#" +{{ a() }} +{{ b() }} +{{ a() }} +{{ b() }} + "#, + ) + .unwrap() + .body; + insta::assert_snapshot!(body); +} + +// https://github.com/getzola/zola/issues/1689 +#[test] +fn html_shortcode_regression() { + let inputs = vec![ + r#"{{ ex2(page="") }} {{ ex1(page="") }} {{ ex3(page="std") }}"#, + r#"

{{ ex2(page="") }} {{ ex1(page="") }} {{ ex3(page="std") }}

"#, // in html + r#"

\n{{ ex2(page='') }}\n

"#, // with newlines + r#"{{ ex2(page='') }}\n**The Book** {{ ex2(page='') }}"#, + r#"a.{{ ex2(page="") }} b.{{ ex1(page="") }} c.{{ ex3(page="std") }}"#, + ]; + + for input in inputs { + let body = common::render(input).unwrap().body; + insta::assert_snapshot!(body); + } +} + +#[test] +fn can_split_shortcode_body_lines() { + let body = common::render( + r#" +{% split_lines() %} +multi +ple +lines +{% end %} + "#, + ) + .unwrap() + .body; + insta::assert_snapshot!(body); +} + +#[test] +fn can_render_shortcodes_with_tabs() { + // This can cause problems mostly because the 4 spaces sometimes used for tabs also are used + // to indicate code-blocks + let body = common::render( + r#" +{{ with_tabs() }} {{ with_tabs() }} + "#, + ) + .unwrap() + .body; + insta::assert_snapshot!(body); +} + +// https://github.com/getzola/zola/issues/1355 +#[test] +fn can_render_list_with_shortcode() { + let body = common::render( + r#" +* a +* b + {{ with_tabs() }} +* c + {{ with_tabs() }} + "#, + ) + .unwrap() + .body; + insta::assert_snapshot!(body); +} + +// https://github.com/getzola/zola/issues/1655 +#[test] +fn shortcodes_do_not_generate_paragraphs() { + let body = common::render( + r#" +{% web_component() %} +some code; +more code; + +other code here; +{% end %} + "#, + ) + .unwrap() + .body; + insta::assert_snapshot!(body); +} + +#[test] +fn can_render_markdown_in_shortcodes() { + let mut config = Config::default_for_test(); + config.markdown.highlight_code = true; + let body = common::render_with_config( + r#" +{% render_md() %} + +``` +some code; +``` + +{% end %} + "#, + config, + ) + .unwrap() + .body; + insta::assert_snapshot!(body); +} + +// https://github.com/getzola/zola/issues/1600 +#[test] +fn can_use_shortcodes_in_quotes() { + let body = common::render( + r#" +> test quote +> {{ image(alt="a quote") }} + "#, + ) + .unwrap() + .body; + insta::assert_snapshot!(body); +} diff --git a/components/markdown/tests/snapshots/codeblocks__can_add_line_numbers.snap 
b/components/markdown/tests/snapshots/codeblocks__can_add_line_numbers.snap new file mode 100644 index 0000000000..31b639fdda --- /dev/null +++ b/components/markdown/tests/snapshots/codeblocks__can_add_line_numbers.snap @@ -0,0 +1,10 @@ +--- +source: components/rendering/tests/codeblocks.rs +assertion_line: 227 +expression: body + +--- +
1foo +
2bar +
+ diff --git a/components/markdown/tests/snapshots/codeblocks__can_add_line_numbers_windows_eol.snap b/components/markdown/tests/snapshots/codeblocks__can_add_line_numbers_windows_eol.snap new file mode 100644 index 0000000000..1e79755a7e --- /dev/null +++ b/components/markdown/tests/snapshots/codeblocks__can_add_line_numbers_windows_eol.snap @@ -0,0 +1,9 @@ +--- +source: components/markdown/tests/codeblocks.rs +assertion_line: 248 +expression: body +--- +
1foo +
2bar +
+ diff --git a/components/markdown/tests/snapshots/codeblocks__can_add_line_numbers_with_highlight.snap b/components/markdown/tests/snapshots/codeblocks__can_add_line_numbers_with_highlight.snap new file mode 100644 index 0000000000..0df180ec57 --- /dev/null +++ b/components/markdown/tests/snapshots/codeblocks__can_add_line_numbers_with_highlight.snap @@ -0,0 +1,10 @@ +--- +source: components/rendering/tests/codeblocks.rs +assertion_line: 253 +expression: body + +--- +
1foo +
2bar +
+ diff --git a/components/markdown/tests/snapshots/codeblocks__can_add_line_numbers_with_lineno_start.snap b/components/markdown/tests/snapshots/codeblocks__can_add_line_numbers_with_lineno_start.snap new file mode 100644 index 0000000000..2a45955618 --- /dev/null +++ b/components/markdown/tests/snapshots/codeblocks__can_add_line_numbers_with_lineno_start.snap @@ -0,0 +1,10 @@ +--- +source: components/rendering/tests/codeblocks.rs +assertion_line: 240 +expression: body + +--- +
40foo +
41bar +
+ diff --git a/components/markdown/tests/snapshots/codeblocks__can_hide_lines.snap b/components/markdown/tests/snapshots/codeblocks__can_hide_lines.snap new file mode 100644 index 0000000000..7719497feb --- /dev/null +++ b/components/markdown/tests/snapshots/codeblocks__can_hide_lines.snap @@ -0,0 +1,11 @@ +--- +source: components/rendering/tests/codeblocks.rs +assertion_line: 36 +expression: res.body + +--- +
foo
+baz
+bat
+
+ diff --git a/components/markdown/tests/snapshots/codeblocks__can_highlight_all_lines.snap b/components/markdown/tests/snapshots/codeblocks__can_highlight_all_lines.snap new file mode 100644 index 0000000000..877ab4b512 --- /dev/null +++ b/components/markdown/tests/snapshots/codeblocks__can_highlight_all_lines.snap @@ -0,0 +1,12 @@ +--- +source: components/rendering/tests/codeblocks.rs +assertion_line: 83 +expression: body + +--- +
foo
+bar
+bar
+baz
+
+ diff --git a/components/markdown/tests/snapshots/codeblocks__can_highlight_at_end.snap b/components/markdown/tests/snapshots/codeblocks__can_highlight_at_end.snap new file mode 100644 index 0000000000..e64b5ac991 --- /dev/null +++ b/components/markdown/tests/snapshots/codeblocks__can_highlight_at_end.snap @@ -0,0 +1,12 @@ +--- +source: components/rendering/tests/codeblocks.rs +assertion_line: 124 +expression: body + +--- +
foo
+bar
+bar
+baz
+
+ diff --git a/components/markdown/tests/snapshots/codeblocks__can_highlight_line_range.snap b/components/markdown/tests/snapshots/codeblocks__can_highlight_line_range.snap new file mode 100644 index 0000000000..d0fbe584c1 --- /dev/null +++ b/components/markdown/tests/snapshots/codeblocks__can_highlight_line_range.snap @@ -0,0 +1,12 @@ +--- +source: components/rendering/tests/codeblocks.rs +assertion_line: 68 +expression: body + +--- +
foo
+bar
+bar
+baz
+
+ diff --git a/components/markdown/tests/snapshots/codeblocks__can_highlight_mix_line_ranges.snap b/components/markdown/tests/snapshots/codeblocks__can_highlight_mix_line_ranges.snap new file mode 100644 index 0000000000..a726ac1a1e --- /dev/null +++ b/components/markdown/tests/snapshots/codeblocks__can_highlight_mix_line_ranges.snap @@ -0,0 +1,12 @@ +--- +source: components/rendering/tests/codeblocks.rs +assertion_line: 184 +expression: body + +--- +
foo
+bar
+bar
+baz
+
+ diff --git a/components/markdown/tests/snapshots/codeblocks__can_highlight_out_of_bounds.snap b/components/markdown/tests/snapshots/codeblocks__can_highlight_out_of_bounds.snap new file mode 100644 index 0000000000..22ec749e4d --- /dev/null +++ b/components/markdown/tests/snapshots/codeblocks__can_highlight_out_of_bounds.snap @@ -0,0 +1,12 @@ +--- +source: components/rendering/tests/codeblocks.rs +assertion_line: 139 +expression: body + +--- +
foo
+bar
+bar
+baz
+
+ diff --git a/components/markdown/tests/snapshots/codeblocks__can_highlight_ranges_overlap.snap b/components/markdown/tests/snapshots/codeblocks__can_highlight_ranges_overlap.snap new file mode 100644 index 0000000000..d6af7967e1 --- /dev/null +++ b/components/markdown/tests/snapshots/codeblocks__can_highlight_ranges_overlap.snap @@ -0,0 +1,12 @@ +--- +source: components/rendering/tests/codeblocks.rs +assertion_line: 154 +expression: body + +--- +
foo
+bar
+bar
+baz
+
+ diff --git a/components/markdown/tests/snapshots/codeblocks__can_highlight_reversed_range.snap b/components/markdown/tests/snapshots/codeblocks__can_highlight_reversed_range.snap new file mode 100644 index 0000000000..3e596c9fc6 --- /dev/null +++ b/components/markdown/tests/snapshots/codeblocks__can_highlight_reversed_range.snap @@ -0,0 +1,12 @@ +--- +source: components/rendering/tests/codeblocks.rs +assertion_line: 214 +expression: body + +--- +
foo
+bar
+bar
+baz
+
+ diff --git a/components/markdown/tests/snapshots/codeblocks__can_highlight_single_line.snap b/components/markdown/tests/snapshots/codeblocks__can_highlight_single_line.snap new file mode 100644 index 0000000000..fa619ec194 --- /dev/null +++ b/components/markdown/tests/snapshots/codeblocks__can_highlight_single_line.snap @@ -0,0 +1,12 @@ +--- +source: components/rendering/tests/codeblocks.rs +assertion_line: 53 +expression: body + +--- +
foo
+bar
+bar
+baz
+
+ diff --git a/components/markdown/tests/snapshots/codeblocks__can_highlight_single_line_range.snap b/components/markdown/tests/snapshots/codeblocks__can_highlight_single_line_range.snap new file mode 100644 index 0000000000..8bb8a44eee --- /dev/null +++ b/components/markdown/tests/snapshots/codeblocks__can_highlight_single_line_range.snap @@ -0,0 +1,12 @@ +--- +source: components/rendering/tests/codeblocks.rs +assertion_line: 199 +expression: body + +--- +
foo
+bar
+bar
+baz
+
+ diff --git a/components/markdown/tests/snapshots/codeblocks__can_highlight_unknown_lang.snap b/components/markdown/tests/snapshots/codeblocks__can_highlight_unknown_lang.snap new file mode 100644 index 0000000000..5dbe5592a4 --- /dev/null +++ b/components/markdown/tests/snapshots/codeblocks__can_highlight_unknown_lang.snap @@ -0,0 +1,10 @@ +--- +source: components/rendering/tests/codeblocks.rs +assertion_line: 323 +expression: body + +--- +
foo
+bar
+
+ diff --git a/components/markdown/tests/snapshots/codeblocks__can_highlight_weird_fence_tokens.snap b/components/markdown/tests/snapshots/codeblocks__can_highlight_weird_fence_tokens.snap new file mode 100644 index 0000000000..e9b9d8c150 --- /dev/null +++ b/components/markdown/tests/snapshots/codeblocks__can_highlight_weird_fence_tokens.snap @@ -0,0 +1,12 @@ +--- +source: components/rendering/tests/codeblocks.rs +assertion_line: 169 +expression: body + +--- +
foo
+bar
+bar
+baz
+
+ diff --git a/components/markdown/tests/snapshots/codeblocks__can_render_completely_mixed_codeblock.snap b/components/markdown/tests/snapshots/codeblocks__can_render_completely_mixed_codeblock.snap new file mode 100644 index 0000000000..72b7d59bfd --- /dev/null +++ b/components/markdown/tests/snapshots/codeblocks__can_render_completely_mixed_codeblock.snap @@ -0,0 +1,19 @@ +--- +source: components/rendering/tests/codeblocks.rs +assertion_line: 310 +expression: body + +--- +
1<a href="javascript:void(0);">{{ before(texts="1") }}</a> +
2Normally people would not write something & like <> this: +
3<div id="custom-attr"> +
4An inline dQw4w9WgXcQ shortcode +
5</div> +
6Plain text in-between +
7{% quote(author="Vincent") %} +
8A quote +
9{% end %} +
10{# A Tera comment, you should see it #} +
11<!-- end text goes here --> +
+ diff --git a/components/markdown/tests/snapshots/codeblocks__can_render_multiple_shortcodes_in_codeblock.snap b/components/markdown/tests/snapshots/codeblocks__can_render_multiple_shortcodes_in_codeblock.snap new file mode 100644 index 0000000000..e9f80704ec --- /dev/null +++ b/components/markdown/tests/snapshots/codeblocks__can_render_multiple_shortcodes_in_codeblock.snap @@ -0,0 +1,13 @@ +--- +source: components/rendering/tests/codeblocks.rs +assertion_line: 288 +expression: body + +--- +
1text1 +
2first +
3text2 +
4second +
5text3 +
+ diff --git a/components/markdown/tests/snapshots/codeblocks__can_render_shortcode_in_codeblock.snap b/components/markdown/tests/snapshots/codeblocks__can_render_shortcode_in_codeblock.snap new file mode 100644 index 0000000000..71e39e44f4 --- /dev/null +++ b/components/markdown/tests/snapshots/codeblocks__can_render_shortcode_in_codeblock.snap @@ -0,0 +1,11 @@ +--- +source: components/rendering/tests/codeblocks.rs +assertion_line: 272 +expression: body + +--- +
1<div id="custom-attr"> +
2dQw4w9WgXcQ +
3</div> +
+ diff --git a/components/markdown/tests/snapshots/codeblocks__does_nothing_with_highlighting_disabled.snap b/components/markdown/tests/snapshots/codeblocks__does_nothing_with_highlighting_disabled.snap new file mode 100644 index 0000000000..87bd49f0fa --- /dev/null +++ b/components/markdown/tests/snapshots/codeblocks__does_nothing_with_highlighting_disabled.snap @@ -0,0 +1,10 @@ +--- +source: components/rendering/tests/codeblocks.rs +assertion_line: 336 +expression: body + +--- +
foo
+bar
+
+ diff --git a/components/markdown/tests/snapshots/markdown__all_markdown_features_integration.snap b/components/markdown/tests/snapshots/markdown__all_markdown_features_integration.snap new file mode 100644 index 0000000000..72aedc4c21 --- /dev/null +++ b/components/markdown/tests/snapshots/markdown__all_markdown_features_integration.snap @@ -0,0 +1,131 @@ +--- +source: components/rendering/tests/markdown.rs +assertion_line: 358 +expression: body + +--- + +

h1 Heading

+

h2 Heading

+

h3 Heading

+

h4 Heading

+
h5 Heading
+
h6 Heading
+

Horizontal Rules

+
+
+
+

Emphasis

+

This is bold text

+

This is bold text

+

This is italic text

+

This is italic text

+

Strikethrough

+

Blockquotes

+
+

Blockquotes can also be nested...

+
+

...by using additional greater-than signs right next to each other...

+
+

...or with spaces between arrows.

+
+
+
+

Lists

+

Unordered

+
    +
  • Create a list by starting a line with +, -, or *
  • +
  • Sub-lists are made by indenting 2 spaces: +
      +
    • Marker character change forces new list start: +
        +
      • Ac tristique libero volutpat at
      • +
      +
        +
      • Facilisis in pretium nisl aliquet
      • +
      +
        +
      • Nulla volutpat aliquam velit
      • +
      +
    • +
    +
  • +
  • Very easy!
  • +
+

Ordered

+
    +
  1. +

    Lorem ipsum dolor sit amet

    +
  2. +
  3. +

    Consectetur adipiscing elit

    +
  4. +
  5. +

    Integer molestie lorem at massa

    +
  6. +
  7. +

    You can use sequential numbers...

    +
  8. +
  9. +

    ...or keep all the numbers as 1.

    +
  10. +
+

Start numbering with offset:

+
    +
  1. foo
  2. +
  3. bar
  4. +
+

Code

+

Inline code

+

Indented code

+
// Some comments
+line 1 of code
+line 2 of code
+line 3 of code
+
+

Block code "fences"

+
Sample text here...
+
+

Syntax highlighting

+
var foo = function (bar) {
+  return bar++;
+};
+
+console.log(foo(5));
+
+

Shortcodes

+

Tables

+ + + + +
OptionDescription
datapath to data files to supply the data that will be passed into templates.
engineengine to be used for processing templates. Handlebars is the default.
extextension to be used for dest files.
+

Right aligned columns

+ + + + +
OptionDescription
datapath to data files to supply the data that will be passed into templates.
engineengine to be used for processing templates. Handlebars is the default.
extextension to be used for dest files.
+ +

link text

+

link with title

+

Images

+

Minion +Stormtroopocat

+

Like links, Images also have a footnote style syntax

+

Alt text

+

With a reference later in the document defining the URL location:

+

Smileys

+

Like :smile:, :cry:

+

Footnotes

+

Footnote 1 link1.

+

Footnote 2 link2.

+

Duplicated footnote reference2.

+
1 +

Footnote can have markup +and multiple paragraphs.

+
+
2 +

Footnote text.

+
+ diff --git a/components/markdown/tests/snapshots/markdown__can_customise_anchor_template.snap b/components/markdown/tests/snapshots/markdown__can_customise_anchor_template.snap new file mode 100644 index 0000000000..5ccac5fb30 --- /dev/null +++ b/components/markdown/tests/snapshots/markdown__can_customise_anchor_template.snap @@ -0,0 +1,8 @@ +--- +source: components/rendering/tests/markdown.rs +assertion_line: 127 +expression: body + +--- +

Hello (in en)

+ diff --git a/components/markdown/tests/snapshots/markdown__can_handle_heading_ids-2.snap b/components/markdown/tests/snapshots/markdown__can_handle_heading_ids-2.snap new file mode 100644 index 0000000000..d2847a8e94 --- /dev/null +++ b/components/markdown/tests/snapshots/markdown__can_handle_heading_ids-2.snap @@ -0,0 +1,27 @@ +--- +source: components/rendering/tests/markdown.rs +assertion_line: 84 +expression: body + +--- +

Hello

+

Hello

+

L'écologie et vous

+

Hello

+

Hello

+

Hello

+

Workaround for literal {#…}

+

Auto

+

+

+

About

+

Rust

+

hi

+

hi

+

hi

+

text 1 there

+
1 +

footnote

+

Classes

+
+ diff --git a/components/markdown/tests/snapshots/markdown__can_handle_heading_ids.snap b/components/markdown/tests/snapshots/markdown__can_handle_heading_ids.snap new file mode 100644 index 0000000000..87cce483ae --- /dev/null +++ b/components/markdown/tests/snapshots/markdown__can_handle_heading_ids.snap @@ -0,0 +1,27 @@ +--- +source: components/rendering/tests/markdown.rs +assertion_line: 79 +expression: body + +--- +

Hello

+

Hello

+

L'écologie et vous

+

Hello

+

Hello

+

Hello

+

Workaround for literal {#…}

+

Auto

+

+

+

About

+

Rust

+

hi

+

hi

+

hi

+

text 1 there

+
1 +

footnote

+

Classes

+
+ diff --git a/components/markdown/tests/snapshots/markdown__can_insert_anchors-2.snap b/components/markdown/tests/snapshots/markdown__can_insert_anchors-2.snap new file mode 100644 index 0000000000..de4b51915e --- /dev/null +++ b/components/markdown/tests/snapshots/markdown__can_insert_anchors-2.snap @@ -0,0 +1,12 @@ +--- +source: components/rendering/tests/markdown.rs +assertion_line: 108 +expression: body + +--- +

Hello🔗

+

World🔗

+

Hello!🔗

+

Rust🔗

+

Hello*_()🔗

+ diff --git a/components/markdown/tests/snapshots/markdown__can_insert_anchors-3.snap b/components/markdown/tests/snapshots/markdown__can_insert_anchors-3.snap new file mode 100644 index 0000000000..9051a6f936 --- /dev/null +++ b/components/markdown/tests/snapshots/markdown__can_insert_anchors-3.snap @@ -0,0 +1,10 @@ +--- +source: components/markdown/tests/markdown.rs +expression: body +--- +

Hello

+

World

+

Hello!

+

Rust

+

Hello*_()

+ diff --git a/components/markdown/tests/snapshots/markdown__can_insert_anchors.snap b/components/markdown/tests/snapshots/markdown__can_insert_anchors.snap new file mode 100644 index 0000000000..b1af26eb74 --- /dev/null +++ b/components/markdown/tests/snapshots/markdown__can_insert_anchors.snap @@ -0,0 +1,12 @@ +--- +source: components/rendering/tests/markdown.rs +assertion_line: 105 +expression: body + +--- +

🔗Hello

+

🔗World

+

🔗Hello!

+

🔗Rust

+

🔗Hello*_()

+ diff --git a/components/markdown/tests/snapshots/markdown__can_make_zola_internal_links.snap b/components/markdown/tests/snapshots/markdown__can_make_zola_internal_links.snap new file mode 100644 index 0000000000..e5a57a1a12 --- /dev/null +++ b/components/markdown/tests/snapshots/markdown__can_make_zola_internal_links.snap @@ -0,0 +1,10 @@ +--- +source: components/rendering/tests/markdown.rs +assertion_line: 43 +expression: body + +--- +

rel link +rel link with anchor +abs link

+ diff --git a/components/markdown/tests/snapshots/markdown__can_render_basic_markdown.snap b/components/markdown/tests/snapshots/markdown__can_render_basic_markdown.snap new file mode 100644 index 0000000000..e839beab96 --- /dev/null +++ b/components/markdown/tests/snapshots/markdown__can_render_basic_markdown.snap @@ -0,0 +1,13 @@ +--- +source: components/markdown/tests/markdown.rs +expression: body +--- +

Hello world

+

Hello world

+

Hello world +Hello +world +Non rendered emoji :smile: +a link +alt text

+

some html

diff --git a/components/markdown/tests/snapshots/markdown__can_use_external_links_options-2.snap b/components/markdown/tests/snapshots/markdown__can_use_external_links_options-2.snap new file mode 100644 index 0000000000..ae1b79e732 --- /dev/null +++ b/components/markdown/tests/snapshots/markdown__can_use_external_links_options-2.snap @@ -0,0 +1,8 @@ +--- +source: components/rendering/tests/markdown.rs +assertion_line: 149 +expression: body + +--- +

https://google.com

+ diff --git a/components/markdown/tests/snapshots/markdown__can_use_external_links_options-3.snap b/components/markdown/tests/snapshots/markdown__can_use_external_links_options-3.snap new file mode 100644 index 0000000000..27a53f5693 --- /dev/null +++ b/components/markdown/tests/snapshots/markdown__can_use_external_links_options-3.snap @@ -0,0 +1,8 @@ +--- +source: components/rendering/tests/markdown.rs +assertion_line: 155 +expression: body + +--- +

https://google.com

+ diff --git a/components/markdown/tests/snapshots/markdown__can_use_external_links_options-4.snap b/components/markdown/tests/snapshots/markdown__can_use_external_links_options-4.snap new file mode 100644 index 0000000000..ef73ab0bce --- /dev/null +++ b/components/markdown/tests/snapshots/markdown__can_use_external_links_options-4.snap @@ -0,0 +1,8 @@ +--- +source: components/rendering/tests/markdown.rs +assertion_line: 161 +expression: body + +--- +

https://google.com

+ diff --git a/components/markdown/tests/snapshots/markdown__can_use_external_links_options-5.snap b/components/markdown/tests/snapshots/markdown__can_use_external_links_options-5.snap new file mode 100644 index 0000000000..40edd8ddb2 --- /dev/null +++ b/components/markdown/tests/snapshots/markdown__can_use_external_links_options-5.snap @@ -0,0 +1,8 @@ +--- +source: components/rendering/tests/markdown.rs +assertion_line: 168 +expression: body + +--- +

https://google.com

+ diff --git a/components/markdown/tests/snapshots/markdown__can_use_external_links_options.snap b/components/markdown/tests/snapshots/markdown__can_use_external_links_options.snap new file mode 100644 index 0000000000..4f539aa00b --- /dev/null +++ b/components/markdown/tests/snapshots/markdown__can_use_external_links_options.snap @@ -0,0 +1,8 @@ +--- +source: components/rendering/tests/markdown.rs +assertion_line: 144 +expression: body + +--- +

https://google.com

+ diff --git a/components/markdown/tests/snapshots/markdown__can_use_smart_punctuation.snap b/components/markdown/tests/snapshots/markdown__can_use_smart_punctuation.snap new file mode 100644 index 0000000000..961ea2c8cb --- /dev/null +++ b/components/markdown/tests/snapshots/markdown__can_use_smart_punctuation.snap @@ -0,0 +1,8 @@ +--- +source: components/rendering/tests/markdown.rs +assertion_line: 135 +expression: body + +--- +

This – is “it”…

+ diff --git a/components/markdown/tests/snapshots/markdown__custom_url_schemes_are_untouched.snap b/components/markdown/tests/snapshots/markdown__custom_url_schemes_are_untouched.snap new file mode 100644 index 0000000000..ca553df885 --- /dev/null +++ b/components/markdown/tests/snapshots/markdown__custom_url_schemes_are_untouched.snap @@ -0,0 +1,10 @@ +--- +source: components/rendering/tests/markdown.rs +assertion_line: 194 +expression: body + +--- +

foo@bar.tld

+

(123) 456-7890

+

blank page

+ diff --git a/components/markdown/tests/snapshots/shortcodes__can_emit_newlines_and_whitespace_with_shortcode.snap b/components/markdown/tests/snapshots/shortcodes__can_emit_newlines_and_whitespace_with_shortcode.snap new file mode 100644 index 0000000000..b8765447bf --- /dev/null +++ b/components/markdown/tests/snapshots/shortcodes__can_emit_newlines_and_whitespace_with_shortcode.snap @@ -0,0 +1,11 @@ +--- +source: components/rendering/tests/shortcodes.rs +assertion_line: 161 +expression: body + +--- +
Hello
+
+Zola
+
+!
diff --git a/components/markdown/tests/snapshots/shortcodes__can_grab_lang_in_html_shortcodes.snap b/components/markdown/tests/snapshots/shortcodes__can_grab_lang_in_html_shortcodes.snap new file mode 100644 index 0000000000..568a2709f2 --- /dev/null +++ b/components/markdown/tests/snapshots/shortcodes__can_grab_lang_in_html_shortcodes.snap @@ -0,0 +1,8 @@ +--- +source: components/rendering/tests/shortcodes.rs +assertion_line: 75 +expression: body + +--- +

hello in en

+ diff --git a/components/markdown/tests/snapshots/shortcodes__can_grab_lang_in_md_shortcodes.snap b/components/markdown/tests/snapshots/shortcodes__can_grab_lang_in_md_shortcodes.snap new file mode 100644 index 0000000000..5e96f76443 --- /dev/null +++ b/components/markdown/tests/snapshots/shortcodes__can_grab_lang_in_md_shortcodes.snap @@ -0,0 +1,6 @@ +--- +source: components/markdown/tests/shortcodes.rs +expression: body +--- +

Book cover in en

+ diff --git a/components/markdown/tests/snapshots/shortcodes__can_grab_lang_in_shortcodes.snap b/components/markdown/tests/snapshots/shortcodes__can_grab_lang_in_shortcodes.snap new file mode 100644 index 0000000000..ca54e25445 --- /dev/null +++ b/components/markdown/tests/snapshots/shortcodes__can_grab_lang_in_shortcodes.snap @@ -0,0 +1,8 @@ +--- +source: components/rendering/tests/shortcodes.rs +assertion_line: 50 +expression: body + +--- +

hello in en

+ diff --git a/components/markdown/tests/snapshots/shortcodes__can_passthrough_markdown_from_shortcode.snap b/components/markdown/tests/snapshots/shortcodes__can_passthrough_markdown_from_shortcode.snap new file mode 100644 index 0000000000..a4bc83bb1e --- /dev/null +++ b/components/markdown/tests/snapshots/shortcodes__can_passthrough_markdown_from_shortcode.snap @@ -0,0 +1,10 @@ +--- +source: components/rendering/tests/shortcodes.rs +assertion_line: 184 +expression: body + +--- +

Hello

+

Passing through

+

to the document

+ diff --git a/components/markdown/tests/snapshots/shortcodes__can_render_body_shortcode_and_paragraph_after.snap b/components/markdown/tests/snapshots/shortcodes__can_render_body_shortcode_and_paragraph_after.snap new file mode 100644 index 0000000000..a873672db1 --- /dev/null +++ b/components/markdown/tests/snapshots/shortcodes__can_render_body_shortcode_and_paragraph_after.snap @@ -0,0 +1,9 @@ +--- +source: components/rendering/tests/shortcodes.rs +assertion_line: 76 +expression: body + +--- +This is a quote +

Here is another paragraph.

+ diff --git a/components/markdown/tests/snapshots/shortcodes__can_render_commented_out_shortcodes.snap b/components/markdown/tests/snapshots/shortcodes__can_render_commented_out_shortcodes.snap new file mode 100644 index 0000000000..c1284ab60f --- /dev/null +++ b/components/markdown/tests/snapshots/shortcodes__can_render_commented_out_shortcodes.snap @@ -0,0 +1,8 @@ +--- +source: components/rendering/tests/shortcodes.rs +assertion_line: 222 +expression: body + +--- + + diff --git a/components/markdown/tests/snapshots/shortcodes__can_render_list_with_shortcode.snap b/components/markdown/tests/snapshots/shortcodes__can_render_list_with_shortcode.snap new file mode 100644 index 0000000000..4f4cd5ef8a --- /dev/null +++ b/components/markdown/tests/snapshots/shortcodes__can_render_list_with_shortcode.snap @@ -0,0 +1,18 @@ +--- +source: components/rendering/tests/shortcodes.rs +assertion_line: 343 +expression: body + +--- +
    +
  • a
  • +
  • b +
    + Hello World! +
  • +
  • c +
    + Hello World! +
  • +
+ diff --git a/components/markdown/tests/snapshots/shortcodes__can_render_markdown_in_shortcodes.snap b/components/markdown/tests/snapshots/shortcodes__can_render_markdown_in_shortcodes.snap new file mode 100644 index 0000000000..9030f3f453 --- /dev/null +++ b/components/markdown/tests/snapshots/shortcodes__can_render_markdown_in_shortcodes.snap @@ -0,0 +1,9 @@ +--- +source: components/rendering/tests/shortcodes.rs +assertion_line: 371 +expression: body + +--- +
some code;
+
+
diff --git a/components/markdown/tests/snapshots/shortcodes__can_render_shortcodes_with_tabs.snap b/components/markdown/tests/snapshots/shortcodes__can_render_shortcodes_with_tabs.snap new file mode 100644 index 0000000000..dd9532b330 --- /dev/null +++ b/components/markdown/tests/snapshots/shortcodes__can_render_shortcodes_with_tabs.snap @@ -0,0 +1,12 @@ +--- +source: components/rendering/tests/shortcodes.rs +assertion_line: 326 +expression: body + +--- +

+ Hello World! +
+ Hello World! +

+ diff --git a/components/markdown/tests/snapshots/shortcodes__can_render_simple_text_with_shortcodes.snap b/components/markdown/tests/snapshots/shortcodes__can_render_simple_text_with_shortcodes.snap new file mode 100644 index 0000000000..76a1430c5e --- /dev/null +++ b/components/markdown/tests/snapshots/shortcodes__can_render_simple_text_with_shortcodes.snap @@ -0,0 +1,8 @@ +--- +source: components/rendering/tests/shortcodes.rs +assertion_line: 52 +expression: body + +--- +

hello shortcode-id

+A quoteshortcode-id2shortcode-id3 diff --git a/components/markdown/tests/snapshots/shortcodes__can_render_two_body_shortcode_and_paragraph_after_with_line_break_between.snap b/components/markdown/tests/snapshots/shortcodes__can_render_two_body_shortcode_and_paragraph_after_with_line_break_between.snap new file mode 100644 index 0000000000..57fa5602c4 --- /dev/null +++ b/components/markdown/tests/snapshots/shortcodes__can_render_two_body_shortcode_and_paragraph_after_with_line_break_between.snap @@ -0,0 +1,9 @@ +--- +source: components/rendering/tests/shortcodes.rs +assertion_line: 92 +expression: body + +--- +This is a quoteThis is a quote +

Here is another paragraph.

+ diff --git a/components/markdown/tests/snapshots/shortcodes__can_split_shortcode_body_lines.snap b/components/markdown/tests/snapshots/shortcodes__can_split_shortcode_body_lines.snap new file mode 100644 index 0000000000..5eab4618b1 --- /dev/null +++ b/components/markdown/tests/snapshots/shortcodes__can_split_shortcode_body_lines.snap @@ -0,0 +1,7 @@ +--- +source: components/rendering/tests/shortcodes.rs +assertion_line: 312 +expression: body + +--- +[multi, ple, lines] diff --git a/components/markdown/tests/snapshots/shortcodes__can_use_shortcodes_in_quotes.snap b/components/markdown/tests/snapshots/shortcodes__can_use_shortcodes_in_quotes.snap new file mode 100644 index 0000000000..62ffd9fb52 --- /dev/null +++ b/components/markdown/tests/snapshots/shortcodes__can_use_shortcodes_in_quotes.snap @@ -0,0 +1,11 @@ +--- +source: components/rendering/tests/shortcodes.rs +assertion_line: 385 +expression: body + +--- +
+

test quote +a quote

+
+ diff --git a/components/markdown/tests/snapshots/shortcodes__doesnt_escape_html_shortcodes.snap b/components/markdown/tests/snapshots/shortcodes__doesnt_escape_html_shortcodes.snap new file mode 100644 index 0000000000..c843f795f5 --- /dev/null +++ b/components/markdown/tests/snapshots/shortcodes__doesnt_escape_html_shortcodes.snap @@ -0,0 +1,7 @@ +--- +source: components/rendering/tests/shortcodes.rs +assertion_line: 198 +expression: body + +--- +something diff --git a/components/markdown/tests/snapshots/shortcodes__doesnt_render_ignored_shortcodes.snap b/components/markdown/tests/snapshots/shortcodes__doesnt_render_ignored_shortcodes.snap new file mode 100644 index 0000000000..64d9bfbabe --- /dev/null +++ b/components/markdown/tests/snapshots/shortcodes__doesnt_render_ignored_shortcodes.snap @@ -0,0 +1,8 @@ +--- +source: components/rendering/tests/shortcodes.rs +assertion_line: 104 +expression: body + +--- +

{{ youtube(id="w7Ft2ymGmfc") }}

+ diff --git a/components/markdown/tests/snapshots/shortcodes__doesnt_try_to_highlight_content_from_shortcode.snap b/components/markdown/tests/snapshots/shortcodes__doesnt_try_to_highlight_content_from_shortcode.snap new file mode 100644 index 0000000000..eb39b83101 --- /dev/null +++ b/components/markdown/tests/snapshots/shortcodes__doesnt_try_to_highlight_content_from_shortcode.snap @@ -0,0 +1,8 @@ +--- +source: components/rendering/tests/shortcodes.rs +assertion_line: 140 +expression: body + +--- + no highlight + or there diff --git a/components/markdown/tests/snapshots/shortcodes__html_shortcode_regression-2.snap b/components/markdown/tests/snapshots/shortcodes__html_shortcode_regression-2.snap new file mode 100644 index 0000000000..7e8a4df8a2 --- /dev/null +++ b/components/markdown/tests/snapshots/shortcodes__html_shortcode_regression-2.snap @@ -0,0 +1,7 @@ +--- +source: components/rendering/tests/shortcodes.rs +assertion_line: 293 +expression: body + +--- +

2 1 3

diff --git a/components/markdown/tests/snapshots/shortcodes__html_shortcode_regression-3.snap b/components/markdown/tests/snapshots/shortcodes__html_shortcode_regression-3.snap new file mode 100644 index 0000000000..550685fd0b --- /dev/null +++ b/components/markdown/tests/snapshots/shortcodes__html_shortcode_regression-3.snap @@ -0,0 +1,7 @@ +--- +source: components/rendering/tests/shortcodes.rs +assertion_line: 293 +expression: body + +--- +

\n2\n

diff --git a/components/markdown/tests/snapshots/shortcodes__html_shortcode_regression-4.snap b/components/markdown/tests/snapshots/shortcodes__html_shortcode_regression-4.snap new file mode 100644 index 0000000000..2710a11496 --- /dev/null +++ b/components/markdown/tests/snapshots/shortcodes__html_shortcode_regression-4.snap @@ -0,0 +1,8 @@ +--- +source: components/rendering/tests/shortcodes.rs +assertion_line: 293 +expression: body + +--- +

2\nThe Book 2

+ diff --git a/components/markdown/tests/snapshots/shortcodes__html_shortcode_regression-5.snap b/components/markdown/tests/snapshots/shortcodes__html_shortcode_regression-5.snap new file mode 100644 index 0000000000..ad3d779d9d --- /dev/null +++ b/components/markdown/tests/snapshots/shortcodes__html_shortcode_regression-5.snap @@ -0,0 +1,8 @@ +--- +source: components/rendering/tests/shortcodes.rs +assertion_line: 293 +expression: body + +--- +

a.2 b.1 c.3

+ diff --git a/components/markdown/tests/snapshots/shortcodes__html_shortcode_regression.snap b/components/markdown/tests/snapshots/shortcodes__html_shortcode_regression.snap new file mode 100644 index 0000000000..7e01b0b93d --- /dev/null +++ b/components/markdown/tests/snapshots/shortcodes__html_shortcode_regression.snap @@ -0,0 +1,8 @@ +--- +source: components/rendering/tests/shortcodes.rs +assertion_line: 293 +expression: body + +--- +

2 1 3

+ diff --git a/components/markdown/tests/snapshots/shortcodes__invocation_count_increments_in_shortcode.snap b/components/markdown/tests/snapshots/shortcodes__invocation_count_increments_in_shortcode.snap new file mode 100644 index 0000000000..3aa399d0f0 --- /dev/null +++ b/components/markdown/tests/snapshots/shortcodes__invocation_count_increments_in_shortcode.snap @@ -0,0 +1,11 @@ +--- +source: components/rendering/tests/shortcodes.rs +assertion_line: 254 +expression: body + +--- +

a: 1

+

b: 1

+

a: 2

+

b: 2

+ diff --git a/components/markdown/tests/snapshots/shortcodes__md_shortcode_regression.snap b/components/markdown/tests/snapshots/shortcodes__md_shortcode_regression.snap new file mode 100644 index 0000000000..d56cb996d8 --- /dev/null +++ b/components/markdown/tests/snapshots/shortcodes__md_shortcode_regression.snap @@ -0,0 +1,11 @@ +--- +source: components/rendering/tests/shortcodes.rs +assertion_line: 274 +expression: body + +--- +

ttest1

+

123

+

ttest2

+

123

+ diff --git a/components/markdown/tests/snapshots/shortcodes__shortcodes_do_not_generate_paragraphs.snap b/components/markdown/tests/snapshots/shortcodes__shortcodes_do_not_generate_paragraphs.snap new file mode 100644 index 0000000000..1d78091997 --- /dev/null +++ b/components/markdown/tests/snapshots/shortcodes__shortcodes_do_not_generate_paragraphs.snap @@ -0,0 +1,10 @@ +--- +source: components/rendering/tests/shortcodes.rs +assertion_line: 362 +expression: body + +--- +some code; +more code; + +other code here; diff --git a/components/markdown/tests/snapshots/summary__basic_summary.snap b/components/markdown/tests/snapshots/summary__basic_summary.snap new file mode 100644 index 0000000000..1b75a3a331 --- /dev/null +++ b/components/markdown/tests/snapshots/summary__basic_summary.snap @@ -0,0 +1,13 @@ +--- +source: components/rendering/tests/summary.rs +assertion_line: 24 +expression: body + +--- +

Hello world!

+

Introduction

+
    +
  • first
  • +
  • second
  • +
+ diff --git a/components/markdown/tests/snapshots/summary__summary_with_shortcodes.snap b/components/markdown/tests/snapshots/summary__summary_with_shortcodes.snap new file mode 100644 index 0000000000..051f81b04f --- /dev/null +++ b/components/markdown/tests/snapshots/summary__summary_with_shortcodes.snap @@ -0,0 +1,12 @@ +--- +source: components/rendering/tests/summary.rs +assertion_line: 41 +expression: body + +--- +

a: 1

a: 2

+

Hello world

+

+
some code;
+
+ diff --git a/components/markdown/tests/summary.rs b/components/markdown/tests/summary.rs new file mode 100644 index 0000000000..774da55940 --- /dev/null +++ b/components/markdown/tests/summary.rs @@ -0,0 +1,47 @@ +mod common; + +fn get_summary(content: &str) -> String { + let rendered = common::render(content).unwrap(); + assert!(rendered.summary_len.is_some()); + let summary_len = rendered.summary_len.unwrap(); + rendered.body[..summary_len].to_owned() +} + +#[test] +fn basic_summary() { + let body = get_summary( + r#" +Hello world! + +# Introduction + +- first +- second + + + +And some content after + "#, + ); + insta::assert_snapshot!(body); +} + +// https://zola.discourse.group/t/zola-12-issue-with-continue-reading/590/7 +#[test] +fn summary_with_shortcodes() { + let body = get_summary( + r#" +{{ a() }} {{ a() }} +{% render_md() %} +# Hello world +{% end %} +``` +some code; +``` + + +And some content after + "#, + ); + insta::assert_snapshot!(body); +} diff --git a/components/markdown/tests/toc.rs b/components/markdown/tests/toc.rs new file mode 100644 index 0000000000..e0d0d03f5a --- /dev/null +++ b/components/markdown/tests/toc.rs @@ -0,0 +1,77 @@ +mod common; + +#[test] +fn can_make_simple_toc() { + let res = common::render( + r#" +# Heading 1 + +## Heading 2 + +## Another Heading 2 + +### Last one + "#, + ) + .unwrap(); + + let toc = res.toc; + assert_eq!(toc.len(), 1); + assert_eq!(toc[0].children.len(), 2); + assert_eq!(toc[0].children[1].children.len(), 1); +} + +#[test] +fn can_ignore_tags_in_toc() { + let res = common::render( + r#" +## heading with `code` + +## [anchor](https://duckduckgo.com/) in heading + +## **bold** and *italics* + "#, + ) + .unwrap(); + + let toc = res.toc; + assert_eq!(toc.len(), 3); + + assert_eq!(toc[0].id, "heading-with-code"); + assert_eq!(toc[0].title, "heading with code"); + + assert_eq!(toc[1].id, "anchor-in-heading"); + assert_eq!(toc[1].title, "anchor in heading"); + + assert_eq!(toc[2].id, "bold-and-italics"); + 
assert_eq!(toc[2].title, "bold and italics"); +} + +#[test] +fn can_make_toc_all_levels() { + let res = common::render( + r#" +# A + +## B1 + +## B2 + +### C + +#### D + +##### E + +###### F +"#, + ) + .unwrap(); + + let toc = res.toc; + assert_eq!(toc.len(), 1); + assert_eq!(toc[0].children.len(), 2); + assert_eq!(toc[0].children[1].children.len(), 1); + assert_eq!(toc[0].children[1].children[0].children.len(), 1); + assert_eq!(toc[0].children[1].children[0].children[0].children.len(), 1); +} diff --git a/components/rendering/Cargo.toml b/components/rendering/Cargo.toml deleted file mode 100644 index 74b6475c29..0000000000 --- a/components/rendering/Cargo.toml +++ /dev/null @@ -1,28 +0,0 @@ -[package] -name = "rendering" -version = "0.1.0" -authors = ["Vincent Prouillet "] -edition = "2018" -include = ["src/**/*"] - -[dependencies] -tera = { version = "1", features = ["preserve_order"] } -syntect = "4" -pulldown-cmark = { version = "0.8", default-features = false } -serde = "1" -serde_derive = "1" -pest = "2" -pest_derive = "2" -regex = "1" -lazy_static = "1" -gh-emoji = "1.0" - -errors = { path = "../errors" } -front_matter = { path = "../front_matter" } -utils = { path = "../utils" } -config = { path = "../config" } -link_checker = { path = "../link_checker" } - -[dev-dependencies] -templates = { path = "../templates" } - diff --git a/components/rendering/tests/codeblock_hide_lines.rs b/components/rendering/tests/codeblock_hide_lines.rs deleted file mode 100644 index 72a2f8dc66..0000000000 --- a/components/rendering/tests/codeblock_hide_lines.rs +++ /dev/null @@ -1,56 +0,0 @@ -use std::collections::HashMap; - -use tera::Tera; - -use config::Config; -use front_matter::InsertAnchor; -use rendering::{render_content, RenderContext}; - -macro_rules! colored_html_line { - ( $s:expr ) => {{ - let mut result = "".to_string(); - result.push_str($s); - result.push_str("\n"); - result - }}; -} - -macro_rules! 
colored_html { - ( $($s:expr),* $(,)* ) => {{ - let mut result = "
".to_string();
-        $(
-            result.push_str(colored_html_line!($s).as_str());
-        )*
-        result.push_str("
\n"); - result - }}; -} - -#[test] -fn hide_lines_simple() { - let tera_ctx = Tera::default(); - let permalinks_ctx = HashMap::new(); - let mut config = Config::default_for_test(); - config.markdown.highlight_code = true; - let context = RenderContext::new( - &tera_ctx, - &config, - &config.default_language, - "", - &permalinks_ctx, - InsertAnchor::None, - ); - let res = render_content( - r#" -```hide_lines=2 -foo -bar -baz -bat -``` - "#, - &context, - ) - .unwrap(); - assert_eq!(res.body, colored_html!("foo", "baz", "bat")); -} diff --git a/components/rendering/tests/codeblock_hl_lines.rs b/components/rendering/tests/codeblock_hl_lines.rs deleted file mode 100644 index e002a06529..0000000000 --- a/components/rendering/tests/codeblock_hl_lines.rs +++ /dev/null @@ -1,515 +0,0 @@ -use std::collections::HashMap; - -use tera::Tera; - -use config::Config; -use front_matter::InsertAnchor; -use rendering::{render_content, RenderContext}; - -macro_rules! colored_html_line { - ( @no $s:expr ) => {{ - let mut result = "".to_string(); - result.push_str($s); - result.push_str("\n"); - result - }}; - ( @hl $s:expr ) => {{ - let mut result = "".to_string(); - result.push_str(""); - result.push_str($s); - result.push_str("\n"); - result.push_str(""); - result - }}; -} - -macro_rules! colored_html { - ( $(@$kind:tt $s:expr),* $(,)* ) => {{ - let mut result = "
".to_string();
-        $(
-            result.push_str(colored_html_line!(@$kind $s).as_str());
-        )*
-        result.push_str("
\n"); - result - }}; -} - -#[test] -fn hl_lines_simple() { - let tera_ctx = Tera::default(); - let permalinks_ctx = HashMap::new(); - let mut config = Config::default_for_test(); - config.markdown.highlight_code = true; - let context = RenderContext::new( - &tera_ctx, - &config, - &config.default_language, - "", - &permalinks_ctx, - InsertAnchor::None, - ); - let res = render_content( - r#" -```hl_lines=2 -foo -bar -bar -baz -``` - "#, - &context, - ) - .unwrap(); - assert_eq!( - res.body, - colored_html!( - @no "foo", - @hl "bar", - @no "bar", - @no "baz", - ) - ); -} - -#[test] -fn hl_lines_in_middle() { - let tera_ctx = Tera::default(); - let permalinks_ctx = HashMap::new(); - let mut config = Config::default_for_test(); - config.markdown.highlight_code = true; - let context = RenderContext::new( - &tera_ctx, - &config, - &config.default_language, - "", - &permalinks_ctx, - InsertAnchor::None, - ); - let res = render_content( - r#" -```hl_lines=2-3 -foo -bar -bar -baz -``` - "#, - &context, - ) - .unwrap(); - assert_eq!( - res.body, - colored_html!( - @no "foo", - @hl "bar", - @hl "bar", - @no "baz", - ) - ); -} - -#[test] -fn hl_lines_all() { - let tera_ctx = Tera::default(); - let permalinks_ctx = HashMap::new(); - let mut config = Config::default_for_test(); - config.markdown.highlight_code = true; - let context = RenderContext::new( - &tera_ctx, - &config, - &config.default_language, - "", - &permalinks_ctx, - InsertAnchor::None, - ); - let res = render_content( - r#" -```hl_lines=1-4 -foo -bar -bar -baz -``` - "#, - &context, - ) - .unwrap(); - assert_eq!( - res.body, - colored_html!( - @hl "foo", - @hl "bar", - @hl "bar", - @hl "baz", - ) - ); -} - -#[test] -fn hl_lines_start_from_one() { - let tera_ctx = Tera::default(); - let permalinks_ctx = HashMap::new(); - let mut config = Config::default_for_test(); - config.markdown.highlight_code = true; - let context = RenderContext::new( - &tera_ctx, - &config, - &config.default_language, - "", - 
&permalinks_ctx, - InsertAnchor::None, - ); - let res = render_content( - r#" -```hl_lines=1-3 -foo -bar -bar -baz -``` - "#, - &context, - ) - .unwrap(); - assert_eq!( - res.body, - colored_html!( - @hl "foo", - @hl "bar", - @hl "bar", - @no "baz", - ) - ); -} - -#[test] -fn hl_lines_start_from_zero() { - let tera_ctx = Tera::default(); - let permalinks_ctx = HashMap::new(); - let mut config = Config::default_for_test(); - config.markdown.highlight_code = true; - let context = RenderContext::new( - &tera_ctx, - &config, - &config.default_language, - "", - &permalinks_ctx, - InsertAnchor::None, - ); - let res = render_content( - r#" -```hl_lines=0-3 -foo -bar -bar -baz -``` - "#, - &context, - ) - .unwrap(); - assert_eq!( - res.body, - colored_html!( - @hl "foo", - @hl "bar", - @hl "bar", - @no "baz", - ) - ); -} - -#[test] -fn hl_lines_end() { - let tera_ctx = Tera::default(); - let permalinks_ctx = HashMap::new(); - let mut config = Config::default_for_test(); - config.markdown.highlight_code = true; - let context = RenderContext::new( - &tera_ctx, - &config, - &config.default_language, - "", - &permalinks_ctx, - InsertAnchor::None, - ); - let res = render_content( - r#" -```hl_lines=3-4 -foo -bar -bar -baz -``` - "#, - &context, - ) - .unwrap(); - assert_eq!( - res.body, - colored_html!( - @no "foo", - @no "bar", - @hl "bar", - @hl "baz", - ) - ); -} - -#[test] -fn hl_lines_end_out_of_bounds() { - let tera_ctx = Tera::default(); - let permalinks_ctx = HashMap::new(); - let mut config = Config::default_for_test(); - config.markdown.highlight_code = true; - let context = RenderContext::new( - &tera_ctx, - &config, - &config.default_language, - "", - &permalinks_ctx, - InsertAnchor::None, - ); - let res = render_content( - r#" -```hl_lines=3-4294967295 -foo -bar -bar -baz -``` - "#, - &context, - ) - .unwrap(); - assert_eq!( - res.body, - colored_html!( - @no "foo", - @no "bar", - @hl "bar", - @hl "baz", - ) - ); -} - -#[test] -fn hl_lines_overlap() { - let 
tera_ctx = Tera::default(); - let permalinks_ctx = HashMap::new(); - let mut config = Config::default_for_test(); - config.markdown.highlight_code = true; - let context = RenderContext::new( - &tera_ctx, - &config, - &config.default_language, - "", - &permalinks_ctx, - InsertAnchor::None, - ); - let res = render_content( - r#" -```hl_lines=2-3 1-2 -foo -bar -bar -baz -``` - "#, - &context, - ) - .unwrap(); - assert_eq!( - res.body, - colored_html!( - @hl "foo", - @hl "bar", - @hl "bar", - @no "baz", - ) - ); -} -#[test] -fn hl_lines_multiple() { - let tera_ctx = Tera::default(); - let permalinks_ctx = HashMap::new(); - let mut config = Config::default_for_test(); - config.markdown.highlight_code = true; - let context = RenderContext::new( - &tera_ctx, - &config, - &config.default_language, - "", - &permalinks_ctx, - InsertAnchor::None, - ); - let res = render_content( - r#" -```hl_lines=2-3,hl_lines=1-2 -foo -bar -bar -baz -``` - "#, - &context, - ) - .unwrap(); - assert_eq!( - res.body, - colored_html!( - @hl "foo", - @hl "bar", - @hl "bar", - @no "baz", - ) - ); -} - -#[test] -fn hl_lines_extra_spaces() { - let tera_ctx = Tera::default(); - let permalinks_ctx = HashMap::new(); - let mut config = Config::default_for_test(); - config.markdown.highlight_code = true; - let context = RenderContext::new( - &tera_ctx, - &config, - &config.default_language, - "", - &permalinks_ctx, - InsertAnchor::None, - ); - let res = render_content( - r#" -``` hl_lines = 2 - 3 1 - 2 -foo -bar -bar -baz -``` - "#, - &context, - ) - .unwrap(); - assert_eq!( - res.body, - colored_html!( - @hl "foo", - @hl "bar", - @hl "bar", - @no "baz", - ) - ); -} - -#[test] -fn hl_lines_int_and_range() { - let tera_ctx = Tera::default(); - let permalinks_ctx = HashMap::new(); - let mut config = Config::default_for_test(); - config.markdown.highlight_code = true; - let context = RenderContext::new( - &tera_ctx, - &config, - &config.default_language, - "", - &permalinks_ctx, - InsertAnchor::None, - ); - 
let res = render_content( - r#" -```hl_lines=1 3-4 -foo -bar -bar -baz -``` - "#, - &context, - ) - .unwrap(); - assert_eq!( - res.body, - colored_html!( - @hl "foo", - @no "bar", - @hl "bar", - @hl "baz", - ) - ); -} - -#[test] -fn hl_lines_single_line_range() { - let tera_ctx = Tera::default(); - let permalinks_ctx = HashMap::new(); - let mut config = Config::default_for_test(); - config.markdown.highlight_code = true; - let context = RenderContext::new( - &tera_ctx, - &config, - &config.default_language, - "", - &permalinks_ctx, - InsertAnchor::None, - ); - let res = render_content( - r#" -```hl_lines=2-2 -foo -bar -bar -baz -``` - "#, - &context, - ) - .unwrap(); - assert_eq!( - res.body, - colored_html!( - @no "foo", - @hl "bar", - @no "bar", - @no "baz", - ) - ); -} - -#[test] -fn hl_lines_reverse_range() { - let tera_ctx = Tera::default(); - let permalinks_ctx = HashMap::new(); - let mut config = Config::default_for_test(); - config.markdown.highlight_code = true; - let context = RenderContext::new( - &tera_ctx, - &config, - &config.default_language, - "", - &permalinks_ctx, - InsertAnchor::None, - ); - let res = render_content( - r#" -```hl_lines=3-2 -foo -bar -bar -baz -``` - "#, - &context, - ) - .unwrap(); - assert_eq!( - res.body, - colored_html!( - @no "foo", - @hl "bar", - @hl "bar", - @no "baz", - ) - ); -} diff --git a/components/rendering/tests/codeblock_linenos.rs b/components/rendering/tests/codeblock_linenos.rs deleted file mode 100644 index 4315fba8e2..0000000000 --- a/components/rendering/tests/codeblock_linenos.rs +++ /dev/null @@ -1,97 +0,0 @@ -use std::collections::HashMap; - -use tera::Tera; - -use config::Config; -use front_matter::InsertAnchor; -use rendering::{render_content, RenderContext}; - -#[test] -fn can_add_line_numbers() { - let tera_ctx = Tera::default(); - let permalinks_ctx = HashMap::new(); - let mut config = Config::default_for_test(); - config.markdown.highlight_code = true; - let context = RenderContext::new( - &tera_ctx, 
- &config, - &config.default_language, - "", - &permalinks_ctx, - InsertAnchor::None, - ); - let res = render_content( - r#" -```linenos -foo -bar -``` - "#, - &context, - ) - .unwrap(); - assert_eq!( - res.body, - "
1foo\n
2bar\n
\n" - ); -} - -#[test] -fn can_add_line_numbers_with_linenostart() { - let tera_ctx = Tera::default(); - let permalinks_ctx = HashMap::new(); - let mut config = Config::default_for_test(); - config.markdown.highlight_code = true; - let context = RenderContext::new( - &tera_ctx, - &config, - &config.default_language, - "", - &permalinks_ctx, - InsertAnchor::None, - ); - let res = render_content( - r#" -```linenos, linenostart=40 -foo -bar -``` - "#, - &context, - ) - .unwrap(); - assert_eq!( - res.body, - "
40foo\n
41bar\n
\n" - ); -} - -#[test] -fn can_add_line_numbers_with_highlight() { - let tera_ctx = Tera::default(); - let permalinks_ctx = HashMap::new(); - let mut config = Config::default_for_test(); - config.markdown.highlight_code = true; - let context = RenderContext::new( - &tera_ctx, - &config, - &config.default_language, - "", - &permalinks_ctx, - InsertAnchor::None, - ); - let res = render_content( - r#" -```linenos, hl_lines=2 -foo -bar -``` - "#, - &context, - ) - .unwrap(); - assert_eq!( - res.body, - "
1foo\n
2bar\n
\n" - ); -} diff --git a/components/rendering/tests/codeblock_shortcode_mix.rs b/components/rendering/tests/codeblock_shortcode_mix.rs deleted file mode 100644 index e1781913b3..0000000000 --- a/components/rendering/tests/codeblock_shortcode_mix.rs +++ /dev/null @@ -1,271 +0,0 @@ -use std::collections::HashMap; - -use config::Config; -use front_matter::InsertAnchor; -use templates::ZOLA_TERA; -use rendering::{render_content, RenderContext}; - -#[test] -fn can_render_shortcode_in_codeblock() { - let permalinks_ctx = HashMap::new(); - let config = Config::default_for_test(); - let mut context = RenderContext::new( - &ZOLA_TERA, - &config, - &config.default_language, - "", - &permalinks_ctx, - InsertAnchor::None, - ); - let shortcode_def = utils::templates::get_shortcodes(&ZOLA_TERA); - context.set_shortcode_definitions(&shortcode_def); - // simple case - let res = render_content( - r#" -``` -{{ youtube(id="dQw4w9WgXcQ") }} -``` - "#, - &context, - ) - .unwrap(); - assert_eq!( - res.body, - "
<div >\n    <iframe src="https://www.youtube-nocookie.com/embed/dQw4w9WgXcQ" webkitallowfullscreen mozallowfullscreen allowfullscreen></iframe>\n</div>\n\n
\n" - ); - // mixed with other contents - let res = render_content( - r#" -``` -
-{{ youtube(id="dQw4w9WgXcQ") }} -
-``` - "#, - &context, - ) - .unwrap(); - assert_eq!( - res.body, - "
<div id="custom-attr">\n<div >\n    <iframe src="https://www.youtube-nocookie.com/embed/dQw4w9WgXcQ" webkitallowfullscreen mozallowfullscreen allowfullscreen></iframe>\n</div>\n\n</div>\n
\n" - ); - // mixed content with syntax and line numbers - let res = render_content( - r#" -```html,linenos -
-{{ youtube(id="dQw4w9WgXcQ") }} -
-``` - "#, - &context, - ) - .unwrap(); - assert_eq!( - res.body, - "
1<div id="custom-attr">\n
2<div >\n
3 <iframe src="https://www.youtube-nocookie.com/embed/dQw4w9WgXcQ" webkitallowfullscreen mozallowfullscreen allowfullscreen></iframe>\n
4</div>\n
5\n
6</div>\n
\n" - ); -} - -#[test] -fn can_render_multiple_shortcodes_in_codeblock() { - let permalinks_ctx = HashMap::new(); - let config = Config::default_for_test(); - let mut context = RenderContext::new( - &ZOLA_TERA, - &config, - &config.default_language, - "", - &permalinks_ctx, - InsertAnchor::None, - ); - let shortcode_def = utils::templates::get_shortcodes(&ZOLA_TERA); - context.set_shortcode_definitions(&shortcode_def); - // simple case - let res = render_content( - r#" -``` -{{ youtube(id="dQw4w9WgXcQ") }} -{{ gist(url="https://gist.github.com/Keats/e5fb6aad409f28721c0ba14161644c57", class="gist") }} -``` - "#, - &context, - ) - .unwrap(); - assert_eq!( - res.body, - "
<div >\n    <iframe src="https://www.youtube-nocookie.com/embed/dQw4w9WgXcQ" webkitallowfullscreen mozallowfullscreen allowfullscreen></iframe>\n</div>\n\n<div class="gist">\n    <script src="https:&#x2F;&#x2F;gist.github.com&#x2F;Keats&#x2F;e5fb6aad409f28721c0ba14161644c57.js"></script>\n</div>\n\n
\n" - ); - // mixed with other contents - let res = render_content( - r#" -``` -text 1 -{{ youtube(id="dQw4w9WgXcQ") }} -text 2 -{{ gist(url="https://gist.github.com/Keats/e5fb6aad409f28721c0ba14161644c57", class="gist") }} -text 3 -``` - "#, - &context, - ) - .unwrap(); - assert_eq!( - res.body, - "
text 1\n<div >\n    <iframe src="https://www.youtube-nocookie.com/embed/dQw4w9WgXcQ" webkitallowfullscreen mozallowfullscreen allowfullscreen></iframe>\n</div>\n\ntext 2\n<div class="gist">\n    <script src="https:&#x2F;&#x2F;gist.github.com&#x2F;Keats&#x2F;e5fb6aad409f28721c0ba14161644c57.js"></script>\n</div>\n\ntext 3\n
\n" - ); - // mixed content with syntax and line numbers - let res = render_content( - r#" -```html,linenos -text 1 -{{ youtube(id="dQw4w9WgXcQ") }} -text 2 -{{ gist(url="https://gist.github.com/Keats/e5fb6aad409f28721c0ba14161644c57", class="gist") }} -text 3 -``` - "#, - &context, - ) - .unwrap(); - assert_eq!( - res.body, -r#"
1<span>text 1</span> -
2<div > -
3 <iframe src="https://www.youtube-nocookie.com/embed/dQw4w9WgXcQ" webkitallowfullscreen mozallowfullscreen allowfullscreen></iframe> -
4</div> -
5 -
6<span>text 2</span> -
7<div class="gist"> -
8 <script src="https:&#x2F;&#x2F;gist.github.com&#x2F;Keats&#x2F;e5fb6aad409f28721c0ba14161644c57.js"></script> -
9</div> -
10 -
11<span>text 3</span> -
-"# - ); -} - -#[test] -fn is_highlighting_linenos_still_working() { - let permalinks_ctx = HashMap::new(); - let mut config = Config::default_for_test(); - config.markdown.highlight_code = true; - let mut context = RenderContext::new( - &ZOLA_TERA, - &config, - &config.default_language, - "", - &permalinks_ctx, - InsertAnchor::None, - ); - let shortcode_def = utils::templates::get_shortcodes(&ZOLA_TERA); - context.set_shortcode_definitions(&shortcode_def); - // single shortcode mixed with syntax and line numbers - let res = render_content( - r#" -```html,linenos -
-{{ youtube(id="dQw4w9WgXcQ") }} -
-``` - "#, - &context, - ) - .unwrap(); - assert_eq!( - res.body, - "
1<div id="custom-attr">\n
2<div >\n
3 <iframe src="https://www.youtube-nocookie.com/embed/dQw4w9WgXcQ" webkitallowfullscreen mozallowfullscreen allowfullscreen></iframe>\n
4</div>\n
5\n
6</div>\n
\n" - ); - // multiple shortcode mixed with syntax and line numbers - let res = render_content( - r#" -```html,linenos -text 1 -{{ youtube(id="dQw4w9WgXcQ") }} -text 2 -{{ gist(url="https://gist.github.com/Keats/e5fb6aad409f28721c0ba14161644c57", class="gist") }} -text 3 -``` - "#, - &context, - ) - .unwrap(); - assert_eq!( - res.body, -r#"
1<span>text 1</span> -
2<div > -
3 <iframe src="https://www.youtube-nocookie.com/embed/dQw4w9WgXcQ" webkitallowfullscreen mozallowfullscreen allowfullscreen></iframe> -
4</div> -
5 -
6<span>text 2</span> -
7<div class="gist"> -
8 <script src="https:&#x2F;&#x2F;gist.github.com&#x2F;Keats&#x2F;e5fb6aad409f28721c0ba14161644c57.js"></script> -
9</div> -
10 -
11<span>text 3</span> -
-"# - ); -} - -#[test] -fn codeblock_shortcode_mix_all_stars() { - let permalinks_ctx = HashMap::new(); - let mut config = Config::default_for_test(); - config.markdown.highlight_code = true; - let mut context = RenderContext::new( - &ZOLA_TERA, - &config, - &config.default_language, - "", - &permalinks_ctx, - InsertAnchor::None, - ); - let shortcode_def = utils::templates::get_shortcodes(&ZOLA_TERA); - context.set_shortcode_definitions(&shortcode_def); - // single shortcode mixed with syntax and line numbers - let res = render_content( - r#" -```html,linenos -{{/* before(texts="1") */}} -Normally people would not write something & like <> this: -
-An inline {{ youtube(id="dQw4w9WgXcQ", autoplay=true, class="youtube") }} shortcode -
-Plain text in-between -{%/* quote(author="Vincent") */%} -A quote -{%/* end */%} -{{ gist(url="https://gist.github.com/Keats/e5fb6aad409f28721c0ba14161644c57", class="gist") }} -{# A Tera comment, you should see it #} - -``` - "#, - &context, - ) - .unwrap(); - assert_eq!( - res.body, -r#"
1<a href="javascript:void(0);">{{ before(texts="1") }}</a> -
2Normally people would not write something & like <> this: -
3<div id="custom-attr"> -
4An inline <div class="youtube"> -
5 <iframe src="https://www.youtube-nocookie.com/embed/dQw4w9WgXcQ?autoplay=1" webkitallowfullscreen mozallowfullscreen allowfullscreen></iframe> -
6</div> -
7 shortcode -
8</div> -
9Plain text in-between -
10{% quote(author="Vincent") %} -
11A quote -
12{% end %} -
13<div class="gist"> -
14 <script src="https:&#x2F;&#x2F;gist.github.com&#x2F;Keats&#x2F;e5fb6aad409f28721c0ba14161644c57.js"></script> -
15</div> -
16 -
17{# A Tera comment, you should see it #} -
18<!-- end text goes here --> -
-"# - ); -} \ No newline at end of file diff --git a/components/rendering/tests/common/mod.rs b/components/rendering/tests/common/mod.rs deleted file mode 100644 index e086744fba..0000000000 --- a/components/rendering/tests/common/mod.rs +++ /dev/null @@ -1,16 +0,0 @@ -pub struct ShortCode { - pub name: &'static str, - pub output: &'static str, - pub is_md: bool, -} - -impl ShortCode { - pub const fn new(name: &'static str, output: &'static str, is_md: bool) -> ShortCode { - ShortCode { name, output, is_md } - } - - /// Return filename for shortcode - pub fn filename(&self) -> String { - format!("{}.{}", self.name, if self.is_md { "md" } else { "html" }) - } -} diff --git a/components/rendering/tests/integration.rs b/components/rendering/tests/integration.rs deleted file mode 100644 index 29606c8e2d..0000000000 --- a/components/rendering/tests/integration.rs +++ /dev/null @@ -1,342 +0,0 @@ -mod common; - -use common::ShortCode; - -const COMPLETE_PAGE: &str = r#" - - -# h1 Heading - -## h2 Heading - -### h3 Heading - -#### h4 Heading - -##### h5 Heading - -###### h6 Heading - -## Horizontal Rules - -___ - ---- - -*** - -## Emphasis - -**This is bold text** - -__This is bold text__ - -*This is italic text* - -_This is italic text_ - -~~Strikethrough~~ - - -## Blockquotes - - -> Blockquotes can also be nested... ->> ...by using additional greater-than signs right next to each other... -> > > ...or with spaces between arrows. - - -## Lists - -Unordered - -+ Create a list by starting a line with `+`, `-`, or `*` -+ Sub-lists are made by indenting 2 spaces: - - Marker character change forces new list start: - * Ac tristique libero volutpat at - + Facilisis in pretium nisl aliquet - - Nulla volutpat aliquam velit -+ Very easy! - -Ordered - -1. Lorem ipsum dolor sit amet -2. Consectetur adipiscing elit -3. Integer molestie lorem at massa - - -1. You can use sequential numbers... -1. ...or keep all the numbers as `1.` - -Start numbering with offset: - -57. foo -1. 
bar - - -## Code - -Inline `code` - -Indented code - - // Some comments - line 1 of code - line 2 of code - line 3 of code - - -Block code "fences" - -``` -Sample text here... -``` - -Syntax highlighting - -``` js -var foo = function (bar) { - return bar++; -}; - -console.log(foo(5)); -``` - -## Shortcodes - -{% quote(author="John Doe") %} -This is a test quote! -1900-01-01 -{% end %} - -## Tables - -| Option | Description | -| ------ | ----------- | -| data | path to data files to supply the data that will be passed into templates. | -| engine | engine to be used for processing templates. Handlebars is the default. | -| ext | extension to be used for dest files. | - -Right aligned columns - -| Option | Description | -| ------:| -----------:| -| data | path to data files to supply the data that will be passed into templates. | -| engine | engine to be used for processing templates. Handlebars is the default. | -| ext | extension to be used for dest files. | - - -## Links - -[link text](http://duckduckgo.com) - -[link with title](http://duckduckgo.com/) - -## Images - -![Minion](https://octodex.github.com/images/minion.png) -![Stormtroopocat](https://octodex.github.com/images/stormtroopocat.jpg "The Stormtroopocat") - -Like links, Images also have a footnote style syntax - -![Alt text][id] - -With a reference later in the document defining the URL location: - -[id]: https://octodex.github.com/images/dojocat.jpg "The Dojocat" - -### Footnotes - -Footnote 1 link[^first]. - -Footnote 2 link[^second]. - -Duplicated footnote reference[^second]. - -[^first]: Footnote **can have markup** -and multiple paragraphs. - -[^second]: Footnote text."#; - -#[test] -fn complete_page() { - let config = config::Config::default_for_test(); - - let mut tera = tera::Tera::default(); - - let shortcodes: Vec = vec![ShortCode::new( - "quote", - r"
-{{ body }}
--- {{ author}} -
", - false, - )]; - - let mut permalinks = std::collections::HashMap::new(); - - permalinks.insert("".to_string(), "".to_string()); - - // Add all shortcodes - for ShortCode { name, is_md, output } in shortcodes.into_iter() { - tera.add_raw_template( - &format!("shortcodes/{}.{}", name, if is_md { "md" } else { "html" }), - &output, - ) - .unwrap(); - } - - let mut context = rendering::RenderContext::new( - &tera, - &config, - &config.default_language, - "", - &permalinks, - front_matter::InsertAnchor::None, - ); - let shortcode_def = utils::templates::get_shortcodes(&tera); - context.set_shortcode_definitions(&shortcode_def); - - let rendered = rendering::render_content(COMPLETE_PAGE, &context); - assert!(rendered.is_ok(), "Rendering failed"); - - let rendered = rendered.unwrap(); - - let asserted_internal_links: Vec<(String, Option)> = vec![]; - let asserted_external_links: Vec = - vec!["http://duckduckgo.com".to_string(), "http://duckduckgo.com/".to_string()]; - - assert_eq!(rendered.internal_links, asserted_internal_links, "Internal links unequal"); - assert_eq!(rendered.external_links, asserted_external_links, "External links unequal"); - - assert_eq!( - rendered.body, - r##" -

h1 Heading

-

h2 Heading

-

h3 Heading

-

h4 Heading

-
h5 Heading
-
h6 Heading
-

Horizontal Rules

-
-
-
-

Emphasis

-

This is bold text

-

This is bold text

-

This is italic text

-

This is italic text

-

Strikethrough

-

Blockquotes

-
-

Blockquotes can also be nested...

-
-

...by using additional greater-than signs right next to each other...

-
-

...or with spaces between arrows.

-
-
-
-

Lists

-

Unordered

-
    -
  • Create a list by starting a line with +, -, or *
  • -
  • Sub-lists are made by indenting 2 spaces: -
      -
    • Marker character change forces new list start: -
        -
      • Ac tristique libero volutpat at
      • -
      -
        -
      • Facilisis in pretium nisl aliquet
      • -
      -
        -
      • Nulla volutpat aliquam velit
      • -
      -
    • -
    -
  • -
  • Very easy!
  • -
-

Ordered

-
    -
  1. -

    Lorem ipsum dolor sit amet

    -
  2. -
  3. -

    Consectetur adipiscing elit

    -
  4. -
  5. -

    Integer molestie lorem at massa

    -
  6. -
  7. -

    You can use sequential numbers...

    -
  8. -
  9. -

    ...or keep all the numbers as 1.

    -
  10. -
-

Start numbering with offset:

-
    -
  1. foo
  2. -
  3. bar
  4. -
-

Code

-

Inline code

-

Indented code

-
// Some comments
-line 1 of code
-line 2 of code
-line 3 of code
-
-

Block code "fences"

-
Sample text here...
-
-

Syntax highlighting

-
var foo = function (bar) {
-  return bar++;
-};
-
-console.log(foo(5));
-
-

Shortcodes

-
-This is a test quote! -1900-01-01
--- John Doe -

Tables

- - - - -
OptionDescription
datapath to data files to supply the data that will be passed into templates.
engineengine to be used for processing templates. Handlebars is the default.
extextension to be used for dest files.
-

Right aligned columns

- - - - -
OptionDescription
datapath to data files to supply the data that will be passed into templates.
engineengine to be used for processing templates. Handlebars is the default.
extextension to be used for dest files.
- -

link text

-

link with title

-

Images

-

Minion -Stormtroopocat

-

Like links, Images also have a footnote style syntax

-

Alt text

-

With a reference later in the document defining the URL location:

-

Footnotes

-

Footnote 1 link1.

-

Footnote 2 link2.

-

Duplicated footnote reference2.

-
1 -

Footnote can have markup -and multiple paragraphs.

-
-
2 -

Footnote text.

-
-"## - ); -} diff --git a/components/rendering/tests/links.rs b/components/rendering/tests/links.rs deleted file mode 100644 index bc735cf768..0000000000 --- a/components/rendering/tests/links.rs +++ /dev/null @@ -1,62 +0,0 @@ -use std::collections::HashMap; - -use errors::Result; -use rendering::Rendered; - -mod common; - -fn render_content(content: &str, permalinks: HashMap) -> Result { - let config = config::Config::default_for_test(); - let tera = tera::Tera::default(); - let mut context = rendering::RenderContext::new( - &tera, - &config, - &config.default_language, - "http://mypage.com", - &permalinks, - front_matter::InsertAnchor::None, - ); - context.set_current_page_path("mine.md"); - - rendering::render_content(content, &context) -} - -#[test] -fn can_detect_links() { - // no links - let rendered = render_content("Hello World!", HashMap::new()).unwrap(); - assert_eq!(rendered.internal_links.len(), 0); - assert_eq!(rendered.external_links.len(), 0); - - // external - let rendered = render_content("[abc](https://google.com/)", HashMap::new()).unwrap(); - assert_eq!(rendered.internal_links.len(), 0); - assert_eq!(rendered.external_links.len(), 1); - assert_eq!(rendered.external_links[0], "https://google.com/"); - - // internal - let mut permalinks = HashMap::new(); - permalinks.insert("def/123.md".to_owned(), "https://xyz.com/def/123".to_owned()); - let rendered = render_content("[abc](@/def/123.md)", permalinks).unwrap(); - assert_eq!(rendered.internal_links.len(), 1); - assert_eq!(rendered.internal_links[0], ("def/123.md".to_owned(), None)); - assert_eq!(rendered.external_links.len(), 0); - - // internal with anchors - let mut permalinks = HashMap::new(); - permalinks.insert("def/123.md".to_owned(), "https://xyz.com/def/123".to_owned()); - let rendered = render_content("[abc](@/def/123.md#hello)", permalinks).unwrap(); - assert_eq!(rendered.internal_links.len(), 1); - assert_eq!(rendered.internal_links[0], ("def/123.md".to_owned(), 
Some("hello".to_owned()))); - assert_eq!(rendered.external_links.len(), 0); - - // internal link referring to self - let rendered = render_content("[abc](#hello)", HashMap::new()).unwrap(); - assert_eq!(rendered.internal_links.len(), 1); - assert_eq!(rendered.internal_links[0], ("mine.md".to_owned(), Some("hello".to_owned()))); - assert_eq!(rendered.external_links.len(), 0); - - // Not pointing to anything so that's an error - let res = render_content("[abc](@/def/123.md)", HashMap::new()); - assert!(res.is_err()); -} diff --git a/components/rendering/tests/markdown.rs b/components/rendering/tests/markdown.rs deleted file mode 100644 index bcd0eda297..0000000000 --- a/components/rendering/tests/markdown.rs +++ /dev/null @@ -1,1704 +0,0 @@ -use std::collections::HashMap; - -use tera::Tera; - -use config::Config; -use front_matter::InsertAnchor; -use rendering::{render_content, RenderContext}; -use templates::ZOLA_TERA; -use utils::slugs::SlugifyStrategy; - -#[test] -fn can_do_render_content_simple() { - let tera_ctx = Tera::default(); - let permalinks_ctx = HashMap::new(); - let config = Config::default_for_test(); - let context = RenderContext::new( - &tera_ctx, - &config, - &config.default_language, - "", - &permalinks_ctx, - InsertAnchor::None, - ); - let res = render_content("hello", &context).unwrap(); - assert_eq!(res.body, "

hello

\n"); -} - -#[test] -fn doesnt_highlight_code_block_with_highlighting_off() { - let tera_ctx = Tera::default(); - let permalinks_ctx = HashMap::new(); - let mut config = Config::default_for_test(); - config.markdown.highlight_code = false; - let context = RenderContext::new( - &tera_ctx, - &config, - &config.default_language, - "", - &permalinks_ctx, - InsertAnchor::None, - ); - let res = render_content("```\n$ gutenberg server\n```", &context).unwrap(); - assert_eq!(res.body, "
$ gutenberg server\n
\n"); -} - -#[test] -fn can_highlight_code_block_no_lang() { - let tera_ctx = Tera::default(); - let permalinks_ctx = HashMap::new(); - let mut config = Config::default_for_test(); - config.markdown.highlight_code = true; - let context = RenderContext::new( - &tera_ctx, - &config, - &config.default_language, - "", - &permalinks_ctx, - InsertAnchor::None, - ); - let res = render_content("```\n$ gutenberg server\n$ ping\n```", &context).unwrap(); - assert_eq!( - res.body, - "
$ gutenberg server\n$ ping\n
\n" - ); -} - -#[test] -fn can_highlight_code_block_with_lang() { - let tera_ctx = Tera::default(); - let permalinks_ctx = HashMap::new(); - let mut config = Config::default_for_test(); - config.markdown.highlight_code = true; - let context = RenderContext::new( - &tera_ctx, - &config, - &config.default_language, - "", - &permalinks_ctx, - InsertAnchor::None, - ); - let res = render_content("```python\nlist.append(1)\n```", &context).unwrap(); - assert_eq!( - res.body, - "
list.append(1)\n
\n" - ); -} - -#[test] -fn can_higlight_code_block_with_unknown_lang() { - let tera_ctx = Tera::default(); - let permalinks_ctx = HashMap::new(); - let mut config = Config::default_for_test(); - config.markdown.highlight_code = true; - let context = RenderContext::new( - &tera_ctx, - &config, - &config.default_language, - "", - &permalinks_ctx, - InsertAnchor::None, - ); - let res = render_content("```yolo\nlist.append(1)\n```", &context).unwrap(); - // defaults to plain text - assert_eq!( - res.body, - "
list.append(1)\n
\n" - ); -} - -#[test] -fn can_render_shortcode() { - let permalinks_ctx = HashMap::new(); - let config = Config::default_for_test(); - let mut context = RenderContext::new( - &ZOLA_TERA, - &config, - &config.default_language, - "", - &permalinks_ctx, - InsertAnchor::None, - ); - let shortcode_def = utils::templates::get_shortcodes(&ZOLA_TERA); - context.set_shortcode_definitions(&shortcode_def); - let res = render_content( - r#" -Hello - -{{ youtube(id="ub36ffWAqgQ") }} - "#, - &context, - ) - .unwrap(); - println!("{:?}", res.body); - assert!(res.body.contains("

Hello

\n
")); - assert!(res - .body - .contains(r#"\n
\n\ntest quote

\n\n", - [] - ); -} - -const GOOGLE_SHORTCODE: ShortCode = ShortCode::new( - "google", - r#""#, - false, -); -// https://github.com/getzola/zola/issues/1500 -#[test] -fn can_handle_issue_1500() { - test_scenario!( - r#"foo {{ google(query="apple") }} bar."#, - "

foo

bar.

\n", - [GOOGLE_SHORTCODE] - ); -} diff --git a/components/rendering/tests/summary.rs b/components/rendering/tests/summary.rs deleted file mode 100644 index c5d9de5a95..0000000000 --- a/components/rendering/tests/summary.rs +++ /dev/null @@ -1,118 +0,0 @@ -mod common; - -use common::ShortCode; - -macro_rules! test_scenario_summary { - ($in_str:literal, $summary:literal, [$($shortcodes:ident),*]) => { - let config = config::Config::default_for_test(); - - #[allow(unused_mut)] - let mut tera = tera::Tera::default(); - - // Add all shortcodes - $( - tera.add_raw_template( - &format!("shortcodes/{}", $shortcodes.filename()), - $shortcodes.output - ).expect("Failed to add raw template"); - )* - - let permalinks = std::collections::HashMap::new(); - let mut context = rendering::RenderContext::new( - &tera, - &config, - &config.default_language, - "", - &permalinks, - front_matter::InsertAnchor::None, - ); - let shortcode_def = utils::templates::get_shortcodes(&tera); - context.set_shortcode_definitions(&shortcode_def); - - let rendered = rendering::render_content($in_str, &context); - assert!(rendered.is_ok()); - let rendered = rendered.unwrap(); - - assert!(rendered.summary_len.is_some()); - - let summary_len = rendered.summary_len.unwrap(); - assert_eq!(&rendered.body[..summary_len], $summary); - } -} - -#[test] -fn basic_summary() { - test_scenario_summary!("Hello World!\n\nAnd others!", "

Hello World!

\n", []); - test_scenario_summary!( - "Hello World!\n\nWow!\n\nAnd others!", - "

Hello World!

\n

Wow!

\n", - [] - ); -} - -#[test] -fn summary_with_headers() { - test_scenario_summary!( - "# Hello World!\n\nAnd others!", - "

Hello World!

\n", - [] - ); - test_scenario_summary!( - "# Hello World!\n\nWow!\n\nAnd others!", - "

Hello World!

\n

Wow!

\n", - [] - ); -} - -const MD_SIMPLE: ShortCode = - ShortCode::new("simple", "A lot of text to insert into the document", true); -const HTML_SIMPLE: ShortCode = - ShortCode::new("simple", "A lot of text to insert into the document", true); - -#[test] -fn summary_with_md_shortcodes() { - test_scenario_summary!( - "{{ simple() }}\n\nAnd others!", - "

A lot of text to insert into the document

\n", - [MD_SIMPLE] - ); - test_scenario_summary!( - "{{ simple() }}\n\nWow!\n\nAnd others!", - "

A lot of text to insert into the document

\n

Wow!

\n", - [MD_SIMPLE] - ); -} - -#[test] -fn summary_with_html_shortcodes() { - test_scenario_summary!( - "{{ simple() }}\n\nAnd others!", - "

A lot of text to insert into the document

\n", - [HTML_SIMPLE] - ); - test_scenario_summary!( - "{{ simple() }}\n\nWow!\n\nAnd others!", - "

A lot of text to insert into the document

\n

Wow!

\n", - [HTML_SIMPLE] - ); -} - -// const INNER: ShortCode = ShortCode::new("inner", "World", false); -// -// const MD_RECURSIVE: ShortCode = ShortCode::new("outer", "Hello {{ inner() }}!", true); -// const HTML_RECURSIVE: ShortCode = ShortCode::new("outer", "Hello {{ inner() }}!", false); -// -// #[test] -// fn summary_with_recursive_shortcodes() { -// test_scenario_summary!( -// "{{ outer() }}\n\nAnd others!", -// "

Hello World!

\n", -// [MD_RECURSIVE, INNER] -// ); -// -// test_scenario_summary!( -// "{{ outer() }}\n\nAnd others!", -// "

Hello World!

\n", -// [HTML_RECURSIVE, INNER] -// ); -// } diff --git a/components/rendering/tests/toc.rs b/components/rendering/tests/toc.rs deleted file mode 100644 index d1b0f947c3..0000000000 --- a/components/rendering/tests/toc.rs +++ /dev/null @@ -1,112 +0,0 @@ -mod common; - -use common::ShortCode; -use rendering::Heading; - -#[derive(PartialEq, Debug)] -struct HelperHeader { - title: String, - children: Vec, -} - -impl PartialEq for HelperHeader { - fn eq(&self, other: &Heading) -> bool { - self.title == other.title && self.children == other.children - } -} - -macro_rules! hh { - ($title:literal, [$($children:expr),*]) => {{ - HelperHeader { - title: $title.to_string(), - children: vec![$($children),*], - } - }} -} - -macro_rules! test_toc { - ($in_str:literal, $toc:expr, [$($shortcodes:ident),*]) => { - let config = config::Config::default_for_test(); - - #[allow(unused_mut)] - let mut tera = tera::Tera::default(); - - // Add all shortcodes - $( - tera.add_raw_template( - &format!("shortcodes/{}", $shortcodes.filename()), - $shortcodes.output - ).expect("Failed to add raw template"); - )* - - let permalinks = std::collections::HashMap::new(); - let mut context = rendering::RenderContext::new( - &tera, - &config, - &config.default_language, - "", - &permalinks, - front_matter::InsertAnchor::None, - ); - let shortcode_def = utils::templates::get_shortcodes(&tera); - context.set_shortcode_definitions(&shortcode_def); - - let rendered = rendering::render_content($in_str, &context); - assert!(rendered.is_ok()); - - let rendered = rendered.unwrap(); - let toc = rendered.toc.clone(); - - assert!($toc == toc); - } -} - -#[test] -fn basic_toc() { - test_toc!("Hello World!", >::new(), []); - test_toc!("# ABC\n## DEF", vec![hh!("ABC", [hh!("DEF", [])])], []); -} - -#[test] -fn all_layers() { - test_toc!( - "# A\n## B\n### C\n#### D\n##### E\n###### F\n", - vec![hh!("A", [hh!("B", [hh!("C", [hh!("D", [hh!("E", [hh!("F", [])])])])])])], - [] - ); -} - -#[test] -fn 
multiple_on_layer() { - test_toc!( - "# A\n## B\n## C\n### D\n## E\n### F\n", - vec![hh!("A", [hh!("B", []), hh!("C", [hh!("D", [])]), hh!("E", [hh!("F", [])])])], - [] - ); -} - -// const MD_SIMPLE1: ShortCode = ShortCode::new("simple", "Hello World!", true); -// const MD_SIMPLE2: ShortCode = ShortCode::new("simple2", "Wow, much cool!", true); -// -// #[test] -// fn with_shortcode_titles() { -// test_toc!( -// "# {{ simple() }}\n## {{ simple2() }}\n### ABC\n#### {{ simple() }}\n", -// vec![hh!( -// "Hello World!", -// [hh!("Wow, much cool!", [hh!("ABC", [hh!("Hello World!", [])])])] -// )], -// [MD_SIMPLE1, MD_SIMPLE2] -// ); -// } -// -// const MD_MULTILINE: ShortCode = ShortCode::new("multiline", "
\n Wow!\n
", false); -// -// #[test] -// fn with_multiline_shortcodes() { -// test_toc!( -// "# {{ multiline() }}\n{{ multiline() }}\n## {{ multiline()() }}\n", -// vec![hh!("Wow!", [hh!("Wow!", [])])], -// [MD_MULTILINE] -// ); -// } diff --git a/components/search/Cargo.toml b/components/search/Cargo.toml index bb01dcc9e6..d2b26b528f 100644 --- a/components/search/Cargo.toml +++ b/components/search/Cargo.toml @@ -1,19 +1,10 @@ [package] name = "search" version = "0.1.0" -authors = ["Vincent Prouillet "] -edition = "2018" +edition = "2021" [dependencies] -elasticlunr-rs = {version = "2", default-features = false, features = ["da", "no", "de", "du", "es", "fi", "fr", "it", "pt", "ro", "ru", "sv", "tr"] } -ammonia = "3" -lazy_static = "1" - errors = { path = "../errors" } -library = { path = "../library" } +content = { path = "../content" } config = { path = "../config" } - -[features] -default = [] -indexing-zh = ["elasticlunr-rs/zh"] -indexing-ja = ["elasticlunr-rs/ja"] +libs = { path = "../libs" } diff --git a/components/search/src/lib.rs b/components/search/src/lib.rs index d53e59e855..d03b622a81 100644 --- a/components/search/src/lib.rs +++ b/components/search/src/lib.rs @@ -1,52 +1,48 @@ use std::collections::{HashMap, HashSet}; -use elasticlunr::pipeline; -use elasticlunr::pipeline::TokenizerFn; -use elasticlunr::{Index, Language}; -use lazy_static::lazy_static; +use libs::ammonia; +use libs::elasticlunr::{lang, Index, IndexBuilder}; +use libs::once_cell::sync::Lazy; use config::{Config, Search}; +use content::{Library, Section}; use errors::{bail, Result}; -use library::{Library, Section}; pub const ELASTICLUNR_JS: &str = include_str!("elasticlunr.min.js"); -lazy_static! 
{ - static ref AMMONIA: ammonia::Builder<'static> = { - let mut clean_content = HashSet::new(); - clean_content.insert("script"); - clean_content.insert("style"); - let mut builder = ammonia::Builder::new(); - builder - .tags(HashSet::new()) - .tag_attributes(HashMap::new()) - .generic_attributes(HashSet::new()) - .link_rel(None) - .allowed_classes(HashMap::new()) - .clean_content_tags(clean_content); - builder - }; -} - -fn build_fields(search_config: &Search) -> Vec { - let mut fields = vec![]; +static AMMONIA: Lazy> = Lazy::new(|| { + let mut clean_content = HashSet::new(); + clean_content.insert("script"); + clean_content.insert("style"); + let mut builder = ammonia::Builder::new(); + builder + .tags(HashSet::new()) + .tag_attributes(HashMap::new()) + .generic_attributes(HashSet::new()) + .link_rel(None) + .allowed_classes(HashMap::new()) + .clean_content_tags(clean_content); + builder +}); + +fn build_fields(search_config: &Search, mut index: IndexBuilder) -> IndexBuilder { if search_config.include_title { - fields.push("title".to_owned()); + index = index.add_field("title"); } if search_config.include_description { - fields.push("description".to_owned()); + index = index.add_field("description"); } if search_config.include_path { - fields.push("path".to_owned()); + index = index.add_field_with_tokenizer("path", Box::new(path_tokenizer)); } if search_config.include_content { - fields.push("body".to_owned()); + index = index.add_field("body") } - fields + index } fn path_tokenizer(text: &str) -> Vec { @@ -56,34 +52,6 @@ fn path_tokenizer(text: &str) -> Vec { .collect() } -fn build_tokenizers(search_config: &Search, language: Language) -> Vec { - let text_tokenizer = match language { - #[cfg(feature = "indexing-zh")] - Language::Chinese => pipeline::tokenize_chinese, - #[cfg(feature = "indexing-ja")] - Language::Japanese => pipeline::tokenize_japanese, - _ => pipeline::tokenize, - }; - let mut tokenizers: Vec = vec![]; - if search_config.include_title { - 
tokenizers.push(text_tokenizer); - } - - if search_config.include_description { - tokenizers.push(text_tokenizer); - } - - if search_config.include_path { - tokenizers.push(path_tokenizer); - } - - if search_config.include_content { - tokenizers.push(text_tokenizer); - } - - tokenizers -} - fn fill_index( search_config: &Search, title: &Option, @@ -112,7 +80,7 @@ fn fill_index( // TODO: fix it like the truncate in Tera match body.char_indices().nth(truncate_len) { None => row.push(body), - Some((idx, _)) => row.push((&body[..idx]).to_string()), + Some((idx, _)) => row.push((body[..idx]).to_string()), }; } else { row.push(body); @@ -127,26 +95,20 @@ fn fill_index( /// Errors if the language given is not available in Elasticlunr /// TODO: is making `in_search_index` apply to subsections of a `false` section useful? pub fn build_index(lang: &str, library: &Library, config: &Config) -> Result { - let language = match Language::from_code(lang) { + let language = match lang::from_code(lang) { Some(l) => l, None => { bail!("Tried to build search index for language {} which is not supported", lang); } }; let language_options = &config.languages[lang]; - let mut index = Index::with_language(language, &build_fields(&language_options.search)); - - let tokenizers = build_tokenizers(&language_options.search, language); + let mut index = IndexBuilder::with_language(language); + index = build_fields(&language_options.search, index); + let mut index = index.build(); - for section in library.sections_values() { + for (_, section) in &library.sections { if section.lang == lang { - add_section_to_index( - &mut index, - section, - library, - &language_options.search, - tokenizers.clone(), - ); + add_section_to_index(&mut index, section, library, &language_options.search); } } @@ -158,7 +120,6 @@ fn add_section_to_index( section: &Section, library: &Library, search_config: &Search, - tokenizers: Vec, ) { if !section.meta.in_search_index { return; @@ -166,7 +127,7 @@ fn 
add_section_to_index( // Don't index redirecting sections if section.meta.redirect_to.is_none() { - index.add_doc_with_tokenizers( + index.add_doc( §ion.permalink, &fill_index( search_config, @@ -175,17 +136,16 @@ fn add_section_to_index( §ion.path, §ion.content, ), - tokenizers.clone(), ); } for key in §ion.pages { - let page = library.get_page_by_key(*key); + let page = &library.pages[key]; if !page.meta.in_search_index { continue; } - index.add_doc_with_tokenizers( + index.add_doc( &page.permalink, &fill_index( search_config, @@ -194,7 +154,6 @@ fn add_section_to_index( &page.path, &page.content, ), - tokenizers.clone(), ); } } @@ -208,21 +167,21 @@ mod tests { #[test] fn can_build_fields() { let mut config = Config::default(); - let fields = build_fields(&config.search); - assert_eq!(fields, vec!["title", "body"]); + let index = build_fields(&config.search, IndexBuilder::new()).build(); + assert_eq!(index.get_fields(), vec!["title", "body"]); config.search.include_content = false; config.search.include_description = true; - let fields = build_fields(&config.search); - assert_eq!(fields, vec!["title", "description"]); + let index = build_fields(&config.search, IndexBuilder::new()).build(); + assert_eq!(index.get_fields(), vec!["title", "description"]); config.search.include_content = true; - let fields = build_fields(&config.search); - assert_eq!(fields, vec!["title", "description", "body"]); + let index = build_fields(&config.search, IndexBuilder::new()).build(); + assert_eq!(index.get_fields(), vec!["title", "description", "body"]); config.search.include_title = false; - let fields = build_fields(&config.search); - assert_eq!(fields, vec!["description", "body"]); + let index = build_fields(&config.search, IndexBuilder::new()).build(); + assert_eq!(index.get_fields(), vec!["description", "body"]); } #[test] diff --git a/components/site/Cargo.toml b/components/site/Cargo.toml index d8589dac63..3388f2dc88 100644 --- a/components/site/Cargo.toml +++ 
b/components/site/Cargo.toml @@ -6,33 +6,19 @@ edition = "2018" include = ["src/**/*"] [dependencies] -tera = "1" -glob = "0.3" -walkdir = "2" -rayon = "1" -serde = "1" -serde_derive = "1" -sass-rs = "0.2" -lazy_static = "1.1" -relative-path = "1" -slotmap = "1" -url = "2" +serde = { version = "1.0", features = ["derive"] } errors = { path = "../errors" } config = { path = "../config" } +console = { path = "../console" } utils = { path = "../utils" } templates = { path = "../templates" } -front_matter = { path = "../front_matter" } search = { path = "../search" } imageproc = { path = "../imageproc" } -library = { path = "../library" } link_checker = { path = "../link_checker" } +libs = { path = "../libs" } +content = { path = "../content" } [dev-dependencies] tempfile = "3" -path-slash = "0.1.4" - -[features] -default = [] -rust-tls = ["templates/rust-tls", "link_checker/rust-tls"] -native-tls = ["templates/native-tls", "link_checker/native-tls"] +path-slash = "0.2" diff --git a/components/site/benches/load.rs b/components/site/benches/load.rs index cd239c5665..2916fd4b8e 100644 --- a/components/site/benches/load.rs +++ b/components/site/benches/load.rs @@ -1,4 +1,4 @@ -//! Benchmarking loading/markdown rendering of generated sites of various sizes +//! 
Benchmarking loading/markdown rendering of generated sites of various sizes #![feature(test)] extern crate test; diff --git a/components/site/benches/site.rs b/components/site/benches/site.rs index 460a98422b..0ec69d9edc 100644 --- a/components/site/benches/site.rs +++ b/components/site/benches/site.rs @@ -3,7 +3,7 @@ extern crate test; use std::env; -use library::Paginator; +use content::Paginator; use site::Site; use tempfile::tempdir; @@ -43,7 +43,7 @@ fn bench_render_feed(b: &mut test::Bencher) { site.set_output_path(&public); b.iter(|| { site.render_feed( - site.library.read().unwrap().pages_values(), + site.library.read().unwrap().pages.values().collect(), None, &site.config.default_language, |c| c, @@ -68,7 +68,7 @@ fn bench_render_paginated(b: &mut test::Bencher) { let public = &tmp_dir.path().join("public"); site.set_output_path(&public); let library = site.library.read().unwrap(); - let section = library.sections_values()[0]; + let section = library.sections.values().collect::>()[0]; let paginator = Paginator::from_section(section, &library); b.iter(|| site.render_paginated(Vec::new(), &paginator)); diff --git a/components/site/src/feed.rs b/components/site/src/feed.rs index b64ea79550..8db2cc4617 100644 --- a/components/site/src/feed.rs +++ b/components/site/src/feed.rs @@ -1,12 +1,13 @@ +use std::cmp::Ordering; use std::path::PathBuf; -use rayon::prelude::*; -use serde_derive::Serialize; -use tera::Context; +use libs::rayon::prelude::*; +use libs::tera::Context; +use serde::Serialize; use crate::Site; +use content::{Page, TaxonomyTerm}; use errors::Result; -use library::{sort_actual_pages_by_date, Page, TaxonomyItem}; use utils::templates::render_template; #[derive(Debug, Clone, PartialEq, Serialize)] @@ -17,7 +18,7 @@ pub struct SerializedFeedTaxonomyItem<'a> { } impl<'a> SerializedFeedTaxonomyItem<'a> { - pub fn from_item(item: &'a TaxonomyItem) -> Self { + pub fn from_item(item: &'a TaxonomyTerm) -> Self { SerializedFeedTaxonomyItem { name: 
&item.name, slug: &item.slug, @@ -40,7 +41,14 @@ pub fn render_feed( return Ok(None); } - pages.par_sort_unstable_by(sort_actual_pages_by_date); + pages.par_sort_unstable_by(|a, b| { + let ord = b.meta.datetime.unwrap().cmp(&a.meta.datetime.unwrap()); + if ord == Ordering::Equal { + a.permalink.cmp(&b.permalink) + } else { + ord + } + }); let mut context = Context::new(); context.insert( @@ -54,9 +62,12 @@ pub fn render_feed( ); let library = site.library.read().unwrap(); // limit to the last n elements if the limit is set; otherwise use all. - let num_entries = site.config.feed_limit.unwrap_or_else(|| pages.len()); - let p = - pages.iter().take(num_entries).map(|x| x.to_serialized_basic(&library)).collect::>(); + let num_entries = site.config.feed_limit.unwrap_or(pages.len()); + let p = pages + .iter() + .take(num_entries) + .map(|x| x.serialize_without_siblings(&library)) + .collect::>(); context.insert("pages", &p); context.insert("config", &site.config.serialize(lang)); diff --git a/components/site/src/lib.rs b/components/site/src/lib.rs index 2d80a09df5..788d46a5c1 100644 --- a/components/site/src/lib.rs +++ b/components/site/src/lib.rs @@ -1,5 +1,6 @@ pub mod feed; pub mod link_checking; +mod minify; pub mod sass; pub mod sitemap; pub mod tpls; @@ -9,29 +10,26 @@ use std::fs::remove_dir_all; use std::path::{Path, PathBuf}; use std::sync::{Arc, Mutex, RwLock}; -use lazy_static::lazy_static; -use rayon::prelude::*; -use tera::{Context, Tera}; -use walkdir::{DirEntry, WalkDir}; +use libs::once_cell::sync::Lazy; +use libs::rayon::prelude::*; +use libs::tera::{Context, Tera}; +use libs::walkdir::{DirEntry, WalkDir}; use config::{get_config, Config}; -use errors::{bail, Error, Result}; -use front_matter::InsertAnchor; -use library::{find_taxonomies, Library, Page, Paginator, Section, Taxonomy}; -use relative_path::RelativePathBuf; +use content::{Library, Page, Paginator, Section, Taxonomy}; +use errors::{anyhow, bail, Context as ErrorContext, Result}; +use 
libs::relative_path::RelativePathBuf; use std::time::Instant; use templates::{load_tera, render_redirect_template}; use utils::fs::{ copy_directory, copy_file_if_needed, create_directory, create_file, ensure_directory_exists, }; -use utils::minify; use utils::net::get_available_port; use utils::templates::{render_template, ShortcodeDefinition}; +use utils::types::InsertAnchor; -lazy_static! { - /// The in-memory rendered map content - pub static ref SITE_CONTENT: Arc>> = Arc::new(RwLock::new(HashMap::new())); -} +pub static SITE_CONTENT: Lazy>>> = + Lazy::new(|| Arc::new(RwLock::new(HashMap::new()))); /// Where are we building the site #[derive(Debug, Clone, Copy, Eq, PartialEq)] @@ -73,7 +71,7 @@ impl Site { pub fn new, P2: AsRef>(path: P, config_file: P2) -> Result { let path = path.as_ref(); let config_file = config_file.as_ref(); - let mut config = get_config(config_file)?; + let mut config = get_config(&path.join(config_file))?; if let Some(theme) = config.theme.clone() { // Grab data from the extra section of the theme @@ -101,7 +99,7 @@ impl Site { permalinks: HashMap::new(), include_drafts: false, // We will allocate it properly later on - library: Arc::new(RwLock::new(Library::new(0, 0, false))), + library: Arc::new(RwLock::new(Library::default())), build_mode: BuildMode::Disk, shortcode_definitions, }; @@ -144,7 +142,7 @@ impl Site { self.live_reload = Some(live_reload_port); } - /// Reloads the templates and rebuild the site without re-rendering the Markdown. 
pub fn reload_templates(&mut self) -> Result<()> { self.tera.full_reload()?; // TODO: be smarter than that, no need to recompile sass for example @@ -164,15 +162,16 @@ impl Site { /// Reads all .md files in the `content` directory and create pages/sections /// out of them pub fn load(&mut self) -> Result<()> { - let base_path = self.base_path.to_string_lossy().replace("\\", "/"); + let base_path = self.base_path.to_string_lossy().replace('\\', "/"); - self.library = Arc::new(RwLock::new(Library::new(0, 0, self.config.is_multilingual()))); + self.library = Arc::new(RwLock::new(Library::new(&self.config))); let mut pages_insert_anchors = HashMap::new(); // not the most elegant loop, but this is necessary to use skip_current_dir // which we can only decide to use after we've deserialised the section // so it's kinda necessecary - let mut dir_walker = WalkDir::new(format!("{}/{}", base_path, "content/")).into_iter(); + let mut dir_walker = + WalkDir::new(format!("{}/{}", base_path, "content/")).follow_links(true).into_iter(); let mut allowed_index_filenames: Vec<_> = self .config .other_languages() @@ -219,24 +218,22 @@ impl Site { // is it a section or not? if path.is_dir() { // if we are processing a section we have to collect - // index files for all languages and process them simultaniously + // index files for all languages and process them simultaneously // before any of the pages let index_files = WalkDir::new(&path) + .follow_links(true) .max_depth(1) .into_iter() .filter_map(|e| match e { Err(_) => None, Ok(f) => { let path_str = f.path().file_name().unwrap().to_str().unwrap(); + // https://github.com/getzola/zola/issues/1244 if f.path().is_file() && allowed_index_filenames.iter().any(|s| s == path_str) { Some(f) } else { - // https://github.com/getzola/zola/issues/1244 - if path_str.starts_with("_index.") { - println!("Expected a section filename, got `{}`. 
Allowed values: `{:?}`", path_str, &allowed_index_filenames); - } None } } @@ -273,9 +270,14 @@ impl Site { { let library = self.library.read().unwrap(); - let collisions = library.check_for_path_collisions(); + let collisions = library.find_path_collisions(); if !collisions.is_empty() { - return Err(Error::from_collisions(collisions)); + let mut msg = String::from("Found path collisions:\n"); + for (path, filepaths) in collisions { + let row = format!("- `{}` from files {:?}\n", path, filepaths); + msg.push_str(&row); + } + return Err(anyhow!(msg)); } } @@ -285,13 +287,52 @@ impl Site { tpls::register_early_global_fns(self)?; self.populate_sections(); self.render_markdown()?; + { + let mut lib = self.library.write().unwrap(); + lib.fill_backlinks(); + } tpls::register_tera_global_fns(self); // Needs to be done after rendering markdown as we only get the anchors at that point - link_checking::check_internal_links_with_anchors(self)?; + let internal_link_messages = link_checking::check_internal_links_with_anchors(self); + + // log any broken internal links and error out if needed + if !internal_link_messages.is_empty() { + let messages: Vec = internal_link_messages + .iter() + .enumerate() + .map(|(i, msg)| format!(" {}. {}", i + 1, msg)) + .collect(); + let msg = format!( + "Found {} broken internal anchor link(s)\n{}", + messages.len(), + messages.join("\n") + ); + match self.config.link_checker.internal_level { + config::LinkCheckerLevel::Warn => console::warn(&msg), + config::LinkCheckerLevel::Error => return Err(anyhow!(msg)), + } + } + // check external links, log the results, and error out if needed if self.config.is_in_check_mode() { - link_checking::check_external_links(self)?; + let external_link_messages = link_checking::check_external_links(self); + if !external_link_messages.is_empty() { + let messages: Vec = external_link_messages + .iter() + .enumerate() + .map(|(i, msg)| format!(" {}. 
{}", i + 1, msg)) + .collect(); + let msg = format!( + "Found {} broken external link(s)\n{}", + messages.len(), + messages.join("\n") + ); + match self.config.link_checker.external_level { + config::LinkCheckerLevel::Warn => console::warn(&msg), + config::LinkCheckerLevel::Error => return Err(anyhow!(msg)), + } + } } Ok(()) @@ -301,7 +342,7 @@ impl Site { /// a _index.md to render the index page at the root of the site pub fn create_default_index_sections(&mut self) -> Result<()> { for (index_path, lang) in self.index_section_paths() { - if let Some(index_section) = self.library.read().unwrap().get_section(&index_path) { + if let Some(index_section) = self.library.read().unwrap().sections.get(&index_path) { if self.config.build_search_index && !index_section.meta.in_search_index { bail!( "You have enabled search in the config but disabled it in the index section: \ @@ -312,7 +353,7 @@ impl Site { } let mut library = self.library.write().expect("Get lock for load"); // Not in else because of borrow checker - if !library.contains_section(&index_path) { + if !library.sections.contains_key(&index_path) { let mut index_section = Section::default(); index_section.file.parent = self.content_path.clone(); index_section.file.filename = @@ -324,14 +365,19 @@ impl Site { let filename = format!("_index.{}.md", l); index_section.file.path = self.content_path.join(&filename); index_section.file.relative = filename; + index_section.file.canonical = self.content_path.join(format!("_index.{}", l)); } else { index_section.file.name = "_index".to_string(); index_section.permalink = self.config.make_permalink(""); index_section.file.path = self.content_path.join("_index.md"); index_section.file.relative = "_index.md".to_string(); + index_section.file.canonical = self.content_path.join("_index"); index_section.path = "/".to_string(); } - index_section.lang = index_section.file.find_language(&self.config)?; + index_section.lang = index_section.file.find_language( + 
&self.config.default_language, + &self.config.other_languages_codes(), + )?; library.insert_section(index_section); } } @@ -350,7 +396,7 @@ impl Site { // This is needed in the first place because of silly borrow checker let mut pages_insert_anchors = HashMap::new(); - for (_, p) in self.library.read().unwrap().pages() { + for (_, p) in &self.library.read().unwrap().pages { pages_insert_anchors.insert( p.file.path.clone(), self.find_parent_section_insert_anchor(&p.file.parent.clone(), &p.lang), @@ -359,7 +405,7 @@ impl Site { let mut library = self.library.write().expect("Get lock for render_markdown"); library - .pages_mut() + .pages .values_mut() .collect::>() .par_iter_mut() @@ -376,7 +422,7 @@ impl Site { .collect::>()?; library - .sections_mut() + .sections .values_mut() .collect::>() .par_iter_mut() @@ -391,6 +437,16 @@ impl Site { /// Add a page to the site /// The `render` parameter is used in the serve command with --fast, when rebuilding a page. pub fn add_page(&mut self, mut page: Page, render_md: bool) -> Result<()> { + for taxa_name in page.meta.taxonomies.keys() { + if !self.config.has_taxonomy(taxa_name, &page.lang) { + bail!( + "Page `{}` has taxonomy `{}` which is not defined in config.toml", + page.file.path.display(), + taxa_name + ); + } + } + self.permalinks.insert(page.file.relative.clone(), page.permalink.clone()); if render_md { let insert_anchor = @@ -405,7 +461,7 @@ impl Site { } let mut library = self.library.write().expect("Get lock for add_page"); - library.remove_page(&page.file.path); + library.pages.remove(&page.file.path); library.insert_page(page); Ok(()) @@ -419,7 +475,7 @@ impl Site { self.populate_sections(); self.populate_taxonomies()?; let library = self.library.read().unwrap(); - let page = library.get_page(&path).unwrap(); + let page = library.pages.get(path).unwrap(); self.render_page(page) } @@ -436,7 +492,7 @@ impl Site { )?; } let mut library = self.library.write().expect("Get lock for add_section"); - 
library.remove_section(§ion.file.path); + library.sections.remove(§ion.file.path); library.insert_section(section); Ok(()) @@ -449,7 +505,7 @@ self.add_section(section, true)?; self.populate_sections(); let library = self.library.read().unwrap(); - let section = library.get_section(&path).unwrap(); + let section = library.sections.get(path).unwrap(); self.render_section(section, true) } @@ -465,7 +521,7 @@ } else { parent_path.join("_index.md") }; - match self.library.read().unwrap().get_section(&parent) { + match self.library.read().unwrap().sections.get(&parent) { Some(s) => s.meta.insert_anchor_links, None => InsertAnchor::None, } @@ -475,17 +531,12 @@ /// as well as the pages for each section pub fn populate_sections(&mut self) { let mut library = self.library.write().expect("Get lock for populate_sections"); - library.populate_sections(&self.config); + library.populate_sections(&self.config, &self.content_path); } /// Find all the tags and categories if it's asked in the config pub fn populate_taxonomies(&mut self) -> Result<()> { - if self.config.taxonomies.is_empty() { - return Ok(()); - } - - self.taxonomies = find_taxonomies(&self.config, &self.library.read().unwrap())?; - + self.taxonomies = self.library.read().unwrap().find_taxonomies(&self.config); Ok(()) } @@ -538,8 +589,7 @@ pub fn clean(&self) -> Result<()> { if self.output_path.exists() { // Delete current `public` directory so we can start fresh - remove_dir_all(&self.output_path) - .map_err(|e| Error::chain("Couldn't delete output directory", e))?; + remove_dir_all(&self.output_path).context("Couldn't delete output directory")?; } Ok(()) @@ -634,7 +684,7 @@ } start = log_time(start, "Cleaned folder"); - // Generate/move all assets before rendering any content if let Some(ref theme) = self.config.theme { let theme_path = self.base_path.join("themes").join(theme); if 
theme_path.join("sass").exists() { @@ -666,15 +716,10 @@ impl Site { let library = self.library.read().unwrap(); if self.config.generate_feed { let is_multilingual = self.config.is_multilingual(); - let pages = if is_multilingual { - library - .pages_values() - .iter() - .filter(|p| p.lang == self.config.default_language) - .cloned() - .collect() + let pages: Vec<_> = if is_multilingual { + library.pages.values().filter(|p| p.lang == self.config.default_language).collect() } else { - library.pages_values() + library.pages.values().collect() }; self.render_feed(pages, None, &self.config.default_language, |c| c)?; start = log_time(start, "Generated feed in default language"); @@ -684,8 +729,7 @@ impl Site { if !language.generate_feed { continue; } - let pages = - library.pages_values().iter().filter(|p| &p.lang == code).cloned().collect(); + let pages: Vec<_> = library.pages.values().filter(|p| &p.lang == code).collect(); self.render_feed(pages, Some(&PathBuf::from(code)), code, |c| c)?; start = log_time(start, "Generated feed in other language"); } @@ -714,8 +758,8 @@ impl Site { for t in &self.config.markdown.highlight_themes_css { let p = self.static_path.join(&t.filename); if !p.exists() { - let content = &self.config.markdown.export_theme_css(&t.theme); - create_file(&p, &content)?; + let content = &self.config.markdown.export_theme_css(&t.theme)?; + create_file(&p, content)?; } } @@ -780,12 +824,12 @@ impl Site { pub fn render_aliases(&self) -> Result<()> { ensure_directory_exists(&self.output_path)?; let library = self.library.read().unwrap(); - for (_, page) in library.pages() { + for (_, page) in &library.pages { for alias in &page.meta.aliases { self.render_alias(alias, &page.permalink)?; } } - for (_, section) in library.sections() { + for (_, section) in &library.sections { for alias in §ion.meta.aliases { self.render_alias(alias, §ion.permalink)?; } @@ -818,6 +862,9 @@ impl Site { /// Renders all taxonomies pub fn render_taxonomies(&self) -> Result<()> { 
for taxonomy in &self.taxonomies { + if !taxonomy.kind.render { + continue; + } self.render_taxonomy(taxonomy)?; } @@ -870,9 +917,14 @@ impl Site { } if taxonomy.kind.feed { + let tax_path = if taxonomy.lang == self.config.default_language { + PathBuf::from(format!("{}/{}", taxonomy.slug, item.slug)) + } else { + PathBuf::from(format!("{}/{}/{}", taxonomy.lang, taxonomy.slug, item.slug)) + }; self.render_feed( - item.pages.iter().map(|p| library.get_page_by_key(*p)).collect(), - Some(&PathBuf::from(format!("{}/{}", taxonomy.slug, item.slug))), + item.pages.iter().map(|p| library.pages.get(p).unwrap()).collect(), + Some(&tax_path), &taxonomy.lang, |mut context: Context| { context.insert("taxonomy", &taxonomy.kind); @@ -957,8 +1009,7 @@ impl Site { if let Some(base) = base_path { let mut components = Vec::new(); for component in base.components() { - // TODO: avoid cloning the paths - components.push(component.as_os_str().to_string_lossy().as_ref().to_string()); + components.push(component.as_os_str().to_string_lossy()); } self.write_content( &components.iter().map(|x| x.as_ref()).collect::>(), @@ -999,13 +1050,13 @@ impl Site { if section.meta.generate_feed { let library = &self.library.read().unwrap(); - let pages = section.pages.iter().map(|k| library.get_page_by_key(*k)).collect(); + let pages = section.pages.iter().map(|k| library.pages.get(k).unwrap()).collect(); self.render_feed( pages, Some(&PathBuf::from(§ion.path[1..])), §ion.lang, |mut context: Context| { - context.insert("section", §ion.to_serialized(library)); + context.insert("section", §ion.serialize(library)); context }, )?; @@ -1028,7 +1079,7 @@ impl Site { section .pages .par_iter() - .map(|k| self.render_page(self.library.read().unwrap().get_page_by_key(*k))) + .map(|k| self.render_page(self.library.read().unwrap().pages.get(k).unwrap())) .collect::>()?; } @@ -1068,9 +1119,9 @@ impl Site { self.library .read() .unwrap() - .sections_values() - .into_par_iter() - .map(|s| self.render_section(s, 
true)) + .sections + .par_iter() + .map(|(_, s)| self.render_section(s, true)) .collect::>() } diff --git a/components/site/src/link_checking.rs b/components/site/src/link_checking.rs index b42b2d1a62..06768fe7b7 100644 --- a/components/site/src/link_checking.rs +++ b/components/site/src/link_checking.rs @@ -1,37 +1,34 @@ -use rayon::prelude::*; +use core::time; +use std::path::Path; +use std::{collections::HashMap, path::PathBuf, thread}; + +use config::LinkCheckerLevel; +use libs::rayon::prelude::*; use crate::Site; -use core::time; use errors::{bail, Result}; -use errors::{Error, ErrorKind}; -use std::{collections::HashMap, path::PathBuf, thread}; -use url::Url; +use libs::rayon; +use libs::url::Url; /// Check whether all internal links pointing to explicit anchor fragments are valid. /// /// This is very similar to `check_external_links`, although internal links checking -/// is always performed (while external ones only conditionally in `zola check`). -pub fn check_internal_links_with_anchors(site: &Site) -> Result<()> { +/// is always performed (while external ones only conditionally in `zola check`). If broken links +/// are encountered, the `internal_level` setting in config.toml will determine whether they are +/// treated as warnings or errors. +pub fn check_internal_links_with_anchors(site: &Site) -> Vec { println!("Checking all internal links with anchors."); let library = site.library.write().expect("Get lock for check_internal_links_with_anchors"); // Chain all internal links, from both sections and pages. 
 let page_links = library - .pages() + .pages .values() - .map(|p| { - let path = &p.file.path; - p.internal_links.iter().map(move |l| (path.clone(), l)) - }) - .flatten(); + .flat_map(|p| p.internal_links.iter().map(move |l| (p.file.path.clone(), l))); let section_links = library - .sections() + .sections .values() - .map(|p| { - let path = &p.file.path; - p.internal_links.iter().map(move |l| (path.clone(), l)) - }) - .flatten(); + .flat_map(|p| p.internal_links.iter().map(move |l| (p.file.path.clone(), l))); let all_links = page_links.chain(section_links); // Only keep links with anchor fragments, and count them too. @@ -46,7 +43,7 @@ pub fn check_internal_links_with_anchors(site: &Site) -> Result<()> { // Check for targets existence (including anchors), then keep only the faulty // entries for error reporting purposes. - let missing_targets = links_with_anchors.filter(|(_, md_path, anchor)| { + let missing_targets = links_with_anchors.filter(|(page, md_path, anchor)| { // There are a few `expect` here since the presence of the .md file will // already have been checked in the markdown rendering let mut full_path = site.base_path.clone(); @@ -54,22 +51,33 @@ for part in md_path.split('/') { full_path.push(part); } - if md_path.contains("_index.md") { - let section = library - .get_section(&full_path) - .expect("Couldn't find section in check_internal_links_with_anchors"); + // NOTE: This will also match _index.foobar.md where foobar is not a language + // as well as any other string containing "_index." which is now referenced as + // unsupported page path in the docs.
+ if md_path.contains("_index.") { + let section = library.sections.get(&full_path).unwrap_or_else(|| { + panic!( + "Couldn't find section {} in check_internal_links_with_anchors from page {:?}", + md_path, + page.strip_prefix(&site.base_path).unwrap() + ) + }); !section.has_anchor(anchor) } else { - let page = library - .get_page(&full_path) - .expect("Couldn't find section in check_internal_links_with_anchors"); + let page = library.pages.get(&full_path).unwrap_or_else(|| { + panic!( + "Couldn't find page {} in check_internal_links_with_anchors from page {:?}", + md_path, + page.strip_prefix(&site.base_path).unwrap() + ) + }); - !(page.has_anchor(anchor)||page.has_anchor_id(anchor)) + !(page.has_anchor(anchor) || page.has_anchor_id(anchor)) } }); // Format faulty entries into error messages, and collect them. - let errors = missing_targets + let messages = missing_targets .map(|(page_path, md_path, anchor)| { format!( "The anchor in the link `@/{}#{}` in {} does not exist.", @@ -81,19 +89,20 @@ pub fn check_internal_links_with_anchors(site: &Site) -> Result<()> { .collect::>(); // Finally emit a summary, and return overall anchors-checking result. 
- match errors.len() { - 0 => { - println!("> Successfully checked {} internal link(s) with anchors.", anchors_total); - Ok(()) - } - errors_total => { - println!( - "> Checked {} internal link(s) with anchors: {} target(s) missing.", - anchors_total, errors_total, - ); - Err(Error { kind: ErrorKind::Msg(errors.join("\n")), source: None }) - } + if messages.is_empty() { + println!("> Successfully checked {} internal link(s) with anchors.", anchors_total); + } else { + println!( + "> Checked {} internal link(s) with anchors: {} target(s) missing.", + anchors_total, + messages.len(), + ); } + messages +} + +fn should_skip_by_prefix(link: &str, skip_prefixes: &[String]) -> bool { + skip_prefixes.iter().any(|prefix| link.starts_with(prefix)) } fn get_link_domain(link: &str) -> Result { @@ -106,108 +115,145 @@ fn get_link_domain(link: &str) -> Result { }; } -pub fn check_external_links(site: &Site) -> Result<()> { +/// Checks all external links and returns all the errors that were encountered. 
+/// Empty vec == all good +pub fn check_external_links(site: &Site) -> Vec { let library = site.library.write().expect("Get lock for check_external_links"); - let mut all_links: Vec<(PathBuf, String, String)> = vec![]; + struct LinkDef { + file_path: PathBuf, + external_link: String, + domain: String, + } - for p in library.pages_values().into_iter() { - for external_link in p.clone().external_links.into_iter() { - let domain = get_link_domain(&external_link)?; - all_links.push((p.file.path.clone(), external_link, domain)); + impl LinkDef { + pub fn new(file_path: &Path, external_link: &str, domain: String) -> Self { + Self { + file_path: file_path.to_path_buf(), + external_link: external_link.to_string(), + domain, + } } } - for s in library.sections_values().into_iter() { - for external_link in s.clone().external_links.into_iter() { - let domain = get_link_domain(&external_link)?; - all_links.push((s.file.path.clone(), external_link, domain)); + let mut messages: Vec = vec![]; + let mut external_links = Vec::new(); + for p in library.pages.values() { + external_links.push((&p.file.path, &p.external_links)); + } + for s in library.sections.values() { + external_links.push((&s.file.path, &s.external_links)); + } + + let mut checked_links: Vec = vec![]; + let mut skipped_link_count: u32 = 0; + let mut invalid_url_links: u32 = 0; + // First we look at all the external links, skip those the user wants to skip and record + // the ones that have invalid URLs + for (file_path, links) in external_links { + for link in links { + if should_skip_by_prefix(link, &site.config.link_checker.skip_prefixes) { + skipped_link_count += 1; + } else { + match get_link_domain(link) { + Ok(domain) => { + checked_links.push(LinkDef::new(file_path, link, domain)); + } + Err(err) => { + // We could use the messages.len() to keep track of them for below + // but it's more explicit this way + invalid_url_links += 1; + messages.push(err.to_string()); + } + } + } } } - println!("Checking {} 
external link(s).", all_links.len()); + println!( + "Checking {} external link(s). Skipping {} external link(s).{}", + checked_links.len(), + skipped_link_count, + if invalid_url_links == 0 { + "".to_string() + } else { + format!(" {} link(s) had unparseable URLs.", invalid_url_links) + } + ); - let mut links_by_domain: HashMap> = HashMap::new(); + if checked_links.is_empty() { + return Vec::new(); + } - for link in all_links.iter() { - links_by_domain.entry(link.2.to_string()).or_default(); - // Insert content path and link under the domain key - links_by_domain - .get_mut(&link.2.to_string()) - .unwrap() - .push((link.0.clone(), link.1.clone())); + // error out if we're in error mode and any external URLs couldn't be parsed + if site.config.link_checker.external_level == LinkCheckerLevel::Error && !messages.is_empty() { + return messages; } - if all_links.is_empty() { - return Ok(()); + let mut links_by_domain: HashMap<&str, Vec<&LinkDef>> = HashMap::new(); + for link in checked_links.iter() { + if links_by_domain.contains_key(link.domain.as_str()) { + links_by_domain.get_mut(link.domain.as_str()).unwrap().push(link); + } else { + links_by_domain.insert(link.domain.as_str(), vec![link]); + } } // create thread pool with lots of threads so we can fetch // (almost) all pages simultaneously, limiting all links for a single // domain to one thread to avoid rate-limiting let threads = std::cmp::min(links_by_domain.len(), 8); - let pool = rayon::ThreadPoolBuilder::new() - .num_threads(threads) - .build() - .map_err(|e| Error { kind: ErrorKind::Msg(e.to_string()), source: None })?; - - let errors = pool.install(|| { - links_by_domain - .par_iter() - .map(|(_domain, links)| { - let mut links_to_process = links.len(); - links - .iter() - .filter_map(move |(page_path, link)| { - links_to_process -= 1; - - if site - .config - .link_checker - .skip_prefixes + match rayon::ThreadPoolBuilder::new().num_threads(threads).build() { + Ok(pool) => { + let errors = pool.install(|| { 
+ links_by_domain + .par_iter() + .map(|(_, links)| { + let mut num_links_left = links.len(); + links .iter() - .any(|prefix| link.starts_with(prefix)) - { - return None; - } - - let res = link_checker::check_url(link, &site.config.link_checker); - - if links_to_process > 0 { - // Prevent rate-limiting, wait before next crawl unless we're done with this domain - thread::sleep(time::Duration::from_millis(500)); - } - - if link_checker::is_valid(&res) { - None - } else { - Some((page_path, link, res)) - } + .filter_map(move |link_def| { + num_links_left -= 1; + + let res = link_checker::check_url( + &link_def.external_link, + &site.config.link_checker, + ); + + if num_links_left > 0 { + // Prevent rate-limiting, wait before next crawl unless we're done with this domain + thread::sleep(time::Duration::from_millis(500)); + } + + if link_checker::is_valid(&res) { + None + } else { + Some((&link_def.file_path, &link_def.external_link, res)) + } + }) + .collect::>() }) + .flatten() .collect::>() - }) - .flatten() - .collect::>() - }); + }); - println!("> Checked {} external link(s): {} error(s) found.", all_links.len(), errors.len()); + println!( + "> Checked {} external link(s): {} error(s) found.", + checked_links.len(), + errors.len() + ); - if errors.is_empty() { - return Ok(()); + for (page_path, link, check_res) in errors { + messages.push(format!( + "Broken link in {} to {}: {}", + page_path.to_string_lossy(), + link, + link_checker::message(&check_res) + )); + } + } + Err(pool_err) => messages.push(pool_err.to_string()), } - let msg = errors - .into_iter() - .map(|(page_path, link, check_res)| { - format!( - "Dead link in {} to {}: {}", - page_path.to_string_lossy(), - link, - link_checker::message(&check_res) - ) - }) - .collect::>() - .join("\n"); - - Err(Error { kind: ErrorKind::Msg(msg), source: None }) + messages } diff --git a/components/utils/src/minify.rs b/components/site/src/minify.rs similarity index 97% rename from components/utils/src/minify.rs rename 
to components/site/src/minify.rs index 75160d530b..46a48f4808 100644 --- a/components/utils/src/minify.rs +++ b/components/site/src/minify.rs @@ -1,5 +1,7 @@ use errors::{bail, Result}; -use minify_html::{minify, Cfg}; +use libs::minify_html::{minify, Cfg}; + +// TODO: move to site pub fn html(html: String) -> Result { let mut cfg = Cfg::spec_compliant(); diff --git a/components/site/src/sass.rs b/components/site/src/sass.rs index df0d373ac6..ab6bb55695 100644 --- a/components/site/src/sass.rs +++ b/components/site/src/sass.rs @@ -1,9 +1,10 @@ use std::fs::create_dir_all; use std::path::{Path, PathBuf}; -use glob::glob; -use sass_rs::{compile_file, Options, OutputStyle}; +use libs::glob::glob; +use libs::sass_rs::{compile_file, Options, OutputStyle}; +use crate::anyhow; use errors::{bail, Result}; use utils::fs::{create_file, ensure_directory_exists}; @@ -47,7 +48,7 @@ fn compile_sass_glob( let mut compiled_paths = Vec::new(); for file in files { - let css = compile_file(&file, options.clone())?; + let css = compile_file(&file, options.clone()).map_err(|e| anyhow!(e))?; let path_inside_sass = file.strip_prefix(&sass_path).unwrap(); let parent_inside_sass = path_inside_sass.parent(); diff --git a/components/site/src/sitemap.rs b/components/site/src/sitemap.rs index c58c749907..360fc5ae74 100644 --- a/components/site/src/sitemap.rs +++ b/components/site/src/sitemap.rs @@ -2,19 +2,19 @@ use std::borrow::Cow; use std::collections::HashSet; use std::hash::{Hash, Hasher}; -use serde_derive::Serialize; +use serde::Serialize; use config::Config; -use library::{Library, Taxonomy}; +use content::{Library, Taxonomy}; +use libs::tera::{Map, Value}; use std::cmp::Ordering; -use tera::{Map, Value}; /// The sitemap only needs links, potentially date and extra for pages in case of updates /// for examples so we trim down all entries to only that #[derive(Debug, Serialize)] pub struct SitemapEntry<'a> { pub permalink: Cow<'a, str>, - pub updated: Option, + pub updated: &'a Option, 
pub extra: Option<&'a Map>, } @@ -33,7 +33,7 @@ impl<'a> PartialEq for SitemapEntry<'a> { impl<'a> Eq for SitemapEntry<'a> {} impl<'a> SitemapEntry<'a> { - pub fn new(permalink: Cow<'a, str>, updated: Option) -> Self { + pub fn new(permalink: Cow<'a, str>, updated: &'a Option) -> Self { SitemapEntry { permalink, updated, extra: None } } @@ -61,49 +61,44 @@ pub fn find_entries<'a>( taxonomies: &'a [Taxonomy], config: &'a Config, ) -> Vec> { - let pages = library - .pages_values() - .iter() - .map(|p| { - let mut entry = SitemapEntry::new( - Cow::Borrowed(&p.permalink), - p.meta.updated.clone().or_else(|| p.meta.date.clone()), - ); - entry.add_extra(&p.meta.extra); - entry - }) - .collect::>(); - - let mut sections = library - .sections_values() - .iter() - .filter(|s| s.meta.render) - .map(|s| { - let mut entry = SitemapEntry::new(Cow::Borrowed(&s.permalink), None); + let mut entries = HashSet::new(); + + for p in library.pages.values() { + let mut entry = SitemapEntry::new( + Cow::Borrowed(&p.permalink), + if p.meta.updated.is_some() { &p.meta.updated } else { &p.meta.date }, + ); + entry.add_extra(&p.meta.extra); + entries.insert(entry); + } + + for s in library.sections.values() { + if s.meta.render { + let mut entry = SitemapEntry::new(Cow::Borrowed(&s.permalink), &None); entry.add_extra(&s.meta.extra); - entry - }) - .collect::>(); + entries.insert(entry); + } - for section in library.sections_values().iter() { - if let Some(paginate_by) = section.paginate_by() { - let number_pagers = (section.pages.len() as f64 / paginate_by as f64).ceil() as isize; + if let Some(paginate_by) = s.paginate_by() { + let number_pagers = (s.pages.len() as f64 / paginate_by as f64).ceil() as isize; for i in 1..=number_pagers { - let permalink = - format!("{}{}/{}/", section.permalink, section.meta.paginate_path, i); - sections.push(SitemapEntry::new(Cow::Owned(permalink), None)) + let permalink = format!("{}{}/{}/", s.permalink, s.meta.paginate_path, i); + 
entries.insert(SitemapEntry::new(Cow::Owned(permalink), &None)); } } } - let mut taxonomies_entries = vec![]; for taxonomy in taxonomies { + if !taxonomy.kind.render { + continue; + } let name = &taxonomy.kind.name; - let mut terms = vec![SitemapEntry::new(Cow::Owned(config.make_permalink(name)), None)]; + entries.insert(SitemapEntry::new(Cow::Owned(config.make_permalink(name)), &None)); + for item in &taxonomy.items { - terms.push(SitemapEntry::new( + entries.insert(SitemapEntry::new( Cow::Owned(config.make_permalink(&format!("{}/{}", name, item.slug))), - None, + &None, )); if taxonomy.kind.is_paginated() { @@ -118,28 +113,13 @@ pub fn find_entries<'a>( taxonomy.kind.paginate_path(), i )); - terms.push(SitemapEntry::new(Cow::Owned(permalink), None)) + entries.insert(SitemapEntry::new(Cow::Owned(permalink), &None)); } } } - - taxonomies_entries.push(terms); - } - - let mut all_sitemap_entries = HashSet::new(); - for p in pages { - all_sitemap_entries.insert(p); - } - for s in sections { - all_sitemap_entries.insert(s); - } - for terms in taxonomies_entries { - for term in terms { - all_sitemap_entries.insert(term); - } } - let mut entries = all_sitemap_entries.into_iter().collect::>(); + let mut entries = entries.into_iter().collect::>(); entries.sort(); entries } diff --git a/components/site/src/tpls.rs b/components/site/src/tpls.rs index 1abee3b2d8..98d8066534 100644 --- a/components/site/src/tpls.rs +++ b/components/site/src/tpls.rs @@ -1,17 +1,9 @@ use crate::Site; +use libs::tera::Result as TeraResult; use templates::{filters, global_fns}; -use tera::Result as TeraResult; /// Adds global fns that are to be available to shortcodes while rendering markdown pub fn register_early_global_fns(site: &mut Site) -> TeraResult<()> { - site.tera.register_filter( - "markdown", - filters::MarkdownFilter::new( - site.base_path.clone(), - site.config.clone(), - site.permalinks.clone(), - )?, - ); site.tera.register_filter( "num_format", 
filters::NumFormatFilter::new(&site.config.default_language), @@ -69,6 +61,15 @@ pub fn register_early_global_fns(site: &mut Site) -> TeraResult<()> { ), ); + site.tera.register_filter( + "markdown", + filters::MarkdownFilter::new( + site.config.clone(), + site.permalinks.clone(), + site.tera.clone(), + ), + ); + Ok(()) } diff --git a/components/site/tests/common.rs b/components/site/tests/common.rs index 19297f4f04..f5caaf1824 100644 --- a/components/site/tests/common.rs +++ b/components/site/tests/common.rs @@ -99,12 +99,12 @@ fn find_lang_for(entry: &Path, base_dir: &Path) -> Option<(String, Option path_without_prefix.to_slash_lossy(), _ => unified_path.to_slash_lossy(), }; - Some((unified_path_str, Some(lang.to_str().unwrap().into()))) + Some((unified_path_str.to_string(), Some(lang.to_str().unwrap().into()))) } else { // No lang, return no_ext directly let mut no_ext_string = match no_ext.strip_prefix(base_dir) { - Ok(path_without_prefix) => path_without_prefix.to_slash_lossy(), - _ => no_ext.to_slash_lossy(), + Ok(path_without_prefix) => path_without_prefix.to_slash_lossy().to_string(), + _ => no_ext.to_slash_lossy().to_string(), }; no_ext_string.push_str(".md"); Some((no_ext_string, None)) @@ -186,16 +186,16 @@ impl Translations { let library = site.library.clone(); let library = library.read().unwrap(); // WORKAROUND because site.content_path is private - let unified_path = if let Some(page) = - library.get_page(site.base_path.join("content").join(path)) - { - page.file.canonical.clone() - } else if let Some(section) = library.get_section(site.base_path.join("content").join(path)) - { - section.file.canonical.clone() - } else { - panic!("No such page or section: {}", path); - }; + let unified_path = + if let Some(page) = library.pages.get(&site.base_path.join("content").join(path)) { + page.file.canonical.clone() + } else if let Some(section) = + library.sections.get(&site.base_path.join("content").join(path)) + { + section.file.canonical.clone() + } else { 
+ panic!("No such page or section: {}", path); + }; let translations = library.translations.get(&unified_path); if translations.is_none() { @@ -213,14 +213,14 @@ impl Translations { // Are we looking for a section? (no file extension here) if unified_path.ends_with("_index") { //library.get_section_by_key(*key).file.relative.to_string() - let section = library.get_section_by_key(*key); + let section = &library.sections[key]; Translation { lang: section.lang.clone(), permalink: section.permalink.clone(), path: section.file.path.to_str().unwrap().to_string(), } } else { - let page = library.get_page_by_key(*key); + let page = &library.pages[key]; Translation { lang: page.lang.clone(), permalink: page.permalink.clone(), diff --git a/components/site/tests/site.rs b/components/site/tests/site.rs index 2511af0ce7..b2cce71b9d 100644 --- a/components/site/tests/site.rs +++ b/components/site/tests/site.rs @@ -2,10 +2,12 @@ mod common; use std::collections::HashMap; use std::env; -use std::path::Path; +use std::path::{Path, PathBuf}; use common::{build_site, build_site_with_setup}; -use config::Taxonomy; +use config::TaxonomyConfig; +use content::Page; +use libs::ahash::AHashMap; use site::sitemap; use site::Site; @@ -19,71 +21,67 @@ fn can_parse_site() { let library = site.library.read().unwrap(); // Correct number of pages (sections do not count as pages, draft are ignored) - assert_eq!(library.pages().len(), 32); + assert_eq!(library.pages.len(), 33); let posts_path = path.join("content").join("posts"); // Make sure the page with a url doesn't have any sections - let url_post = library.get_page(&posts_path.join("fixed-url.md")).unwrap(); + let url_post = library.pages.get(&posts_path.join("fixed-url.md")).unwrap(); assert_eq!(url_post.path, "/a-fixed-url/"); // Make sure the article in a folder with only asset doesn't get counted as a section let asset_folder_post = - library.get_page(&posts_path.join("with-assets").join("index.md")).unwrap(); + 
library.pages.get(&posts_path.join("with-assets").join("index.md")).unwrap(); assert_eq!(asset_folder_post.file.components, vec!["posts".to_string()]); // That we have the right number of sections - assert_eq!(library.sections().len(), 12); + assert_eq!(library.sections.len(), 12); // And that the sections are correct - let index_section = library.get_section(&path.join("content").join("_index.md")).unwrap(); + let index_section = library.sections.get(&path.join("content").join("_index.md")).unwrap(); assert_eq!(index_section.subsections.len(), 5); assert_eq!(index_section.pages.len(), 3); assert!(index_section.ancestors.is_empty()); - let posts_section = library.get_section(&posts_path.join("_index.md")).unwrap(); + let posts_section = library.sections.get(&posts_path.join("_index.md")).unwrap(); assert_eq!(posts_section.subsections.len(), 2); assert_eq!(posts_section.pages.len(), 9); // 10 with 1 draft == 9 - assert_eq!( - posts_section.ancestors, - vec![*library.get_section_key(&index_section.file.path).unwrap()] - ); + assert_eq!(posts_section.ancestors, vec![index_section.file.relative.clone()]); // Make sure we remove all the pwd + content from the sections - let basic = library.get_page(&posts_path.join("simple.md")).unwrap(); + let basic = library.pages.get(&posts_path.join("simple.md")).unwrap(); assert_eq!(basic.file.components, vec!["posts".to_string()]); assert_eq!( basic.ancestors, - vec![ - *library.get_section_key(&index_section.file.path).unwrap(), - *library.get_section_key(&posts_section.file.path).unwrap(), - ] + vec![index_section.file.relative.clone(), posts_section.file.relative.clone(),] ); let tutorials_section = - library.get_section(&posts_path.join("tutorials").join("_index.md")).unwrap(); + library.sections.get(&posts_path.join("tutorials").join("_index.md")).unwrap(); assert_eq!(tutorials_section.subsections.len(), 2); - let sub1 = library.get_section_by_key(tutorials_section.subsections[0]); - let sub2 = 
library.get_section_by_key(tutorials_section.subsections[1]); + let sub1 = &library.sections[&tutorials_section.subsections[0]]; + let sub2 = &library.sections[&tutorials_section.subsections[1]]; assert_eq!(sub1.clone().meta.title.unwrap(), "Programming"); assert_eq!(sub2.clone().meta.title.unwrap(), "DevOps"); assert_eq!(tutorials_section.pages.len(), 0); let devops_section = library - .get_section(&posts_path.join("tutorials").join("devops").join("_index.md")) + .sections + .get(&posts_path.join("tutorials").join("devops").join("_index.md")) .unwrap(); assert_eq!(devops_section.subsections.len(), 0); assert_eq!(devops_section.pages.len(), 2); assert_eq!( devops_section.ancestors, vec![ - *library.get_section_key(&index_section.file.path).unwrap(), - *library.get_section_key(&posts_section.file.path).unwrap(), - *library.get_section_key(&tutorials_section.file.path).unwrap(), + index_section.file.relative.clone(), + posts_section.file.relative.clone(), + tutorials_section.file.relative.clone(), ] ); let prog_section = library - .get_section(&posts_path.join("tutorials").join("programming").join("_index.md")) + .sections + .get(&posts_path.join("tutorials").join("programming").join("_index.md")) .unwrap(); assert_eq!(prog_section.subsections.len(), 0); assert_eq!(prog_section.pages.len(), 2); @@ -102,6 +100,21 @@ fn can_parse_site() { assert_eq!(Some(&prog_section.meta.extra), sitemap_entry.extra); } +#[test] +fn errors_on_unknown_taxonomies() { + let (mut site, _, _) = build_site("test_site"); + let mut page = Page::default(); + page.file.path = PathBuf::from("unknown/taxo.md"); + page.meta.taxonomies.insert("wrong".to_string(), vec![]); + let res = site.add_page(page, false); + assert!(res.is_err()); + let err = res.unwrap_err(); + assert_eq!( + err.to_string(), + "Page `unknown/taxo.md` has taxonomy `wrong` which is not defined in config.toml" + ); +} + #[test] fn can_build_site_without_live_reload() { let (_, _tmp_dir, public) = build_site("test_site"); @@ 
-259,7 +272,7 @@ fn can_build_site_with_live_reload_and_drafts() { // drafted sections are included let library = site.library.read().unwrap(); - assert_eq!(library.sections().len(), 14); + assert_eq!(library.sections.len(), 14); assert!(file_exists!(public, "secret_section/index.html")); assert!(file_exists!(public, "secret_section/draft-page/index.html")); @@ -272,8 +285,11 @@ fn can_build_site_with_taxonomies() { let (site, _tmp_dir, public) = build_site_with_setup("test_site", |mut site| { site.load().unwrap(); { - let mut library = site.library.write().unwrap(); - for (i, (_, page)) in library.pages_mut().iter_mut().enumerate() { + let library = &mut *site.library.write().unwrap(); + let mut pages = vec![]; + + let pages_data = std::mem::replace(&mut library.pages, AHashMap::new()); + for (i, (_, mut page)) in pages_data.into_iter().enumerate() { page.meta.taxonomies = { let mut taxonomies = HashMap::new(); taxonomies.insert( @@ -282,6 +298,10 @@ fn can_build_site_with_taxonomies() { ); taxonomies }; + pages.push(page); + } + for p in pages { + library.insert_page(p); } } site.populate_taxonomies().unwrap(); @@ -289,7 +309,7 @@ fn can_build_site_with_taxonomies() { }); assert!(&public.exists()); - assert_eq!(site.taxonomies.len(), 1); + assert_eq!(site.taxonomies.len(), 2); assert!(file_exists!(public, "index.html")); assert!(file_exists!(public, "sitemap.xml")); @@ -353,7 +373,7 @@ fn can_build_site_with_pagination_for_section() { site.load().unwrap(); { let mut library = site.library.write().unwrap(); - for (_, section) in library.sections_mut() { + for (_, section) in library.sections.iter_mut() { if section.is_index() { continue; } @@ -481,7 +501,8 @@ fn can_build_site_with_pagination_for_index() { let mut library = site.library.write().unwrap(); { let index = library - .get_section_mut(&site.base_path.join("content").join("_index.md")) + .sections + .get_mut(&site.base_path.join("content").join("_index.md")) .unwrap(); index.meta.paginate_by = Some(2); 
index.meta.template = Some("index_paginated.html".to_string()); @@ -544,17 +565,21 @@ fn can_build_site_with_pagination_for_index() { #[test] fn can_build_site_with_pagination_for_taxonomy() { let (_, _tmp_dir, public) = build_site_with_setup("test_site", |mut site| { - site.config.taxonomies.push(Taxonomy { + site.config.languages.get_mut("en").unwrap().taxonomies.push(TaxonomyConfig { name: "tags".to_string(), + slug: "tags".to_string(), paginate_by: Some(2), paginate_path: None, + render: true, feed: true, }); site.load().unwrap(); { - let mut library = site.library.write().unwrap(); + let library = &mut *site.library.write().unwrap(); + let mut pages = vec![]; - for (i, (_, page)) in library.pages_mut().iter_mut().enumerate() { + let pages_data = std::mem::replace(&mut library.pages, AHashMap::new()); + for (i, (_, mut page)) in pages_data.into_iter().enumerate() { page.meta.taxonomies = { let mut taxonomies = HashMap::new(); taxonomies.insert( @@ -563,6 +588,10 @@ fn can_build_site_with_pagination_for_taxonomy() { ); taxonomies }; + pages.push(page); + } + for p in pages { + library.insert_page(p); } } site.populate_taxonomies().unwrap(); @@ -596,7 +625,7 @@ fn can_build_site_with_pagination_for_taxonomy() { "tags/a/page/1/index.html", "http-equiv=\"refresh\" content=\"0; url=https://replace-this-with-your-url.com/tags/a/\"" )); - assert!(file_contains!(public, "tags/a/index.html", "Num pagers: 8")); + assert!(file_contains!(public, "tags/a/index.html", "Num pagers: 9")); assert!(file_contains!(public, "tags/a/index.html", "Page size: 2")); assert!(file_contains!(public, "tags/a/index.html", "Current index: 1")); assert!(!file_contains!(public, "tags/a/index.html", "has_prev")); @@ -609,7 +638,7 @@ fn can_build_site_with_pagination_for_taxonomy() { assert!(file_contains!( public, "tags/a/index.html", - "Last: https://replace-this-with-your-url.com/tags/a/page/8/" + "Last: https://replace-this-with-your-url.com/tags/a/page/9/" )); 
assert!(!file_contains!(public, "tags/a/index.html", "has_prev")); @@ -678,35 +707,35 @@ fn can_apply_page_templates() { let template_path = path.join("content").join("applying_page_template"); let library = site.library.read().unwrap(); - let template_section = library.get_section(&template_path.join("_index.md")).unwrap(); + let template_section = library.sections.get(&template_path.join("_index.md")).unwrap(); assert_eq!(template_section.subsections.len(), 2); assert_eq!(template_section.pages.len(), 2); - let from_section_config = library.get_page_by_key(template_section.pages[0]); + let from_section_config = &library.pages[&template_section.pages[0]]; assert_eq!(from_section_config.meta.template, Some("page_template.html".into())); assert_eq!(from_section_config.meta.title, Some("From section config".into())); - let override_page_template = library.get_page_by_key(template_section.pages[1]); + let override_page_template = &library.pages[&template_section.pages[1]]; assert_eq!(override_page_template.meta.template, Some("page_template_override.html".into())); assert_eq!(override_page_template.meta.title, Some("Override".into())); // It should have applied recursively as well let another_section = - library.get_section(&template_path.join("another_section").join("_index.md")).unwrap(); + library.sections.get(&template_path.join("another_section").join("_index.md")).unwrap(); assert_eq!(another_section.subsections.len(), 0); assert_eq!(another_section.pages.len(), 1); - let changed_recursively = library.get_page_by_key(another_section.pages[0]); + let changed_recursively = &library.pages[&another_section.pages[0]]; assert_eq!(changed_recursively.meta.template, Some("page_template.html".into())); assert_eq!(changed_recursively.meta.title, Some("Changed recursively".into())); // But it should not have override a children page_template let yet_another_section = - library.get_section(&template_path.join("yet_another_section").join("_index.md")).unwrap(); + 
library.sections.get(&template_path.join("yet_another_section").join("_index.md")).unwrap(); assert_eq!(yet_another_section.subsections.len(), 0); assert_eq!(yet_another_section.pages.len(), 1); - let child = library.get_page_by_key(yet_another_section.pages[0]); + let child = &library.pages[&yet_another_section.pages[0]]; assert_eq!(child.meta.template, Some("page_template_child.html".into())); assert_eq!(child.meta.title, Some("Local section override".into())); } @@ -767,15 +796,42 @@ fn can_get_hash_for_static_files() { } #[test] -fn check_site() { +fn can_check_site() { let (mut site, _tmp_dir, _public) = build_site("test_site"); assert_eq!( site.config.link_checker.skip_anchor_prefixes, vec!["https://github.com/rust-lang/rust/blob/"] ); + assert_eq!( + site.config.link_checker.skip_prefixes, + vec!["http://[2001:db8::]/", "http://invaliddomain"] + ); + + site.config.enable_check_mode(); + site.load().expect("link check test_site"); +} + +#[test] +#[should_panic] +fn panics_on_invalid_external_domain() { + let (mut site, _tmp_dir, _public) = build_site("test_site"); + + // remove the invalid domain skip prefix + let i = site + .config + .link_checker + .skip_prefixes + .iter() + .position(|prefix| prefix == "http://invaliddomain") + .unwrap(); + site.config.link_checker.skip_prefixes.remove(i); + + // confirm the invalid domain skip prefix was removed assert_eq!(site.config.link_checker.skip_prefixes, vec!["http://[2001:db8::]/"]); + // check the test site, this time without the invalid domain skip prefix, which should cause a + // panic site.config.enable_check_mode(); site.load().expect("link check test_site"); } diff --git a/components/site/tests/site_i18n.rs b/components/site/tests/site_i18n.rs index c57b6cc5e3..d46ff1a37e 100644 --- a/components/site/tests/site_i18n.rs +++ b/components/site/tests/site_i18n.rs @@ -14,43 +14,38 @@ fn can_parse_multilingual_site() { site.load().unwrap(); let library = site.library.read().unwrap(); - 
assert_eq!(library.pages().len(), 11); - assert_eq!(library.sections().len(), 6); + assert_eq!(library.pages.len(), 11); + assert_eq!(library.sections.len(), 6); // default index sections let default_index_section = - library.get_section(&path.join("content").join("_index.md")).unwrap(); + library.sections.get(&path.join("content").join("_index.md")).unwrap(); assert_eq!(default_index_section.pages.len(), 1); assert!(default_index_section.ancestors.is_empty()); - let fr_index_section = library.get_section(&path.join("content").join("_index.fr.md")).unwrap(); + let fr_index_section = + library.sections.get(&path.join("content").join("_index.fr.md")).unwrap(); assert_eq!(fr_index_section.pages.len(), 1); assert!(fr_index_section.ancestors.is_empty()); // blog sections get only their own language pages let blog_path = path.join("content").join("blog"); - let default_blog = library.get_section(&blog_path.join("_index.md")).unwrap(); + let default_blog = library.sections.get(&blog_path.join("_index.md")).unwrap(); assert_eq!(default_blog.subsections.len(), 0); assert_eq!(default_blog.pages.len(), 4); - assert_eq!( - default_blog.ancestors, - vec![*library.get_section_key(&default_index_section.file.path).unwrap()] - ); + assert_eq!(default_blog.ancestors, vec![default_index_section.file.relative.clone()]); for key in &default_blog.pages { - let page = library.get_page_by_key(*key); + let page = &library.pages[key]; assert_eq!(page.lang, "en"); } - let fr_blog = library.get_section(&blog_path.join("_index.fr.md")).unwrap(); + let fr_blog = library.sections.get(&blog_path.join("_index.fr.md")).unwrap(); assert_eq!(fr_blog.subsections.len(), 0); - assert_eq!(fr_blog.pages.len(), 3); - assert_eq!( - fr_blog.ancestors, - vec![*library.get_section_key(&fr_index_section.file.path).unwrap()] - ); + assert_eq!(fr_blog.pages.len(), 4); + assert_eq!(fr_blog.ancestors, vec![fr_index_section.file.relative.clone()]); for key in &fr_blog.pages { - let page = 
library.get_page_by_key(*key); + let page = &library.pages[key]; assert_eq!(page.lang, "fr"); } } @@ -163,7 +158,12 @@ fn can_build_multilingual_site() { assert!(file_exists!(public, "fr/auteurs/index.html")); assert!(!file_contains!(public, "fr/auteurs/index.html", "Queen")); assert!(file_contains!(public, "fr/auteurs/index.html", "Vincent")); - assert!(!file_exists!(public, "fr/auteurs/vincent-prouillet/atom.xml")); + assert!(file_exists!(public, "fr/auteurs/vincent-prouillet/atom.xml")); + assert!(file_contains!( + public, + "fr/auteurs/vincent-prouillet/atom.xml", + r#""# + )); assert!(file_exists!(public, "fr/tags/index.html")); assert!(file_contains!(public, "fr/tags/index.html", "bonjour")); @@ -188,6 +188,7 @@ fn correct_translations_on_all_pages() { let link = format!("{}index.html", link); // Ensure every permalink has produced a HTML page + println!("{:?}", link); assert!(ensure_output_exists(&public, &site.config.base_url, &link)); // Ensure translations expected here match with those in the library diff --git a/components/templates/Cargo.toml b/components/templates/Cargo.toml index 6ee0cca870..147fed89b0 100644 --- a/components/templates/Cargo.toml +++ b/components/templates/Cargo.toml @@ -1,39 +1,18 @@ [package] name = "templates" version = "0.1.0" -authors = ["Vincent Prouillet "] -edition = "2018" +edition = "2021" [dependencies] -tera = "1" -base64 = "0.13" -lazy_static = "1" -toml = "0.5" -csv = "1" -serde = "1" -serde_json = "1" -serde_derive = "1" -sha2 = "0.9" -url = "2" -nom-bibtex = "0.3" -num-format = "0.4" - errors = { path = "../errors" } utils = { path = "../utils" } -library = { path = "../library" } +content = { path = "../content" } config = { path = "../config" } imageproc = { path = "../imageproc" } -rendering = { path = "../rendering" } +markdown = { path = "../markdown" } +libs = { path = "../libs" } -[dependencies.reqwest] -version = "0.11" -default-features = false -features = ["blocking"] [dev-dependencies] -mockito = "0.30" 
+mockito = "0.31" tempfile = "3" - -[features] -rust-tls = ["reqwest/rustls-tls"] -native-tls = ["reqwest/default-tls"] diff --git a/components/templates/src/builtins/atom.xml b/components/templates/src/builtins/atom.xml index 3ebacb9b6a..e515636d06 100644 --- a/components/templates/src/builtins/atom.xml +++ b/components/templates/src/builtins/atom.xml @@ -24,7 +24,7 @@ {{ page.title }} {{ page.date | date(format="%+") }} {{ page.updated | default(value=page.date) | date(format="%+") }} - + {{ page.permalink | safe }} {{ page.content }} diff --git a/components/templates/src/builtins/robots.txt b/components/templates/src/builtins/robots.txt index b345a1a10c..b6ac790cee 100644 --- a/components/templates/src/builtins/robots.txt +++ b/components/templates/src/builtins/robots.txt @@ -1,3 +1,4 @@ User-agent: * +Disallow: Allow: / Sitemap: {{ get_url(path="sitemap.xml") }} diff --git a/components/templates/src/filters.rs b/components/templates/src/filters.rs index 9acb03b50a..4360b46a1f 100644 --- a/components/templates/src/filters.rs +++ b/components/templates/src/filters.rs @@ -1,17 +1,14 @@ use std::borrow::Cow; use std::collections::HashMap; use std::hash::BuildHasher; -use std::path::PathBuf; -use base64::{decode, encode}; use config::Config; -use rendering::{render_content, RenderContext}; -use tera::{ +use libs::base64::{decode, encode}; +use libs::tera::{ to_value, try_get_value, Error as TeraError, Filter as TeraFilter, Result as TeraResult, Tera, Value, }; - -use crate::load_tera; +use markdown::{render_content, RenderContext}; #[derive(Debug)] pub struct MarkdownFilter { @@ -21,13 +18,8 @@ pub struct MarkdownFilter { } impl MarkdownFilter { - pub fn new( - path: PathBuf, - config: Config, - permalinks: HashMap, - ) -> TeraResult { - let tera = load_tera(&path, &config).map_err(tera::Error::msg)?; - Ok(Self { config, permalinks, tera }) + pub fn new(config: Config, permalinks: HashMap, tera: Tera) -> Self { + Self { config, permalinks, tera } } } @@ -94,7 +86,7 
@@ impl NumFormatFilter { impl TeraFilter for NumFormatFilter { fn filter(&self, value: &Value, args: &HashMap) -> TeraResult { - use num_format::{Locale, ToFormattedString}; + use libs::num_format::{Locale, ToFormattedString}; let num = try_get_value!("num_format", "value", i64, value); let locale = match args.get("locale") { @@ -113,17 +105,16 @@ impl TeraFilter for NumFormatFilter { #[cfg(test)] mod tests { - use std::{collections::HashMap, path::PathBuf}; + use std::collections::HashMap; - use tera::{to_value, Filter}; + use libs::tera::{to_value, Filter, Tera}; use super::{base64_decode, base64_encode, MarkdownFilter, NumFormatFilter}; use config::Config; #[test] fn markdown_filter() { - let result = MarkdownFilter::new(PathBuf::new(), Config::default(), HashMap::new()) - .unwrap() + let result = MarkdownFilter::new(Config::default(), HashMap::new(), Tera::default()) .filter(&to_value(&"# Hey").unwrap(), &HashMap::new()); assert!(result.is_ok()); assert_eq!(result.unwrap(), to_value(&"

Hey

\n").unwrap()); @@ -137,8 +128,7 @@ mod tests { let args = HashMap::new(); let config = Config::default(); let permalinks = HashMap::new(); - let mut tera = - super::load_tera(&PathBuf::new(), &config).map_err(tera::Error::msg).unwrap(); + let mut tera = Tera::default(); tera.add_raw_template("shortcodes/explicitlang.html", "a{{ lang }}a").unwrap(); let filter = MarkdownFilter { config, permalinks, tera }; let result = filter.filter(&to_value(&"{{ explicitlang(lang='jp') }}").unwrap(), &args); @@ -151,8 +141,8 @@ mod tests { fn markdown_filter_inline() { let mut args = HashMap::new(); args.insert("inline".to_string(), to_value(true).unwrap()); - let result = - MarkdownFilter::new(PathBuf::new(), Config::default(), HashMap::new()).unwrap().filter( + let result = MarkdownFilter::new(Config::default(), HashMap::new(), Tera::default()) + .filter( &to_value(&"Using `map`, `filter`, and `fold` instead of `for`").unwrap(), &args, ); @@ -165,8 +155,8 @@ mod tests { fn markdown_filter_inline_tables() { let mut args = HashMap::new(); args.insert("inline".to_string(), to_value(true).unwrap()); - let result = - MarkdownFilter::new(PathBuf::new(), Config::default(), HashMap::new()).unwrap().filter( + let result = MarkdownFilter::new(Config::default(), HashMap::new(), Tera::default()) + .filter( &to_value( &r#" |id|author_id| timestamp_created|title |content | @@ -191,15 +181,13 @@ mod tests { config.markdown.external_links_target_blank = true; let md = "Hello :smile: ..."; - let result = MarkdownFilter::new(PathBuf::new(), config.clone(), HashMap::new()) - .unwrap() + let result = MarkdownFilter::new(config.clone(), HashMap::new(), Tera::default()) .filter(&to_value(&md).unwrap(), &HashMap::new()); assert!(result.is_ok()); assert_eq!(result.unwrap(), to_value(&"

Hello https://google.com 😄 …

\n").unwrap()); let md = "```py\ni=0\n```"; - let result = MarkdownFilter::new(PathBuf::new(), config, HashMap::new()) - .unwrap() + let result = MarkdownFilter::new(config, HashMap::new(), Tera::default()) .filter(&to_value(&md).unwrap(), &HashMap::new()); assert!(result.is_ok()); assert!(result.unwrap().as_str().unwrap().contains("style")); @@ -210,8 +198,7 @@ mod tests { let mut permalinks = HashMap::new(); permalinks.insert("blog/_index.md".to_string(), "/foo/blog".to_string()); let md = "Hello. Check out [my blog](@/blog/_index.md)!"; - let result = MarkdownFilter::new(PathBuf::new(), Config::default(), permalinks) - .unwrap() + let result = MarkdownFilter::new(Config::default(), permalinks, Tera::default()) .filter(&to_value(&md).unwrap(), &HashMap::new()); assert!(result.is_ok()); assert_eq!( diff --git a/components/templates/src/global_fns/content.rs b/components/templates/src/global_fns/content.rs index fafb226278..a4555b42a9 100644 --- a/components/templates/src/global_fns/content.rs +++ b/components/templates/src/global_fns/content.rs @@ -1,8 +1,8 @@ -use library::{Library, Taxonomy}; +use content::{Library, Taxonomy}; +use libs::tera::{from_value, to_value, Function as TeraFn, Result, Value}; use std::collections::HashMap; use std::path::PathBuf; use std::sync::{Arc, RwLock}; -use tera::{from_value, to_value, Function as TeraFn, Result, Value}; use utils::slugs::{slugify_paths, SlugifyStrategy}; #[derive(Debug)] @@ -90,8 +90,8 @@ impl TeraFn for GetPage { ); let full_path = self.base_path.join(&path); let library = self.library.read().unwrap(); - match library.get_page(&full_path) { - Some(p) => Ok(to_value(p.to_serialized(&library)).unwrap()), + match library.pages.get(&full_path) { + Some(p) => Ok(to_value(p.serialize(&library)).unwrap()), None => Err(format!("Page `{}` not found.", path).into()), } } @@ -122,12 +122,12 @@ impl TeraFn for GetSection { let full_path = self.base_path.join(&path); let library = self.library.read().unwrap(); - match 
library.get_section(&full_path) { + match library.sections.get(&full_path) { Some(s) => { if metadata_only { - Ok(to_value(s.to_serialized_basic(&library)).unwrap()) + Ok(to_value(s.serialize_basic(&library)).unwrap()) } else { - Ok(to_value(s.to_serialized(&library)).unwrap()) + Ok(to_value(s.serialize(&library)).unwrap()) } } None => Err(format!("Section `{}` not found.", path).into()), @@ -185,45 +185,34 @@ impl TeraFn for GetTaxonomy { #[cfg(test)] mod tests { use super::*; - use config::{Config, Taxonomy as TaxonomyConfig}; - use library::TaxonomyItem; + use config::{Config, TaxonomyConfig}; + use content::TaxonomyTerm; #[test] fn can_get_taxonomy() { - let mut config = Config::default(); + let mut config = Config::default_for_test(); config.slugify.taxonomies = SlugifyStrategy::On; let taxo_config = TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() }; let taxo_config_fr = TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() }; - let library = Arc::new(RwLock::new(Library::new(0, 0, false))); - let tag = TaxonomyItem::new( - "Programming", - &config.default_language, - "tags", - &config, - vec![], - &library.read().unwrap(), - ); - let tag_fr = TaxonomyItem::new( - "Programmation", - "fr", - "tags", - &config, - vec![], - &library.read().unwrap(), - ); + config.slugify_taxonomies(); + let library = Arc::new(RwLock::new(Library::new(&config))); + let tag = TaxonomyTerm::new("Programming", &config.default_language, "tags", &[], &config); + let tag_fr = TaxonomyTerm::new("Programmation", "fr", "tags", &[], &config); let tags = Taxonomy { kind: taxo_config, lang: config.default_language.clone(), slug: "tags".to_string(), - permalink: "/tags/".to_string(), + path: "/tags/".to_string(), + permalink: "https://vincent.is/tags/".to_string(), items: vec![tag], }; let tags_fr = Taxonomy { kind: taxo_config_fr, lang: "fr".to_owned(), slug: "tags".to_string(), - permalink: "/fr/tags/".to_string(), + path: "/fr/tags/".to_string(), + 
permalink: "https://vincent.is/fr/tags/".to_string(), items: vec![tag_fr], }; @@ -279,28 +268,22 @@ mod tests { let taxo_config = TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() }; let taxo_config_fr = TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() }; - let library = Library::new(0, 0, false); - let tag = TaxonomyItem::new( - "Programming", - &config.default_language, - "tags", - &config, - vec![], - &library, - ); - let tag_fr = TaxonomyItem::new("Programmation", "fr", "tags", &config, vec![], &library); + let tag = TaxonomyTerm::new("Programming", &config.default_language, "tags", &[], &config); + let tag_fr = TaxonomyTerm::new("Programmation", "fr", "tags", &[], &config); let tags = Taxonomy { kind: taxo_config, lang: config.default_language.clone(), slug: "tags".to_string(), - permalink: "/tags/".to_string(), + path: "/tags/".to_string(), + permalink: "https://vincent.is/tags/".to_string(), items: vec![tag], }; let tags_fr = Taxonomy { kind: taxo_config_fr, lang: "fr".to_owned(), slug: "tags".to_string(), - permalink: "/fr/tags/".to_string(), + path: "/fr/tags/".to_string(), + permalink: "https://vincent.is/fr/tags/".to_string(), items: vec![tag_fr], }; diff --git a/components/templates/src/global_fns/files.rs b/components/templates/src/global_fns/files.rs index 13e95117bd..57c72f398a 100644 --- a/components/templates/src/global_fns/files.rs +++ b/components/templates/src/global_fns/files.rs @@ -3,10 +3,11 @@ use std::path::PathBuf; use std::{fs, io, result}; use crate::global_fns::helpers::search_for_file; -use base64::encode as encode_b64; use config::Config; -use sha2::{digest, Sha256, Sha384, Sha512}; -use tera::{from_value, to_value, Function as TeraFn, Result, Value}; +use libs::base64::encode as encode_b64; +use libs::sha2::{digest, Sha256, Sha384, Sha512}; +use libs::tera::{from_value, to_value, Function as TeraFn, Result, Value}; +use libs::url; use utils::site::resolve_internal_link; fn compute_file_hash( 
@@ -104,8 +105,10 @@ impl TeraFn for GetUrl { let mut segments = vec![]; if lang != self.config.default_language { - segments.push(lang); - }; + if path.is_empty() || !path[1..].starts_with(&lang) { + segments.push(lang); + } + } segments.push(path); @@ -233,8 +236,8 @@ mod tests { use std::fs::create_dir; use std::path::PathBuf; + use libs::tera::{to_value, Function}; use tempfile::{tempdir, TempDir}; - use tera::{to_value, Function}; use config::Config; use utils::fs::create_file; @@ -365,7 +368,12 @@ title = "A title" ); let config = Config::parse(CONFIG_DATA).unwrap(); let dir = create_temp_dir(); - let static_fn = GetUrl::new(dir.path().to_path_buf(), config, permalinks, PathBuf::new()); + let static_fn = GetUrl::new( + dir.path().to_path_buf(), + config.clone(), + permalinks.clone(), + PathBuf::new(), + ); let mut args = HashMap::new(); args.insert("path".to_string(), to_value("@/a_section/a_page.md").unwrap()); args.insert("lang".to_string(), to_value("fr").unwrap()); @@ -398,6 +406,29 @@ title = "A title" ); } + #[test] + fn does_not_duplicate_lang() { + let config = Config::parse(CONFIG_DATA).unwrap(); + let mut permalinks = HashMap::new(); + permalinks.insert( + "a_section/a_page.md".to_string(), + "https://remplace-par-ton-url.fr/a_section/a_page/".to_string(), + ); + permalinks.insert( + "a_section/a_page.en.md".to_string(), + "https://remplace-par-ton-url.fr/en/a_section/a_page/".to_string(), + ); + let dir = create_temp_dir(); + let static_fn = GetUrl::new(dir.path().to_path_buf(), config, permalinks, PathBuf::new()); + let mut args = HashMap::new(); + args.insert("path".to_string(), to_value("/en/a_section/a_page/").unwrap()); + args.insert("lang".to_string(), to_value("en").unwrap()); + assert_eq!( + static_fn.call(&args).unwrap(), + "https://remplace-par-ton-url.fr/en/a_section/a_page" + ); + } + #[test] fn can_get_feed_url_with_default_language() { let config = Config::parse(CONFIG_DATA).unwrap(); diff --git 
a/components/templates/src/global_fns/i18n.rs b/components/templates/src/global_fns/i18n.rs index b1bf76d144..e859e0f802 100644 --- a/components/templates/src/global_fns/i18n.rs +++ b/components/templates/src/global_fns/i18n.rs @@ -1,4 +1,4 @@ -use tera::{from_value, to_value, Error, Function as TeraFn, Result, Value}; +use libs::tera::{from_value, to_value, Error, Function as TeraFn, Result, Value}; use config::Config; use std::collections::HashMap; diff --git a/components/templates/src/global_fns/images.rs b/components/templates/src/global_fns/images.rs index 6e0c06fecf..e9aad501cb 100644 --- a/components/templates/src/global_fns/images.rs +++ b/components/templates/src/global_fns/images.rs @@ -2,7 +2,7 @@ use std::collections::HashMap; use std::path::PathBuf; use std::sync::{Arc, Mutex}; -use tera::{from_value, to_value, Function as TeraFn, Result, Value}; +use libs::tera::{from_value, to_value, Function as TeraFn, Result, Value}; use crate::global_fns::helpers::search_for_file; @@ -144,10 +144,10 @@ mod tests { use std::fs::{copy, create_dir_all}; use config::Config; + use libs::tera::{to_value, Function}; use std::path::{Path, PathBuf}; use std::sync::{Arc, Mutex}; use tempfile::{tempdir, TempDir}; - use tera::{to_value, Function}; fn create_dir_with_image() -> TempDir { let dir = tempdir().unwrap(); diff --git a/components/templates/src/global_fns/load_data.rs b/components/templates/src/global_fns/load_data.rs index 366b3be9f7..dc6c4210b8 100644 --- a/components/templates/src/global_fns/load_data.rs +++ b/components/templates/src/global_fns/load_data.rs @@ -5,18 +5,21 @@ use std::path::{Path, PathBuf}; use std::str::FromStr; use std::sync::{Arc, Mutex}; -use csv::Reader; -use reqwest::header::{HeaderMap, HeaderName, HeaderValue, CONTENT_TYPE}; -use reqwest::{blocking::Client, header}; -use tera::{from_value, to_value, Error, Function as TeraFn, Map, Result, Value}; -use url::Url; +use libs::csv::Reader; +use libs::reqwest::header::{HeaderMap, HeaderName, 
HeaderValue, CONTENT_TYPE}; +use libs::reqwest::{blocking::Client, header}; +use libs::tera::{ + from_value, to_value, Error, Error as TeraError, Function as TeraFn, Map, Result, Value, +}; +use libs::url::Url; +use libs::{nom_bibtex, serde_json, serde_yaml, toml}; use utils::de::fix_toml_dates; use utils::fs::{get_file_time, read_file}; use crate::global_fns::helpers::search_for_file; static GET_DATA_ARGUMENT_ERROR_MESSAGE: &str = - "`load_data`: requires EITHER a `path` or `url` argument"; + "`load_data`: requires EITHER a `path`, `url`, or `literal` argument"; #[derive(Debug, PartialEq, Clone, Copy, Hash)] enum Method { @@ -43,6 +46,8 @@ enum OutputFormat { Csv, Bibtex, Plain, + Xml, + Yaml, } impl FromStr for OutputFormat { @@ -54,7 +59,9 @@ impl FromStr for OutputFormat { "csv" => Ok(OutputFormat::Csv), "json" => Ok(OutputFormat::Json), "bibtex" => Ok(OutputFormat::Bibtex), + "xml" => Ok(OutputFormat::Xml), "plain" => Ok(OutputFormat::Plain), + "yaml" => Ok(OutputFormat::Yaml), format => Err(format!("Unknown output format {}", format).into()), } } @@ -67,7 +74,9 @@ impl OutputFormat { OutputFormat::Csv => "text/csv", OutputFormat::Toml => "application/toml", OutputFormat::Bibtex => "application/x-bibtex", + OutputFormat::Xml => "text/xml", OutputFormat::Plain => "text/plain", + OutputFormat::Yaml => "application/x-yaml", }) } } @@ -76,6 +85,7 @@ impl OutputFormat { enum DataSource { Url(Url), Path(PathBuf), + Literal(String), } impl DataSource { @@ -87,11 +97,16 @@ impl DataSource { fn from_args( path_arg: Option, url_arg: Option, + literal_arg: Option, base_path: &Path, theme: &Option, output_path: &Path, ) -> Result> { - if path_arg.is_some() && url_arg.is_some() { + // only one of `path`, `url`, or `literal` can be specified + if (path_arg.is_some() && url_arg.is_some()) + || (path_arg.is_some() && literal_arg.is_some()) + || (url_arg.is_some() && literal_arg.is_some()) + { return Err(GET_DATA_ARGUMENT_ERROR_MESSAGE.into()); } @@ -111,6 +126,10 @@ impl 
DataSource { .map_err(|e| format!("`load_data`: Failed to parse {} as url: {}", url, e).into()); } + if let Some(string_literal) = literal_arg { + return Ok(Some(DataSource::Literal(string_literal))); + } + Err(GET_DATA_ARGUMENT_ERROR_MESSAGE.into()) } @@ -141,6 +160,8 @@ impl Hash for DataSource { path.hash(state); get_file_time(path).expect("get file time").hash(state); } + // TODO: double check expectations here + DataSource::Literal(string_literal) => string_literal.hash(state), }; } } @@ -190,7 +211,7 @@ fn add_headers_from_args(header_args: Option>) -> Result } /// A Tera function to load data from a file or from a URL -/// Currently the supported formats are json, toml, csv, bibtex and plain text +/// Currently the supported formats are json, toml, csv, yaml, bibtex and plain text #[derive(Debug)] pub struct LoadData { base_path: PathBuf, @@ -217,6 +238,8 @@ impl TeraFn for LoadData { // Either a local path or a URL let path_arg = optional_arg!(String, args.get("path"), GET_DATA_ARGUMENT_ERROR_MESSAGE); let url_arg = optional_arg!(String, args.get("url"), GET_DATA_ARGUMENT_ERROR_MESSAGE); + let literal_arg = + optional_arg!(String, args.get("literal"), GET_DATA_ARGUMENT_ERROR_MESSAGE); // Optional general params let format_arg = optional_arg!( String, @@ -261,6 +284,7 @@ impl TeraFn for LoadData { DataSource::from_args( path_arg.clone(), url_arg, + literal_arg, &self.base_path, &self.theme, &self.output_path, @@ -358,6 +382,7 @@ impl TeraFn for LoadData { } } } + DataSource::Literal(string_literal) => Ok(string_literal), }?; let result_value: Result = match file_format { @@ -365,6 +390,8 @@ impl TeraFn for LoadData { OutputFormat::Csv => load_csv(data), OutputFormat::Json => load_json(data), OutputFormat::Bibtex => load_bibtex(data), + OutputFormat::Xml => load_xml(data), + OutputFormat::Yaml => load_yaml(data), OutputFormat::Plain => to_value(data).map_err(|e| e.into()), }; @@ -383,6 +410,13 @@ fn load_json(json_data: String) -> Result { Ok(json_content) } 
+/// Parse a YAML string and convert it to a Tera Value +fn load_yaml(yaml_data: String) -> Result { + let yaml_content: Value = + serde_yaml::from_str(yaml_data.as_str()).map_err(|e| format!("{:?}", e))?; + Ok(yaml_content) +} + /// Parse a TOML string and convert it to a Tera Value fn load_toml(toml_data: String) -> Result { let toml_content: toml::Value = toml::from_str(&toml_data).map_err(|e| format!("{:?}", e))?; @@ -476,7 +510,7 @@ fn load_csv(csv_data: String) -> Result { let record = match result { Ok(r) => r, Err(e) => { - return Err(tera::Error::chain( + return Err(TeraError::chain( String::from("Error encountered when parsing csv records"), e, )); @@ -499,6 +533,42 @@ fn load_csv(csv_data: String) -> Result { to_value(csv_value).map_err(|err| err.into()) } +/// Parse an XML string and convert it to a Tera Value +/// +/// An example XML file `example.xml` could be: +/// ```xml +/// +/// Number +/// Title +/// +/// 1 +/// Gutenberg +/// +/// +/// 2 +/// Printing +/// +/// +/// ``` +/// The json value output would be: +/// ```json +/// { +/// "root": { +/// "headers": ["Number", "Title"], +/// "records": [ +/// ["1", "Gutenberg"], +/// ["2", "Printing"] +/// ] +/// } +/// } +/// ``` +fn load_xml(xml_data: String) -> Result { + let xml_content: Value = + libs::quickxml_to_serde::xml_string_to_json(xml_data, &Default::default()) + .map_err(|e| format!("{:?}", e))?; + Ok(xml_content) +} + #[cfg(test)] mod tests { use super::{DataSource, LoadData, OutputFormat}; @@ -507,11 +577,11 @@ mod tests { use std::path::PathBuf; use crate::global_fns::load_data::Method; + use libs::serde_json::json; + use libs::tera::{self, to_value, Function}; use mockito::mock; - use serde_json::json; use std::fs::{copy, create_dir_all}; use tempfile::tempdir; - use tera::{to_value, Function}; // NOTE: HTTP mock paths below are randomly generated to avoid name // collisions. 
Mocks with the same path can sometimes bleed between tests @@ -1004,6 +1074,46 @@ mod tests { ) } + #[test] + fn can_load_xml() { + let static_fn = LoadData::new(PathBuf::from("../utils/test-files"), None, PathBuf::new()); + let mut args = HashMap::new(); + args.insert("path".to_string(), to_value("test.xml").unwrap()); + let result = static_fn.call(&args.clone()).unwrap(); + + assert_eq!( + result, + json!({ + "root": { + "key": "value", + "array": [1, 2, 3], + "subpackage": { + "subkey": 5 + } + } + }) + ) + } + + #[test] + fn can_load_yaml() { + let static_fn = LoadData::new(PathBuf::from("../utils/test-files"), None, PathBuf::new()); + let mut args = HashMap::new(); + args.insert("path".to_string(), to_value("test.yaml").unwrap()); + let result = static_fn.call(&args.clone()).unwrap(); + + assert_eq!( + result, + json!({ + "key": "value", + "array": [1, 2, 3], + "subpackage": { + "subkey": 5 + } + }) + ) + } + #[test] fn is_load_remote_data_using_post_method_with_different_body_not_cached() { let _mjson = mock("POST", "/kr1zdgbm4y3") @@ -1153,4 +1263,96 @@ mod tests { _mjson.assert(); } + + #[test] + fn can_load_plain_literal() { + let static_fn = LoadData::new(PathBuf::from("../utils"), None, PathBuf::new()); + let mut args = HashMap::new(); + let plain_str = "abc 123"; + args.insert("literal".to_string(), to_value(plain_str).unwrap()); + + let result = static_fn.call(&args.clone()).unwrap(); + + assert_eq!(result, plain_str); + } + + #[test] + fn can_load_json_literal() { + let static_fn = LoadData::new(PathBuf::from("../utils"), None, PathBuf::new()); + let mut args = HashMap::new(); + let json_str = r#"{ + "key": "value", + "array": [1, 2, 3], + "subpackage": { + "subkey": 5 + } + }"#; + args.insert("literal".to_string(), to_value(json_str).unwrap()); + args.insert("format".to_string(), to_value("json").unwrap()); + + let result = static_fn.call(&args.clone()).unwrap(); + + assert_eq!( + result, + json!({ + "key": "value", + "array": [1, 2, 3], + 
"subpackage": { + "subkey": 5 + } + }) + ); + } + + #[test] + fn can_load_toml_literal() { + let static_fn = LoadData::new(PathBuf::from("../utils"), None, PathBuf::new()); + let mut args = HashMap::new(); + let toml_str = r#" + [category] + key = "value" + date = 1979-05-27T07:32:00Z + lt1 = 07:32:00 + "#; + args.insert("literal".to_string(), to_value(toml_str).unwrap()); + args.insert("format".to_string(), to_value("toml").unwrap()); + + let result = static_fn.call(&args.clone()).unwrap(); + + // TOML does not load in order + assert_eq!( + result, + json!({ + "category": { + "date": "1979-05-27T07:32:00Z", + "lt1": "07:32:00", + "key": "value" + }, + }) + ); + } + + #[test] + fn can_load_csv_literal() { + let static_fn = LoadData::new(PathBuf::from("../utils"), None, PathBuf::new()); + let mut args = HashMap::new(); + let csv_str = r#"Number,Title +1,Gutenberg +2,Printing"#; + args.insert("literal".to_string(), to_value(csv_str).unwrap()); + args.insert("format".to_string(), to_value("csv").unwrap()); + + let result = static_fn.call(&args.clone()).unwrap(); + + assert_eq!( + result, + json!({ + "headers": ["Number", "Title"], + "records": [ + ["1", "Gutenberg"], + ["2", "Printing"] + ], + }) + ) + } } diff --git a/components/templates/src/lib.rs b/components/templates/src/lib.rs index 7104e543aa..120b8fc88b 100644 --- a/components/templates/src/lib.rs +++ b/components/templates/src/lib.rs @@ -4,47 +4,42 @@ pub mod global_fns; use std::path::Path; use config::Config; -use lazy_static::lazy_static; -use tera::{Context, Tera}; +use libs::once_cell::sync::Lazy; +use libs::tera::{Context, Tera}; -use errors::{bail, Error, Result}; +use errors::{bail, Context as ErrorContext, Result}; use utils::templates::rewrite_theme_paths; -lazy_static! 
{ - pub static ref ZOLA_TERA: Tera = { - let mut tera = Tera::default(); - tera.add_raw_templates(vec![ - ("__zola_builtins/404.html", include_str!("builtins/404.html")), - ("__zola_builtins/atom.xml", include_str!("builtins/atom.xml")), - ("__zola_builtins/rss.xml", include_str!("builtins/rss.xml")), - ("__zola_builtins/sitemap.xml", include_str!("builtins/sitemap.xml")), - ("__zola_builtins/robots.txt", include_str!("builtins/robots.txt")), - ( - "__zola_builtins/split_sitemap_index.xml", - include_str!("builtins/split_sitemap_index.xml"), - ), - ("__zola_builtins/anchor-link.html", include_str!("builtins/anchor-link.html")), - ( - "__zola_builtins/shortcodes/youtube.html", - include_str!("builtins/shortcodes/youtube.html"), - ), - ( - "__zola_builtins/shortcodes/vimeo.html", - include_str!("builtins/shortcodes/vimeo.html"), - ), - ("__zola_builtins/shortcodes/gist.html", include_str!("builtins/shortcodes/gist.html")), - ( - "__zola_builtins/shortcodes/streamable.html", - include_str!("builtins/shortcodes/streamable.html"), - ), - ("internal/alias.html", include_str!("builtins/internal/alias.html")), - ]) - .unwrap(); - tera.register_filter("base64_encode", filters::base64_encode); - tera.register_filter("base64_decode", filters::base64_decode); - tera - }; -} +pub static ZOLA_TERA: Lazy = Lazy::new(|| { + let mut tera = Tera::default(); + tera.add_raw_templates(vec![ + ("__zola_builtins/404.html", include_str!("builtins/404.html")), + ("__zola_builtins/atom.xml", include_str!("builtins/atom.xml")), + ("__zola_builtins/rss.xml", include_str!("builtins/rss.xml")), + ("__zola_builtins/sitemap.xml", include_str!("builtins/sitemap.xml")), + ("__zola_builtins/robots.txt", include_str!("builtins/robots.txt")), + ( + "__zola_builtins/split_sitemap_index.xml", + include_str!("builtins/split_sitemap_index.xml"), + ), + ("__zola_builtins/anchor-link.html", include_str!("builtins/anchor-link.html")), + ( + "__zola_builtins/shortcodes/youtube.html", + 
include_str!("builtins/shortcodes/youtube.html"), + ), + ("__zola_builtins/shortcodes/vimeo.html", include_str!("builtins/shortcodes/vimeo.html")), + ("__zola_builtins/shortcodes/gist.html", include_str!("builtins/shortcodes/gist.html")), + ( + "__zola_builtins/shortcodes/streamable.html", + include_str!("builtins/shortcodes/streamable.html"), + ), + ("internal/alias.html", include_str!("builtins/internal/alias.html")), + ]) + .unwrap(); + tera.register_filter("base64_encode", filters::base64_encode); + tera.register_filter("base64_decode", filters::base64_decode); + tera +}); /// Renders the `internal/alias.html` template that will redirect /// via refresh to the url given @@ -53,17 +48,17 @@ pub fn render_redirect_template(url: &str, tera: &Tera) -> Result { context.insert("url", &url); tera.render("internal/alias.html", &context) - .map_err(|e| Error::chain(format!("Failed to render alias for '{}'", url), e)) + .with_context(|| format!("Failed to render alias for '{}'", url)) } pub fn load_tera(path: &Path, config: &Config) -> Result { let tpl_glob = - format!("{}/{}", path.to_string_lossy().replace("\\", "/"), "templates/**/*.{*ml,md}"); + format!("{}/{}", path.to_string_lossy().replace('\\', "/"), "templates/**/*.{*ml,md}"); // Only parsing as we might be extending templates from themes and that would error // as we haven't loaded them yet let mut tera = - Tera::parse(&tpl_glob).map_err(|e| Error::chain("Error parsing templates", e))?; + Tera::parse(&tpl_glob).context("Error parsing templates from the /templates directory")?; if let Some(ref theme) = config.theme { // Test that the templates folder exist for that theme @@ -73,12 +68,12 @@ pub fn load_tera(path: &Path, config: &Config) -> Result { } let theme_tpl_glob = format!( - "{}/{}", - path.to_string_lossy().replace("\\", "/"), - format!("themes/{}/templates/**/*.{{*ml,md}}", theme) + "{}/themes/{}/templates/**/*.{{*ml,md}}", + path.to_string_lossy().replace('\\', "/"), + theme ); - let mut tera_theme = 
Tera::parse(&theme_tpl_glob) - .map_err(|e| Error::chain("Error parsing templates from themes", e))?; + let mut tera_theme = + Tera::parse(&theme_tpl_glob).context("Error parsing templates from themes")?; rewrite_theme_paths(&mut tera_theme, theme); // TODO: add tests for theme-provided robots.txt (https://github.com/getzola/zola/pull/1722) diff --git a/components/utils/Cargo.toml b/components/utils/Cargo.toml index 4773a441be..65c4513381 100644 --- a/components/utils/Cargo.toml +++ b/components/utils/Cargo.toml @@ -6,18 +6,10 @@ edition = "2018" include = ["src/**/*"] [dependencies] -tera = "1" -unicode-segmentation = "1.2" -walkdir = "2" -regex="1" -toml = "0.5" serde = { version = "1.0", features = ["derive"] } -slug = "0.1" -percent-encoding = "2" -filetime = "0.2.12" -minify-html = "0.6" errors = { path = "../errors" } +libs = { path = "../libs" } [dev-dependencies] tempfile = "3" diff --git a/components/utils/src/links.rs b/components/utils/src/anchors.rs similarity index 57% rename from components/utils/src/links.rs rename to components/utils/src/anchors.rs index 3e18b2ada0..a5706e79d9 100644 --- a/components/utils/src/links.rs +++ b/components/utils/src/anchors.rs @@ -1,29 +1,26 @@ -use regex::Regex; - +use libs::regex::escape; +use libs::regex::Regex; pub fn has_anchor_id(content: &str, anchor: &str) -> bool { let checks = anchor_id_checks(anchor); checks.is_match(content) } -fn anchor_id_checks(anchor:&str) -> Regex { - Regex::new( - &format!(r#" (?i)(id|name) *= *("|')*{}("|'| |>)+"#, anchor) - ).unwrap() +fn anchor_id_checks(anchor: &str) -> Regex { + Regex::new(&format!(r#"\s(?i)(id|name) *= *("|')*{}("|'| |>)+"#, escape(anchor))).unwrap() } - #[cfg(test)] -mod tests{ +mod tests { use super::anchor_id_checks; - fn check(anchor:&str, content:&str) -> bool { + fn check(anchor: &str, content: &str) -> bool { anchor_id_checks(anchor).is_match(content) } #[test] - fn matchers () { - let m = |content| {check("fred", content)}; + fn matchers() { + let m = 
|content| check("fred", content); // Canonical match/non match assert!(m(r#""#)); @@ -43,5 +40,16 @@ mod tests{ // Case variants assert!(m(r#""#)); assert!(m(r#""#)); + + // Newline variants + assert!(m(r#""#)); + + // Escaped Anchors + assert!(check("fred?george", r#""#)); + assert!(check("fred.george", r#""#)); + + // Non matchers + assert!(!m(r#""#)); } } diff --git a/components/utils/src/de.rs b/components/utils/src/de.rs index 20a91d592a..983b31d9da 100644 --- a/components/utils/src/de.rs +++ b/components/utils/src/de.rs @@ -1,5 +1,6 @@ +use libs::tera::{Map, Value}; +use libs::toml; use serde::{Deserialize, Deserializer}; -use tera::{Map, Value}; /// Used as an attribute when we want to convert from TOML to a string date /// If a TOML datetime isn't present, it will accept a string and push it through diff --git a/components/utils/src/fs.rs b/components/utils/src/fs.rs index 7b2a3aeb6d..8d3c22c8a8 100644 --- a/components/utils/src/fs.rs +++ b/components/utils/src/fs.rs @@ -1,27 +1,27 @@ -use filetime::{set_file_mtime, FileTime}; +use libs::filetime::{set_file_mtime, FileTime}; +use libs::walkdir::WalkDir; use std::fs::{copy, create_dir_all, metadata, File}; use std::io::prelude::*; use std::path::Path; use std::time::SystemTime; -use walkdir::WalkDir; -use errors::{Error, Result}; +use errors::{Context, Result}; pub fn is_path_in_directory(parent: &Path, path: &Path) -> Result { let canonical_path = path .canonicalize() - .map_err(|e| format!("Failed to canonicalize {}: {}", path.display(), e))?; + .with_context(|| format!("Failed to canonicalize {}", path.display()))?; let canonical_parent = parent .canonicalize() - .map_err(|e| format!("Failed to canonicalize {}: {}", parent.display(), e))?; + .with_context(|| format!("Failed to canonicalize {}", parent.display()))?; Ok(canonical_path.starts_with(canonical_parent)) } /// Create a file with the content given pub fn create_file(path: &Path, content: &str) -> Result<()> { - let mut file = File::create(&path) - 
.map_err(|e| Error::chain(format!("Failed to create file {}", path.display()), e))?; + let mut file = + File::create(&path).with_context(|| format!("Failed to create file {}", path.display()))?; file.write_all(content.as_bytes())?; Ok(()) } @@ -38,9 +38,8 @@ pub fn ensure_directory_exists(path: &Path) -> Result<()> { /// exists before creating it pub fn create_directory(path: &Path) -> Result<()> { if !path.exists() { - create_dir_all(path).map_err(|e| { - Error::chain(format!("Was not able to create folder {}", path.display()), e) - })?; + create_dir_all(path) + .with_context(|| format!("Failed to create folder {}", path.display()))?; } Ok(()) } @@ -49,7 +48,7 @@ pub fn create_directory(path: &Path) -> Result<()> { pub fn read_file(path: &Path) -> Result { let mut content = String::new(); File::open(path) - .map_err(|e| Error::chain(format!("Failed to open '{}'", path.display()), e))? + .with_context(|| format!("Failed to open file {}", path.display()))? .read_to_string(&mut content)?; // Remove utf-8 BOM if any. @@ -67,8 +66,8 @@ pub fn copy_file(src: &Path, dest: &Path, base_path: &Path, hard_link: bool) -> let target_path = dest.join(relative_path); if let Some(parent_directory) = target_path.parent() { - create_dir_all(parent_directory).map_err(|e| { - Error::chain(format!("Was not able to create folder {}", parent_directory.display()), e) + create_dir_all(parent_directory).with_context(|| { + format!("Failed to create directory {}", parent_directory.display()) })?; } @@ -81,38 +80,29 @@ pub fn copy_file(src: &Path, dest: &Path, base_path: &Path, hard_link: bool) -> /// 3. Its filesize is identical to that of the src file. 
pub fn copy_file_if_needed(src: &Path, dest: &Path, hard_link: bool) -> Result<()> { if let Some(parent_directory) = dest.parent() { - create_dir_all(parent_directory).map_err(|e| { - Error::chain(format!("Was not able to create folder {}", parent_directory.display()), e) + create_dir_all(parent_directory).with_context(|| { + format!("Failed to create directory {}", parent_directory.display()) })?; } if hard_link { std::fs::hard_link(src, dest)? } else { - let src_metadata = metadata(src)?; + let src_metadata = metadata(src) + .with_context(|| format!("Failed to get metadata of {}", src.display()))?; let src_mtime = FileTime::from_last_modification_time(&src_metadata); if Path::new(&dest).is_file() { let target_metadata = metadata(&dest)?; let target_mtime = FileTime::from_last_modification_time(&target_metadata); if !(src_mtime == target_mtime && src_metadata.len() == target_metadata.len()) { - copy(src, &dest).map_err(|e| { - Error::chain( - format!( - "Was not able to copy file {} to {}", - src.display(), - dest.display() - ), - e, - ) + copy(src, &dest).with_context(|| { + format!("Was not able to copy file {} to {}", src.display(), dest.display()) })?; set_file_mtime(&dest, src_mtime)?; } } else { - copy(src, &dest).map_err(|e| { - Error::chain( - format!("Was not able to copy file {} to {}", src.display(), dest.display()), - e, - ) + copy(src, &dest).with_context(|| { + format!("Was not able to copy file {} to {}", src.display(), dest.display()) })?; set_file_mtime(&dest, src_mtime)?; } @@ -121,7 +111,9 @@ pub fn copy_file_if_needed(src: &Path, dest: &Path, hard_link: bool) -> Result<( } pub fn copy_directory(src: &Path, dest: &Path, hard_link: bool) -> Result<()> { - for entry in WalkDir::new(src).into_iter().filter_map(std::result::Result::ok) { + for entry in + WalkDir::new(src).follow_links(true).into_iter().filter_map(std::result::Result::ok) + { let relative_path = entry.path().strip_prefix(src).unwrap(); let target_path = dest.join(relative_path); 
@@ -130,14 +122,12 @@ pub fn copy_directory(src: &Path, dest: &Path, hard_link: bool) -> Result<()> { create_directory(&target_path)?; } } else { - copy_file(entry.path(), dest, src, hard_link).map_err(|e| { - Error::chain( - format!( - "Was not able to copy file {} to {}", - entry.path().display(), - dest.display() - ), - e, + copy_file(entry.path(), dest, src, hard_link).with_context(|| { + format!( + "Was not able to copy {} to {} (hard_link={})", + entry.path().display(), + dest.display(), + hard_link ) })?; } @@ -183,6 +173,7 @@ mod tests { use std::path::PathBuf; use std::str::FromStr; + use libs::filetime; use tempfile::tempdir_in; use super::copy_file; @@ -197,7 +188,7 @@ mod tests { let src_file_path = src_dir.path().join("test.txt"); let dest_file_path = dest_dir.path().join(src_file_path.strip_prefix(&base_path).unwrap()); File::create(&src_file_path).unwrap(); - copy_file(&src_file_path, &dest_dir.path().to_path_buf(), &base_path, false).unwrap(); + copy_file(&src_file_path, dest_dir.path(), &base_path, false).unwrap(); assert_eq!( metadata(&src_file_path).and_then(|m| m.modified()).unwrap(), @@ -218,7 +209,7 @@ mod tests { let mut src_file = File::create(&src_file_path).unwrap(); src_file.write_all(b"file1").unwrap(); } - copy_file(&src_file_path, &dest_dir.path().to_path_buf(), &base_path, false).unwrap(); + copy_file(&src_file_path, dest_dir.path(), &base_path, false).unwrap(); { let mut dest_file = File::create(&dest_file_path).unwrap(); dest_file.write_all(b"file2").unwrap(); @@ -228,14 +219,14 @@ mod tests { filetime::set_file_mtime(&src_file_path, filetime::FileTime::from_unix_time(0, 0)).unwrap(); filetime::set_file_mtime(&dest_file_path, filetime::FileTime::from_unix_time(0, 0)) .unwrap(); - copy_file(&src_file_path, &dest_dir.path().to_path_buf(), &base_path, false).unwrap(); + copy_file(&src_file_path, dest_dir.path(), &base_path, false).unwrap(); assert_eq!(read_to_string(&src_file_path).unwrap(), "file1"); 
assert_eq!(read_to_string(&dest_file_path).unwrap(), "file2"); // Copy occurs if the timestamps are different while the filesizes are same. filetime::set_file_mtime(&dest_file_path, filetime::FileTime::from_unix_time(42, 42)) .unwrap(); - copy_file(&src_file_path, &dest_dir.path().to_path_buf(), &base_path, false).unwrap(); + copy_file(&src_file_path, dest_dir.path(), &base_path, false).unwrap(); assert_eq!(read_to_string(&src_file_path).unwrap(), "file1"); assert_eq!(read_to_string(&dest_file_path).unwrap(), "file1"); @@ -246,7 +237,7 @@ mod tests { } filetime::set_file_mtime(&dest_file_path, filetime::FileTime::from_unix_time(0, 0)) .unwrap(); - copy_file(&src_file_path, &dest_dir.path().to_path_buf(), &base_path, false).unwrap(); + copy_file(&src_file_path, dest_dir.path(), &base_path, false).unwrap(); assert_eq!(read_to_string(&src_file_path).unwrap(), "file1"); assert_eq!(read_to_string(&dest_file_path).unwrap(), "file1"); } diff --git a/components/utils/src/lib.rs b/components/utils/src/lib.rs index 2a8162d0e1..3b2ab6022d 100644 --- a/components/utils/src/lib.rs +++ b/components/utils/src/lib.rs @@ -1,9 +1,9 @@ +pub mod anchors; pub mod de; pub mod fs; -pub mod links; -pub mod minify; pub mod net; pub mod site; pub mod slugs; +pub mod table_of_contents; pub mod templates; -pub mod vec; +pub mod types; diff --git a/components/utils/src/site.rs b/components/utils/src/site.rs index d49e559afb..3561c3fc99 100644 --- a/components/utils/src/site.rs +++ b/components/utils/src/site.rs @@ -1,18 +1,7 @@ -use percent_encoding::percent_decode; +use libs::percent_encoding::percent_decode; use std::collections::HashMap; -use std::hash::BuildHasher; -use unicode_segmentation::UnicodeSegmentation; -use errors::Result; - -/// Get word count and estimated reading time -pub fn get_reading_analytics(content: &str) -> (usize, usize) { - let word_count: usize = content.unicode_words().count(); - - // https://help.medium.com/hc/en-us/articles/214991667-Read-time - // 275 seems a 
bit too high though - (word_count, ((word_count + 199) / 200)) -} +use errors::{anyhow, Result}; /// Result of a successful resolution of an internal link. #[derive(Debug, PartialEq, Clone)] @@ -22,15 +11,15 @@ pub struct ResolvedInternalLink { /// Internal path to the .md file, without the leading `@/`. pub md_path: String, /// Optional anchor target. - /// We can check whether it exists only after all the markdown rendering is done. + /// We can check whether it exists only after all the markdown rendering is done. pub anchor: Option, } /// Resolves an internal link (of the `@/posts/something.md#hey` sort) to its absolute link and /// returns the path + anchor as well -pub fn resolve_internal_link( +pub fn resolve_internal_link( link: &str, - permalinks: &HashMap, + permalinks: &HashMap, ) -> Result { // First we remove the ./ since that's zola specific let clean_link = link.replacen("@/", "", 1); @@ -41,7 +30,7 @@ pub fn resolve_internal_link( // to decode them first let decoded = percent_decode(parts[0].as_bytes()).decode_utf8_lossy().to_string(); let target = - permalinks.get(&decoded).ok_or_else(|| format!("Relative link {} not found.", link))?; + permalinks.get(&decoded).ok_or_else(|| anyhow!("Relative link {} not found.", link))?; if parts.len() > 1 { Ok(ResolvedInternalLink { permalink: format!("{}#{}", target, parts[1]), @@ -57,7 +46,7 @@ mod tests { use std::collections::HashMap; - use super::{get_reading_analytics, resolve_internal_link}; + use super::resolve_internal_link; #[test] fn can_resolve_valid_internal_link() { @@ -103,29 +92,4 @@ mod tests { let res = resolve_internal_link("@/pages/about.md#hello", &HashMap::new()); assert!(res.is_err()); } - - #[test] - fn reading_analytics_empty_text() { - let (word_count, reading_time) = get_reading_analytics(" "); - assert_eq!(word_count, 0); - assert_eq!(reading_time, 0); - } - - #[test] - fn reading_analytics_short_text() { - let (word_count, reading_time) = 
get_reading_analytics("Hello World"); - assert_eq!(word_count, 2); - assert_eq!(reading_time, 1); - } - - #[test] - fn reading_analytics_long_text() { - let mut content = String::new(); - for _ in 0..1000 { - content.push_str(" Hello world"); - } - let (word_count, reading_time) = get_reading_analytics(&content); - assert_eq!(word_count, 2000); - assert_eq!(reading_time, 10); - } } diff --git a/components/utils/src/slugs.rs b/components/utils/src/slugs.rs index 8f209254d5..acacd1ee20 100644 --- a/components/utils/src/slugs.rs +++ b/components/utils/src/slugs.rs @@ -1,3 +1,4 @@ +use libs::slug; use serde::{Deserialize, Serialize}; #[derive(Copy, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] diff --git a/components/rendering/src/table_of_contents.rs b/components/utils/src/table_of_contents.rs similarity index 99% rename from components/rendering/src/table_of_contents.rs rename to components/utils/src/table_of_contents.rs index 8e7773d012..75bbf21487 100644 --- a/components/rendering/src/table_of_contents.rs +++ b/components/utils/src/table_of_contents.rs @@ -1,4 +1,4 @@ -use serde_derive::Serialize; +use serde::Serialize; /// Populated while receiving events from the markdown parser #[derive(Debug, Default, PartialEq, Clone, Serialize)] diff --git a/components/utils/src/templates.rs b/components/utils/src/templates.rs index 814e59bca7..62188a304a 100644 --- a/components/utils/src/templates.rs +++ b/components/utils/src/templates.rs @@ -1,6 +1,6 @@ use std::collections::HashMap; -use tera::{Context, Tera}; +use libs::tera::{Context, Tera}; use errors::{bail, Result}; @@ -78,7 +78,7 @@ pub fn render_template( theme: &Option, ) -> Result { if let Some(template) = check_template_fallbacks(name, tera, theme) { - return tera.render(&template, &context).map_err(std::convert::Into::into); + return tera.render(template, &context).map_err(std::convert::Into::into); } // maybe it's a default one? 
@@ -150,7 +150,7 @@ mod tests { use crate::templates::check_template_fallbacks; use super::rewrite_theme_paths; - use tera::Tera; + use libs::tera::Tera; #[test] fn can_rewrite_all_paths_of_theme() { @@ -178,8 +178,8 @@ mod tests { #[test] fn template_fallback_is_successful() { let mut tera = Tera::parse("test-templates/*.html").unwrap(); - tera.add_raw_template(&"hyde/templates/index.html", "Hello").unwrap(); - tera.add_raw_template(&"hyde/templates/theme-only.html", "Hello").unwrap(); + tera.add_raw_template("hyde/templates/index.html", "Hello").unwrap(); + tera.add_raw_template("hyde/templates/theme-only.html", "Hello").unwrap(); // Check finding existing template assert_eq!(check_template_fallbacks("index.html", &tera, &None), Some("index.html")); diff --git a/components/utils/src/types.rs b/components/utils/src/types.rs new file mode 100644 index 0000000000..d4d53a151d --- /dev/null +++ b/components/utils/src/types.rs @@ -0,0 +1,16 @@ +use serde::{Deserialize, Serialize}; + +#[derive(Debug, Copy, Clone, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum InsertAnchor { + Left, + Right, + Heading, + None, +} + +impl InsertAnchor { + pub fn uses_template(&self) -> bool { + matches!(self, InsertAnchor::Left | InsertAnchor::Right) + } +} diff --git a/components/utils/src/vec.rs b/components/utils/src/vec.rs deleted file mode 100644 index 346769c113..0000000000 --- a/components/utils/src/vec.rs +++ /dev/null @@ -1,44 +0,0 @@ -pub trait InsertMany { - type Element; - fn insert_many(&mut self, elem_to_insert: Vec<(usize, Self::Element)>); -} - -impl InsertMany for Vec { - type Element = T; - - /// Efficiently insert multiple element in their specified index. - /// The elements should sorted in ascending order by their index. - /// - /// This is done in O(n) time. 
- fn insert_many(&mut self, elem_to_insert: Vec<(usize, T)>) { - let mut inserted = vec![]; - let mut last_idx = 0; - - for (idx, elem) in elem_to_insert.into_iter() { - let head_len = idx - last_idx; - inserted.extend(self.splice(0..head_len, std::iter::empty())); - inserted.push(elem); - last_idx = idx; - } - let len = self.len(); - inserted.extend(self.drain(0..len)); - - *self = inserted; - } -} - -#[cfg(test)] -mod test { - use super::InsertMany; - - #[test] - fn insert_many_works() { - let mut v = vec![1, 2, 3, 4, 5]; - v.insert_many(vec![(0, 0), (2, -1), (5, 6)]); - assert_eq!(v, &[0, 1, 2, -1, 3, 4, 5, 6]); - - let mut v2 = vec![1, 2, 3, 4, 5]; - v2.insert_many(vec![(0, 0), (2, -1)]); - assert_eq!(v2, &[0, 1, 2, -1, 3, 4, 5]); - } -} diff --git a/components/utils/test-files/test.xml b/components/utils/test-files/test.xml new file mode 100644 index 0000000000..d492f60207 --- /dev/null +++ b/components/utils/test-files/test.xml @@ -0,0 +1,9 @@ + + value + 1 + 2 + 3 + + 5 + + diff --git a/components/utils/test-files/test.yaml b/components/utils/test-files/test.yaml new file mode 100644 index 0000000000..37d3f9a6df --- /dev/null +++ b/components/utils/test-files/test.yaml @@ -0,0 +1,9 @@ +--- + key: "value" + array: + - 1 + - 2 + - 3 + subpackage: + subkey: 5 + diff --git a/docs/content/documentation/content/linking.md b/docs/content/documentation/content/linking.md index 4eefd62931..a426e0641b 100644 --- a/docs/content/documentation/content/linking.md +++ b/docs/content/documentation/content/linking.md @@ -19,10 +19,10 @@ For example: ## Example code <- example-code-1 ``` -You can also manually specify an id with a `{#…}` suffix on the heading line: +You can also manually specify an id with a `{#…}` suffix on the heading line as well as CSS classes: ```md -# Something manual! {#manual} +# Something manual! 
{#manual .header .bold} ``` This is useful for making deep links robust, either proactively (so that you can later change the text of a heading @@ -32,7 +32,7 @@ links working. ## Anchor insertion It is possible to have Zola automatically insert anchor links next to the heading, as you can see on this documentation -if you hover a title. +if you hover a title or covering the full heading text. This option is set at the section level: the `insert_anchor_links` variable on the [section front matter page](@/documentation/content/section.md#front-matter). @@ -47,6 +47,9 @@ The anchor link template has the following variables: - `lang`: the current language, unless called from the `markdown` template filter, in which case it will always be `en` - `level`: the heading level (between 1 and 6) +If you use `insert_anchor = "heading"`, the template will still be used but only the opening `` tag will get extracted +from it, everything else will not be used. + ## Internal links Linking to other pages and their headings is so common that Zola adds a special syntax to Markdown links to handle them: start the link with `@/` and point to the `.md` file you want @@ -54,3 +57,5 @@ to link to. The path to the file starts from the `content` directory. For example, linking to a file located at `content/pages/about.md` would be `[my link](@/pages/about.md)`. You can still link to an anchor directly; `[my link](@/pages/about.md#example)` will work as expected. + +By default, broken internal links are treated as errors. To treat them as warnings instead, visit the `[link_checker]` section of `config.toml` and set `internal_level = "warn"`. Note: treating broken links as warnings allows the site to be built with broken links intact, so a link such as `[my link](@/pages/whoops.md)` will be rendered to HTML as ``. 
diff --git a/docs/content/documentation/content/multilingual.md b/docs/content/documentation/content/multilingual.md index 9da9eba6b9..8f20bf1319 100644 --- a/docs/content/documentation/content/multilingual.md +++ b/docs/content/documentation/content/multilingual.md @@ -27,6 +27,9 @@ summary = "Mon blog" [languages.it.translations] summary = "Mio blog" +# translations for the default language are not prefixed by languages.code +[translations] +summary = "My blog" ``` Note: By default, Chinese and Japanese search indexing is not included. You can include diff --git a/docs/content/documentation/content/overview.md b/docs/content/documentation/content/overview.md index a3e78aa924..c81f7b28d1 100644 --- a/docs/content/documentation/content/overview.md +++ b/docs/content/documentation/content/overview.md @@ -70,13 +70,13 @@ By default, this page's slug will be the directory name and thus its permalink w It is possible to ignore selected asset files using the [ignored_content](@/documentation/getting-started/configuration.md) setting in the config file. -For example, say that you have an Excel spreadsheet from which you are taking several screenshots and -then linking to these image files on your website. For maintainability, you want to keep -the spreadsheet in the same directory as the Markdown file, but you don't want to copy the spreadsheet to -the public web site. You can achieve this by setting `ignored_content` in the config file: +For example, say that you have several code files which you are linking to on your website. +For maintainability, you want to keep your code in the same directory as the Markdown file, +but you don't want to copy the build folders to the public web site. 
You can achieve this by setting `ignored_content` in the config file: +(Note of caution: `{Cargo.lock,target}` is _not_ the same as `{Cargo.lock, target}`) ``` -ignored_content = ["*.xlsx"] +ignored_content = ["code_articles/**/{Cargo.lock,target}", "*.rs"] ``` ## Static assets @@ -85,7 +85,7 @@ In addition to placing content files in the `content` directory, you may also pl files in the `static` directory. Any files/directories that you place in the `static` directory will be copied, without modification, to the `public` directory. -Typically, you might put site-wide assets (such as the site favicon, site logos or site-wide +Typically, you might put site-wide assets (such as a CSS file, the site favicon, site logos or site-wide JavaScript) in the root of the static directory. You can also place any HTML or other files that you wish to be included without modification (that is, without being parsed as Markdown files) into the static directory. diff --git a/docs/content/documentation/content/page.md b/docs/content/documentation/content/page.md index c0e8079630..0f018e04cd 100644 --- a/docs/content/documentation/content/page.md +++ b/docs/content/documentation/content/page.md @@ -4,7 +4,7 @@ weight = 30 +++ A page is any file ending with `.md` in the `content` directory, except files -named `_index.md`. +named `_index.md`. **Note:** page file names must not contain `_index.` [at all](https://github.com/getzola/zola/pull/1694). 
If a file ending with `.md` is named `index.md`, it will generate a page with the name of its directory (for example, `/content/about/index.md` would diff --git a/docs/content/documentation/content/sass.md b/docs/content/documentation/content/sass.md index 6b188e1d91..980955ec99 100644 --- a/docs/content/documentation/content/sass.md +++ b/docs/content/documentation/content/sass.md @@ -11,6 +11,8 @@ may be of interest: * The [official Sass website](https://sass-lang.com/) * [Why Sass?](https://alistapart.com/article/why-sass), by Dan Cederholm +It currently uses a wrapper around LibSass 3.6.4. + ## Using Sass in Zola Zola processes any files with the `sass` or `scss` extension in the `sass` diff --git a/docs/content/documentation/content/search.md b/docs/content/documentation/content/search.md index 4fb75bbde3..d0f8779cb5 100644 --- a/docs/content/documentation/content/search.md +++ b/docs/content/documentation/content/search.md @@ -21,6 +21,9 @@ As each site will be different, Zola makes no assumptions about your search func the JavaScript/CSS code to do an actual search and display results. You can look at how this site implements it to get an idea: [search.js](https://github.com/getzola/zola/tree/master/docs/static/search.js). +If you are using a language other than English, you will also need to include the corresponding JavaScript stemmer file. +See for details. + ## Configuring the search index In some cases, the default indexing strategy is not suitable. You can customize which fields to include and whether to truncate the content in the [search configuration](@/documentation/getting-started/configuration.md). 
diff --git a/docs/content/documentation/content/section.md b/docs/content/documentation/content/section.md index 5d0755735a..527451473d 100644 --- a/docs/content/documentation/content/section.md +++ b/docs/content/documentation/content/section.md @@ -48,7 +48,7 @@ description = "" # A draft section is only loaded if the `--drafts` flag is passed to `zola build`, `zola serve` or `zola check`. draft = false -# Used to sort pages by "date", "title", "weight", or "none". See below for more information. +# Used to sort pages by "date", "update_date", "title", "title_bytes", "weight", or "none". See below for more information. sort_by = "none" # Used by the parent section to order its subsections. @@ -79,7 +79,8 @@ paginate_reversed = false # This determines whether to insert a link for each header like the ones you can see on this site if you hover over # a header. # The default template can be overridden by creating an `anchor-link.html` file in the `templates` directory. -# This value can be "left", "right" or "none". +# This value can be "left", "right", "heading" or "none". +# "heading" means the full heading becomes the text of the anchor. insert_anchor_links = "none" # If set to "true", the section pages will be in the search index. This is only used if @@ -142,8 +143,8 @@ create a list of links to the posts, a simple template might look like this: This would iterate over the posts in the order specified by the `sort_by` variable set in the `_index.md` page for the corresponding -section. The `sort_by` variable can be given one of three values: `date`, -`title`, `weight` or `none`. If `sort_by` is not set, the pages will be +section. The `sort_by` variable can be given a few values: `date`, `update_date`, +`title`, `title_bytes`, `weight` or `none`. If `sort_by` is not set, the pages will be sorted in the `none` order, which is not intended for sorted content. 
Any page that is missing the data it needs to be sorted will be ignored and @@ -159,28 +160,37 @@ The `sort_by` front-matter variable can have the following values: ### `date` This will sort all pages by their `date` field, from the most recent (at the -top of the list) to the oldest (at the bottom of the list). Each page will -get `page.earlier` and `page.later` variables that contain the pages with +top of the list) to the oldest (at the bottom of the list). Each page will +get `page.lower` and `page.higher` variables that contain the pages with earlier and later dates, respectively. +### `update_date` +Same as `date` except it will take into account any `updated` date for the pages. + ### `title` This will sort all pages by their `title` field in natural lexical order, as defined by `natural_lexical_cmp` in the [lexical-sort] crate. Each page will -get `page.title_prev` and `page.title_next` variables that contain the pages +get `page.lower` and `page.higher` variables that contain the pages with previous and next titles, respectively. For example, here is a natural lexical ordering: "bachata, BART, bolero, μ-kernel, meter, Métro, Track-2, Track-3, Track-13, underground". Notice how -special characters and numbers are sorted reasonably. This is better than -the standard sorting: "BART, Métro, Track-13, Track-2, Track-3, bachata, -bolero, meter, underground, μ-kernel". +special characters and numbers are sorted reasonably. [lexical-sort]: https://docs.rs/lexical-sort +### `title_bytes` +Same as `title` except it uses the bytes directly to sort. +Natural sorting treats non-ascii +characters like their closest ascii character. This can lead to unexpected +results for languages with different character sets. The last three characters +of the Swedish alphabet, åäö, for example would be considered by the natural +sort as aao. In that case the standard byte-order sort may be more suitable. 
+ ### `weight` This will be sort all pages by their `weight` field, from lightest weight (at the top of the list) to heaviest (at the bottom of the list). Each -page gets `page.lighter` and `page.heavier` variables that contain the +page gets `page.lower` and `page.higher` variables that contain the pages with lighter and heavier weights, respectively. ### Reversed sorting @@ -190,11 +200,7 @@ pages sorted by weight will be sorted from lightest (at the top) to heaviest (at the bottom); pages sorted by date will be sorted from oldest (at the top) to newest (at the bottom). -`reverse` has no effect on: - -* `page.later` / `page.earlier`, -* `page.title_prev` / `page.title_next`, or -* `page.heavier` / `page.lighter`. +`reverse` has no effect on `page.lower` / `page.higher`. If the section is paginated the `paginate_reversed=true` in the front matter of the relevant section should be set instead of using the filter. diff --git a/docs/content/documentation/content/syntax-highlighting.md b/docs/content/documentation/content/syntax-highlighting.md index dd8fb74a53..d248d3d69b 100644 --- a/docs/content/documentation/content/syntax-highlighting.md +++ b/docs/content/documentation/content/syntax-highlighting.md @@ -171,6 +171,9 @@ If your site source is laid out as follows: you would set your `extra_syntaxes_and_themes` to `["syntaxes", "syntaxes/Sublime-Language1"]` to load `lang1.sublime-syntax` and `lang2.sublime-syntax`. +You can see the list of available themes on the [configuration page](@/documentation/getting-started/configuration.md#syntax-highlighting). + + ## Inline VS classed highlighting If you use a highlighting scheme like @@ -382,5 +385,5 @@ If your site source is laid out as follows: └── ... ``` -you would set your `extra_highlight_themes` to `["highlight_themes", "highlight_themes/MyGroovyTheme"]` to load `theme1.tmTheme` and `theme2.tmTheme`. 
+you would set your `extra_syntaxes_and_themes` to `["highlight_themes", "highlight_themes/MyGroovyTheme"]` to load `theme1.tmTheme` and `theme2.tmTheme`. Then choose one of them to use, say theme1, by setting `highlight_theme = theme1`. diff --git a/docs/content/documentation/deployment/github-pages.md b/docs/content/documentation/deployment/github-pages.md index 9b6be23322..f8bfa6d133 100644 --- a/docs/content/documentation/deployment/github-pages.md +++ b/docs/content/documentation/deployment/github-pages.md @@ -26,11 +26,11 @@ git submodule add https://github.com/getzola/after-dark.git themes/after-dark Using *Github Actions* for the deployment of your Zola-Page on Github-Pages is pretty easy. You basically need three things: -1. A *Personal access token* to give the *Github Action* the permission to push into your repository. +1. A *Personal access token* to give the *Github Action* the permission to push into your repository ONLY IF you are publishing from another repo 2. Create the *Github Action*. 3. Check the *Github Pages* section in repository settings. -Let's start with the token. +Let's start with the token. Remember, if you are publishing the site on the same repo, you do not need to follow that step. For creating the token either click on [here](https://github.com/settings/tokens) or go to Settings > Developer Settings > Personal access tokens. Under the *Select Scopes* section, give it *repo* permissions and click *Generate token*. Then copy the token, navigate to your repository and add in the Settings tab the *Secret* `TOKEN` and paste your token in it. 
diff --git a/docs/content/documentation/deployment/netlify.md b/docs/content/documentation/deployment/netlify.md index 022f303740..4304106db3 100644 --- a/docs/content/documentation/deployment/netlify.md +++ b/docs/content/documentation/deployment/netlify.md @@ -13,7 +13,7 @@ Once you are in the admin interface, you can add a site from a Git provider (Git - build command: `zola build` (replace the version number in the variable by the version you want to use) - publish directory: the path to where the `public` directory is - - image selection: `Ubuntu Xenial 16.04 (default)` + - image selection: use the latest - Environment variables: `ZOLA_VERSION` with for example `0.13.0` as value With this setup, your site should be automatically deployed on every commit on master. For `ZOLA_VERSION`, you may diff --git a/docs/content/documentation/getting-started/cli-usage.md b/docs/content/documentation/getting-started/cli-usage.md index 73b0cb2254..842f5b61c4 100644 --- a/docs/content/documentation/getting-started/cli-usage.md +++ b/docs/content/documentation/getting-started/cli-usage.md @@ -74,7 +74,7 @@ You can also specify different addresses for the interface and base_url using `- > By default, devices from the local network **won't** be able to access the served pages. This may be of importance when you want to test page interaction and layout on your mobile device or tablet. If you set the interface to `0.0.0.0` however, devices from your local network will be able to access the served pages by requesting the local ip-address of the machine serving the pages and port used. > -> In order to have everything work correctly, you might also have to alter the `base-url` flag to your local ip. +> In order to have everything work correctly, you might also have to alter the `base-url` flag to your local ip or set it to `/` to use server-base relative paths. Use the `--open` flag to automatically open the locally hosted instance in your web browser. 
@@ -87,6 +87,7 @@ $ zola serve --port 2000 $ zola serve --interface 0.0.0.0 $ zola serve --interface 0.0.0.0 --port 2000 $ zola serve --interface 0.0.0.0 --base-url 127.0.0.1 +$ zola serve --interface 0.0.0.0 --base-url / $ zola serve --interface 0.0.0.0 --port 2000 --output-dir www/public $ zola serve --open ``` diff --git a/docs/content/documentation/getting-started/configuration.md b/docs/content/documentation/getting-started/configuration.md index 02024f3253..f8f2d473fc 100644 --- a/docs/content/documentation/getting-started/configuration.md +++ b/docs/content/documentation/getting-started/configuration.md @@ -52,7 +52,7 @@ minify_html = false # directory is processed. Defaults to none, which means that all asset files are # copied over to the `public` directory. # Example: -# ignored_content = ["*.{graphml,xlsx}", "temp.*"] +# ignored_content = ["*.{graphml,xlsx}", "temp.*", "**/build_folder"] ignored_content = [] # When set to "true", a feed is automatically generated. @@ -93,8 +93,8 @@ build_search_index = false # When set to "true", all code blocks are highlighted. highlight_code = false -# A list of directories used to search for additional `.sublime-syntax` files. -extra_syntaxes = [] +# A list of directories used to search for additional `.sublime-syntax` and `.tmTheme` files. +extra_syntaxes_and_themes = [] # The theme to use for code highlighting. # See below for list of allowed values. 
@@ -130,6 +130,12 @@ skip_anchor_prefixes = [ "https://caniuse.com/", ] +# Treat internal link problems as either "error" or "warn", default is "error" +internal_level = "error" + +# Treat external link problems as either "error" or "warn", default is "error" +external_level = "error" + # Various slugification strategies, see below for details # Defaults to everything being a slug [slugify] diff --git a/docs/content/documentation/getting-started/installation.md b/docs/content/documentation/getting-started/installation.md index 2bd94a622b..c2e3357db1 100644 --- a/docs/content/documentation/getting-started/installation.md +++ b/docs/content/documentation/getting-started/installation.md @@ -111,26 +111,27 @@ Zola is available on [the GitHub registry](https://github.com/getzola/zola/pkgs/ It has no `latest` tag, you will need to specify a [specific version to pull](https://github.com/getzola/zola/pkgs/container/zola/versions). ```sh -$ docker pull ghcr.io/getzola/zola:v0.15.1 +$ docker pull ghcr.io/getzola/zola:v0.16.0 ``` #### Build ```sh -$ docker run -u "$(id -u):$(id -g)" -v $PWD:/app --workdir /app ghcr.io/getzola/zola:v0.15.1 build +$ docker run -u "$(id -u):$(id -g)" -v $PWD:/app --workdir /app ghcr.io/getzola/zola:v0.16.0 build ``` #### Serve ```sh -$ docker run -u "$(id -u):$(id -g)" -v $PWD:/app --workdir /app -p 8080:8080 ghcr.io/getzola/zola:v0.15.1 serve --interface 0.0.0.0 --port 8080 --base-url localhost +$ docker run -u "$(id -u):$(id -g)" -v $PWD:/app --workdir /app -p 8080:8080 ghcr.io/getzola/zola:v0.16.0 serve --interface 0.0.0.0 --port 8080 --base-url localhost ``` You can now browse http://localhost:8080. > To enable live browser reload, you may have to bind to port 1024. Zola searches for an open > port between 1024 and 9000 for live reload. 
The new docker command would be -> `$ docker run -u "$(id -u):$(id -g)" -v $PWD:/app --workdir /app -p 8080:8080 -p 1024:1024 ghcr.io/getzola/zola:v0.15.1 serve --interface 0.0.0.0 --port 8080 --base-url localhost` +> `$ docker run -u "$(id -u):$(id -g)" -v $PWD:/app --workdir /app -p 8080:8080 -p 1024:1024 ghcr.io/getzola/zola:v0.16.0 serve --interface 0.0.0.0 --port 8080 --base-url localhost` + ## Windows diff --git a/docs/content/documentation/getting-started/overview.md b/docs/content/documentation/getting-started/overview.md index ad31fdd89a..3b93a06013 100644 --- a/docs/content/documentation/getting-started/overview.md +++ b/docs/content/documentation/getting-started/overview.md @@ -155,6 +155,7 @@ Let's now create some more templates. In the `templates` directory, create a `bl {{ section.title }}
    + {% for page in section.pages %}
  • {{ page.title }}
  • {% endfor %} diff --git a/docs/content/documentation/templates/overview.md b/docs/content/documentation/templates/overview.md index b33f3ebb2b..0e1f17dfaf 100644 --- a/docs/content/documentation/templates/overview.md +++ b/docs/content/documentation/templates/overview.md @@ -258,7 +258,10 @@ The method returns a map containing `width`, `height` and `format` (the lowercas ``` ### `load_data` -Loads data from a file or URL. Supported file types include *toml*, *json*, *csv* and *bibtex* and only supports UTF-8 encoding. + +Loads data from a file, URL, or string literal. Supported file types include *toml*, *json*, *csv*, *bibtex*, *yaml* +and *xml* and only supports UTF-8 encoding. + Any other file type will be loaded as plain text. The `path` argument specifies the path to a local data file, according to the [File Searching Logic](@/documentation/templates/overview.md#file-searching-logic). @@ -273,6 +276,15 @@ Alternatively, the `url` argument specifies the location of a remote URL to load {% set data = load_data(url="https://en.wikipedia.org/wiki/Commune_of_Paris") %} ``` +Alternatively, the `literal` argument specifies an object literal. Note: if the `format` argument is not specified, then plain text will be what is assumed. + +```jinja2 +{% set data = load_data(literal='{"name": "bob"}', format="json") %} +{{ data["name"] }} +``` + +*Note: the `required` parameter has no effect when used in combination with the `literal` argument.* + The optional `required` boolean argument can be set to false so that missing data (HTTP error or local file not found) does not produce an error, but returns a null value instead. However, permission issues with a local file and invalid data that could not be parsed to the requested data format will still produce an error even with `required=false`. 
The snippet below outputs the HTML from a Wikipedia page, or "No data found" if the page was not reachable, or did not return a successful HTTP code: @@ -282,18 +294,18 @@ The snippet below outputs the HTML from a Wikipedia page, or "No data found" if {% if data %}{{ data | safe }}{% else %}No data found{% endif %} ``` -The optional `format` argument allows you to specify and override which data type is contained -within the specified file or URL. Valid entries are `toml`, `json`, `csv`, `bibtex` -or `plain`. If the `format` argument isn't specified, then the path extension is used. +The optional `format` argument allows you to specify and override which data type is contained within the specified file or URL. +Valid entries are `toml`, `json`, `csv`, `bibtex`, `yaml`, `xml` or `plain`. If the `format` argument isn't specified, then the +path extension is used. In the case of a literal, `plain` is assumed if `format` is unspecified. ```jinja2 {% set data = load_data(path="content/blog/story/data.txt", format="json") %} ``` -Use the `plain` format for when your file has a toml/json/csv extension but you want to load it as plain text. +Use the `plain` format for when your file has a supported extension but you want to load it as plain text. -For *toml* and *json*, the data is loaded into a structure matching the original data file; +For *toml*, *json*, *yaml* and *xml*, the data is loaded into a structure matching the original data file; however, for *csv* there is no native notion of such a structure. Instead, the data is separated into a data structure containing *headers* and *records*. See the example below to see how this works. diff --git a/docs/content/documentation/templates/pages-sections.md b/docs/content/documentation/templates/pages-sections.md index 17fa0f04f8..8486cd6a82 100644 --- a/docs/content/documentation/templates/pages-sections.md +++ b/docs/content/documentation/templates/pages-sections.md @@ -34,14 +34,10 @@ toc: Array
    , word_count: Number; // Based on https://help.medium.com/hc/en-us/articles/214991667-Read-time reading_time: Number; -// `earlier` and `later` are only populated if the section variable `sort_by` is set to `date` -// and only set when rendering the page itself -earlier: Page?; -later: Page?; -// `heavier` and `lighter` are only populated if the section variable `sort_by` is set to `weight` -// and only set when rendering the page itself -heavier: Page?; -lighter: Page?; +// earlier / lighter +lower: Page?; +// later / heavier +higher: Page?; // Year/month/day is only set if the page has a date and month/day are 1-indexed year: Number?; month: Number?; diff --git a/docs/content/documentation/templates/robots.md b/docs/content/documentation/templates/robots.md index 40a108959b..c93ff73bbb 100644 --- a/docs/content/documentation/templates/robots.md +++ b/docs/content/documentation/templates/robots.md @@ -11,6 +11,7 @@ and the default is what most sites want: ```jinja2 User-agent: * +Disallow: Allow: / Sitemap: {{/* get_url(path="sitemap.xml") */}} ``` diff --git a/docs/content/documentation/templates/taxonomies.md b/docs/content/documentation/templates/taxonomies.md index 1cc8220d2b..064623b71b 100644 --- a/docs/content/documentation/templates/taxonomies.md +++ b/docs/content/documentation/templates/taxonomies.md @@ -29,8 +29,7 @@ name: String, paginate_by: Number?; paginate_path: String?; feed: Bool; -lang: String; -permalink: String; +render: Bool; ``` diff --git a/docs/content/themes/anpu/index.md b/docs/content/themes/anpu/index.md index 2e548b405e..4fe3707498 100644 --- a/docs/content/themes/anpu/index.md +++ b/docs/content/themes/anpu/index.md @@ -81,6 +81,6 @@ Example: anpu_date_format = "%e %B %Y" ``` -The formatting uses the standart `date` filter in Tera. The date format options you can use are listed in the [chrono crate documentation](https://tera.netlify.app/docs/#date). +The formatting uses the standard `date` filter in Tera. 
The date format options you can use are listed in the [chrono crate documentation](https://tera.netlify.app/docs/#date). - \ No newline at end of file + diff --git a/docs/content/themes/simplify/index.md b/docs/content/themes/simplify/index.md deleted file mode 100644 index bf19159613..0000000000 --- a/docs/content/themes/simplify/index.md +++ /dev/null @@ -1,88 +0,0 @@ - -+++ -title = "simplify" -description = "A minimal blog theme built with simple.css" -template = "theme.html" -date = 2022-05-02T15:40:10-07:00 - -[extra] -created = 2022-05-02T15:40:10-07:00 -updated = 2022-05-02T15:40:10-07:00 -repository = "https://github.com/tarunjana/simplify.git" -homepage = "https://github.com/tarunjana/simplify" -minimum_version = "0.15.3" -license = "MIT" -demo = "https://simplify-zola.netlify.app" - -[extra.author] -name = "Tarun Jana" -homepage = "https://www.tarunjana.in/" -+++ - -Simplify is a minimal [Zola](https://www.getzola.org/) theme built with -[Simple.css](https://simplecss.org/). - -## Demo - -To have a taste of what Simplify is, please click [here](https://simplify-zola.netlify.app). - -## Screenshot - -![Screenshot](/screenshot.png) - -## Installation - -Install Zola in your machine as described in the [official docs](https://www.getzola.org/documentation/getting-started/installation/) and follow the steps below to use Simplify theme in your site. - -1. Create a new Zola site (if you don't have a Zola site already): - -```bash -zola init my-website -``` - -2. Go to your site root: - -```bash -cd my-website -``` - -3. Initialize an empty git repository: - -```bash -git init -``` - -4. Add simplify theme as a git submodule: - -```bash -git submodule add https://github.com/tarunjana/simplify.git themes/simplify -``` - -5. Add the theme in your `config.toml`: - -```toml -theme = "simplify" -``` - -## Features - -1. Auto dark/light mode according to system preference -2. Inject anything in the `...` tag. -3. Math typesetting with KaTeX. 
- -## Documentation - -Please see the [wiki](https://github.com/tarunjana/simplify/wiki). - -## Credit - -This theme is the product of some awesome projects listed below: - -- [Zola](https://www.getzola.org/) -- [Simple.css](https://simplecss.org/) -- [KaTeX](https://katex.org/) - -## License - -[MIT](https://mit-license.org) - \ No newline at end of file diff --git a/docs/content/themes/simplify/screenshot.png b/docs/content/themes/simplify/screenshot.png deleted file mode 100644 index fd7fd8a79f..0000000000 Binary files a/docs/content/themes/simplify/screenshot.png and /dev/null differ diff --git a/docs/static/search.js b/docs/static/search.js index 68c63007ae..408156555d 100644 --- a/docs/static/search.js +++ b/docs/static/search.js @@ -151,6 +151,7 @@ function initSearch() { } $searchResults.style.display = term === "" ? "none" : "block"; $searchResultsItems.innerHTML = ""; + currentTerm = term; if (term === "") { return; } @@ -161,7 +162,6 @@ function initSearch() { return; } - currentTerm = term; for (var i = 0; i < Math.min(results.length, MAX_ITEMS); i++) { var item = document.createElement("li"); item.innerHTML = formatSearchResultItem(results[i], term.split(" ")); diff --git a/netlify.toml b/netlify.toml index 4b8c3a7e28..4169c02cc7 100644 --- a/netlify.toml +++ b/netlify.toml @@ -4,7 +4,7 @@ command = "zola build" [build.environment] - ZOLA_VERSION = "0.14.0" + ZOLA_VERSION = "0.15.3" [context.deploy-preview] command = "zola build --base-url $DEPLOY_PRIME_URL" diff --git a/snapcraft.yaml b/snapcraft.yaml index 52ca3a995f..a59ba432ec 100644 --- a/snapcraft.yaml +++ b/snapcraft.yaml @@ -1,5 +1,5 @@ name: zola -version: 0.15.3 +version: 0.16.0 summary: A fast static site generator in a single binary with everything built-in. description: | A fast static site generator in a single binary with everything built-in. 
@@ -21,7 +21,7 @@ parts: zola: source-type: git source: https://github.com/getzola/zola.git - source-tag: v0.15.3 + source-tag: v0.16.0 plugin: rust rust-channel: stable build-packages: diff --git a/src/cli.rs b/src/cli.rs index e6d6332fff..82b1f9084e 100644 --- a/src/cli.rs +++ b/src/cli.rs @@ -1,102 +1,86 @@ -use clap::{crate_authors, crate_description, crate_version, App, AppSettings, Arg, SubCommand}; - -pub fn build_cli() -> App<'static, 'static> { - App::new("zola") - .version(crate_version!()) - .author(crate_authors!()) - .about(crate_description!()) - .setting(AppSettings::SubcommandRequiredElseHelp) - .arg( - Arg::with_name("root") - .short("r") - .long("root") - .takes_value(true) - .default_value(".") - .help("Directory to use as root of project") - ) - .arg( - Arg::with_name("config") - .short("c") - .long("config") - .takes_value(true) - .help("Path to a config file other than config.toml in the root of project") - ) - .subcommands(vec![ - SubCommand::with_name("init") - .about("Create a new Zola project") - .args(&[ - Arg::with_name("name") - .default_value(".") - .help("Name of the project. 
Will create a new directory with that name in the current directory"), - Arg::with_name("force") - .short("f") - .long("force") - .takes_value(false) - .help("Force creation of project even if directory is non-empty") - ]), - SubCommand::with_name("build") - .about("Deletes the output directory if there is one and builds the site") - .args(&[ - Arg::with_name("base_url") - .short("u") - .long("base-url") - .takes_value(true) - .help("Force the base URL to be that value (default to the one in config.toml)"), - Arg::with_name("output_dir") - .short("o") - .long("output-dir") - .takes_value(true) - .help("Outputs the generated site in the given path (by default 'public' dir in project root)"), - Arg::with_name("drafts") - .long("drafts") - .takes_value(false) - .help("Include drafts when loading the site"), - ]), - SubCommand::with_name("serve") - .about("Serve the site. Rebuild and reload on change automatically") - .args(&[ - Arg::with_name("interface") - .short("i") - .long("interface") - .takes_value(true) - .help("Interface to bind on (default: 127.0.0.1)"), - Arg::with_name("port") - .short("p") - .long("port") - .takes_value(true) - .help("Which port to use (default: 1111)"), - Arg::with_name("output_dir") - .short("o") - .long("output-dir") - .takes_value(true) - .help("Outputs assets of the generated site in the given path (by default 'public' dir in project root). 
HTML/XML will be stored in memory."), - Arg::with_name("base_url") - .short("u") - .long("base-url") - .takes_value(true) - .help("Changes the base_url (default: 127.0.0.1)"), - Arg::with_name("drafts") - .long("drafts") - .takes_value(false) - .help("Include drafts when loading the site"), - Arg::with_name("open") - .short("O") - .long("open") - .takes_value(false) - .help("Open site in the default browser"), - Arg::with_name("fast") - .short("f") - .long("fast") - .takes_value(false) - .help("Only rebuild the minimum on change - useful when working on a specific page/section"), - ]), - SubCommand::with_name("check") - .about("Try building the project without rendering it. Checks links") - .args(&[ - Arg::with_name("drafts") - .long("drafts") - .takes_value(false) - .help("Include drafts when loading the site"), - ]) - ]) +use std::path::PathBuf; + +use clap::{Parser, Subcommand}; + +#[derive(Parser)] +#[clap(version, author, about)] +pub struct Cli { + /// Directory to use as root of project + #[clap(short = 'r', long, default_value = ".")] + pub root: PathBuf, + + /// Path to a config file other than config.toml in the root of project + #[clap(short = 'c', long, default_value = "config.toml")] + pub config: PathBuf, + + #[clap(subcommand)] + pub command: Command, +} + +#[derive(Subcommand)] +pub enum Command { + /// Create a new Zola project + Init { + /// Name of the project. 
Will create a new directory with that name in the current directory + #[clap(default_value = ".")] + name: String, + + /// Force creation of project even if directory is non-empty + #[clap(short = 'f', long)] + force: bool, + }, + + /// Deletes the output directory if there is one and builds the site + Build { + /// Force the base URL to be that value (defaults to the one in config.toml) + #[clap(short = 'u', long)] + base_url: Option, + + /// Outputs the generated site in the given path (by default 'public' dir in project root) + #[clap(short = 'o', long)] + output_dir: Option, + + /// Include drafts when loading the site + #[clap(long)] + drafts: bool, + }, + + /// Serve the site. Rebuild and reload on change automatically + Serve { + /// Interface to bind on + #[clap(short = 'i', long, default_value = "127.0.0.1")] + interface: String, + + /// Which port to use + #[clap(short = 'p', long, default_value_t = 1111)] + port: u16, + + /// Outputs assets of the generated site in the given path (by default 'public' dir in project root). + /// HTML/XML will be stored in memory. + #[clap(short = 'o', long)] + output_dir: Option, + + /// Changes the base_url + #[clap(short = 'u', long, default_value = "127.0.0.1")] + base_url: String, + + /// Include drafts when loading the site + #[clap(long)] + drafts: bool, + + /// Open site in the default browser + #[clap(short = 'O', long)] + open: bool, + + /// Only rebuild the minimum on change - useful when working on a specific page/section + #[clap(short = 'f', long)] + fast: bool, + }, + + /// Try to build the project without rendering it. 
Checks links + Check { + /// Include drafts when loading the site + #[clap(long)] + drafts: bool, + }, } diff --git a/src/cmd/build.rs b/src/cmd/build.rs index 21dd456a1f..6bfe603d88 100644 --- a/src/cmd/build.rs +++ b/src/cmd/build.rs @@ -3,7 +3,7 @@ use std::path::Path; use errors::{Error, Result}; use site::Site; -use crate::console; +use crate::messages; use crate::prompt::ask_bool_timeout; const BUILD_PROMPT_TIMEOUT_MILLIS: u64 = 10_000; @@ -47,7 +47,7 @@ pub fn build( site.include_drafts(); } site.load()?; - console::notify_site_size(&site); - console::warn_about_ignored_pages(&site); + messages::notify_site_size(&site); + messages::warn_about_ignored_pages(&site); site.build() } diff --git a/src/cmd/check.rs b/src/cmd/check.rs index 1ad57b925d..46522deab9 100644 --- a/src/cmd/check.rs +++ b/src/cmd/check.rs @@ -3,7 +3,7 @@ use std::path::{Path, PathBuf}; use errors::Result; use site::Site; -use crate::console; +use crate::messages; pub fn check( root_dir: &Path, @@ -23,7 +23,7 @@ pub fn check( site.include_drafts(); } site.load()?; - console::check_site_summary(&site); - console::warn_about_ignored_pages(&site); + messages::check_site_summary(&site); + messages::warn_about_ignored_pages(&site); Ok(()) } diff --git a/src/cmd/init.rs b/src/cmd/init.rs index 579e7b7e67..3af0f89f9b 100644 --- a/src/cmd/init.rs +++ b/src/cmd/init.rs @@ -4,7 +4,6 @@ use std::path::Path; use errors::{bail, Result}; use utils::fs::create_file; -use crate::console; use crate::prompt::{ask_bool, ask_url}; const CONFIG: &str = r#" diff --git a/src/cmd/serve.rs b/src/cmd/serve.rs index dd4a4b446f..0242a9a075 100644 --- a/src/cmd/serve.rs +++ b/src/cmd/serve.rs @@ -23,7 +23,7 @@ use std::fs::{read_dir, remove_dir_all}; use std::net::{SocketAddrV4, TcpListener}; -use std::path::{Path, PathBuf}; +use std::path::{Path, PathBuf, MAIN_SEPARATOR}; use std::sync::mpsc::channel; use std::thread; use std::time::{Duration, Instant}; @@ -33,20 +33,23 @@ use hyper::server::Server; use 
hyper::service::{make_service_fn, service_fn}; use hyper::{Body, Method, Request, Response, StatusCode}; use mime_guess::from_path as mimetype_from_path; +use time::macros::format_description; +use time::{OffsetDateTime, UtcOffset}; -use chrono::prelude::*; +use libs::percent_encoding; +use libs::serde_json; use notify::{watcher, RecursiveMode, Watcher}; use ws::{Message, Sender, WebSocket}; -use errors::{Error as ZolaError, Result}; -use globset::GlobSet; +use errors::{anyhow, Context, Result}; +use libs::globset::GlobSet; +use libs::relative_path::{RelativePath, RelativePathBuf}; use pathdiff::diff_paths; -use relative_path::{RelativePath, RelativePathBuf}; use site::sass::compile_sass; use site::{Site, SITE_CONTENT}; use utils::fs::copy_file; -use crate::console; +use crate::messages; use std::ffi::OsStr; #[derive(Debug, PartialEq)] @@ -225,7 +228,7 @@ fn rebuild_done_handling(broadcaster: &Sender, res: Result<()>, reload_path: &st )) .unwrap(); } - Err(e) => console::unravel_errors("Failed to build the site", &e), + Err(e) => messages::unravel_errors("Failed to build the site", &e), } } @@ -243,14 +246,18 @@ fn create_new_site( SITE_CONTENT.write().unwrap().clear(); let mut site = Site::new(root_dir, config_file)?; - - let base_address = format!("{}:{}", base_url, interface_port); let address = format!("{}:{}", interface, interface_port); - let base_url = if site.config.base_url.ends_with('/') { - format!("http://{}/", base_address) + let base_url = if base_url == "/" { + String::from("/") } else { - format!("http://{}", base_address) + let base_address = format!("{}:{}", base_url, interface_port); + + if site.config.base_url.ends_with('/') { + format!("http://{}/", base_address) + } else { + format!("http://{}", base_address) + } }; site.enable_serve_mode(); @@ -267,8 +274,8 @@ fn create_new_site( } else { site.enable_live_reload(interface_port); } - console::notify_site_size(&site); - console::warn_about_ignored_pages(&site); + 
messages::notify_site_size(&site); + messages::warn_about_ignored_pages(&site); site.build()?; Ok((site, address)) } @@ -284,6 +291,7 @@ pub fn serve( open: bool, include_drafts: bool, fast_rebuild: bool, + utc_offset: UtcOffset, ) -> Result<()> { let start = Instant::now(); let (mut site, address) = create_new_site( @@ -296,19 +304,20 @@ pub fn serve( include_drafts, None, )?; - console::report_elapsed_time(start); + messages::report_elapsed_time(start); // Stop right there if we can't bind to the address let bind_address: SocketAddrV4 = match address.parse() { Ok(a) => a, - Err(_) => return Err(format!("Invalid address: {}.", address).into()), + Err(_) => return Err(anyhow!("Invalid address: {}.", address)), }; if (TcpListener::bind(&bind_address)).is_err() { - return Err(format!("Cannot start server on address {}.", address).into()); + return Err(anyhow!("Cannot start server on address {}.", address)); } let config_path = PathBuf::from(config_file); - let config_path_rel = diff_paths(&config_path, &root_dir).unwrap_or(config_path.clone()); + let config_path_rel = + diff_paths(&config_path, &root_dir).unwrap_or_else(|| config_path.clone()); // An array of (path, WatchMode) where the path should be watched for changes, // and the WatchMode value indicates whether this file/folder must exist for @@ -342,7 +351,7 @@ pub fn serve( if should_watch { watcher .watch(root_dir.join(entry), RecursiveMode::Recursive) - .map_err(|e| ZolaError::chain(format!("Can't watch `{}` for changes in folder `{}`. Does it exist, and do you have correct permissions?", entry, root_dir.display()), e))?; + .with_context(|| format!("Can't watch `{}` for changes in folder `{}`. Does it exist, and do you have correct permissions?", entry, root_dir.display()))?; watchers.push(entry.to_string()); } } @@ -410,7 +419,7 @@ pub fn serve( let ws_server = ws_server .bind(&*ws_address) - .map_err(|_| format!("Cannot bind to address {} for the websocket server. 
Maybe the port is already in use?", &ws_address))?; + .map_err(|_| anyhow!("Cannot bind to address {} for the websocket server. Maybe the port is already in use?", &ws_address))?; thread::spawn(move || { ws_server.run().unwrap(); @@ -419,7 +428,12 @@ pub fn serve( broadcaster }; - println!("Listening for changes in {}{{{}}}", root_dir.display(), watchers.join(",")); + println!( + "Listening for changes in {}{}{{{}}}", + root_dir.display(), + MAIN_SEPARATOR, + watchers.join(",") + ); println!("Press Ctrl+C to stop\n"); // Delete the output folder on ctrl+C @@ -495,7 +509,7 @@ pub fn serve( Some(s) } Err(e) => { - console::unravel_errors("Failed to build the site", &e); + messages::unravel_errors("Failed to build the site", &e); None } }; @@ -521,10 +535,17 @@ pub fn serve( if path.is_dir() && is_folder_empty(&path) { continue; } - println!( - "Change detected @ {}", - Local::now().format("%Y-%m-%d %H:%M:%S").to_string() - ); + + let format = + format_description!("[year]-[month]-[day] [hour]:[minute]:[second]"); + let current_time = + OffsetDateTime::now_utc().to_offset(utc_offset).format(&format); + if let Ok(time_str) = current_time { + println!("Change detected @ {}", time_str); + } else { + // if formatting fails for some reason + println!("Change detected"); + }; let start = Instant::now(); match detect_change_kind(root_dir, &path, &config_path) { @@ -544,7 +565,7 @@ pub fn serve( } else { // an asset changed? a folder renamed? // should we make it smarter so it doesn't reload the whole site? 
- Err("dummy".into()) + Err(anyhow!("dummy")) }; if res.is_err() { @@ -607,7 +628,7 @@ pub fn serve( } } }; - console::report_elapsed_time(start); + messages::report_elapsed_time(start); } _ => {} } diff --git a/src/console.rs b/src/console.rs deleted file mode 100644 index 84fcde0f9a..0000000000 --- a/src/console.rs +++ /dev/null @@ -1,130 +0,0 @@ -use lazy_static::lazy_static; - -use std::env; -use std::error::Error as StdError; -use std::io::Write; -use std::time::Instant; - -use chrono::Duration; -use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor}; - -use errors::Error; -use site::Site; - -lazy_static! { - /// Termcolor color choice. - /// We do not rely on ColorChoice::Auto behavior - /// as the check is already performed by has_color. - static ref COLOR_CHOICE: ColorChoice = - if has_color() { - ColorChoice::Always - } else { - ColorChoice::Never - }; -} - -pub fn info(message: &str) { - colorize(message, ColorSpec::new().set_bold(true)); -} - -pub fn warn(message: &str) { - colorize(message, ColorSpec::new().set_bold(true).set_fg(Some(Color::Yellow))); -} - -pub fn success(message: &str) { - colorize(message, ColorSpec::new().set_bold(true).set_fg(Some(Color::Green))); -} - -pub fn error(message: &str) { - colorize(message, ColorSpec::new().set_bold(true).set_fg(Some(Color::Red))); -} - -/// Print a colorized message to stdout -fn colorize(message: &str, color: &ColorSpec) { - let mut stdout = StandardStream::stdout(*COLOR_CHOICE); - stdout.set_color(color).unwrap(); - write!(&mut stdout, "{}", message).unwrap(); - stdout.set_color(&ColorSpec::new()).unwrap(); - writeln!(&mut stdout).unwrap(); -} - -/// Display in the console the number of pages/sections in the site -pub fn notify_site_size(site: &Site) { - let library = site.library.read().unwrap(); - println!( - "-> Creating {} pages ({} orphan) and {} sections", - library.pages().len(), - library.get_all_orphan_pages().len(), - library.sections().len() - 1, // -1 since we do not 
count the index as a section there - ); -} - -/// Display in the console only the number of pages/sections in the site -pub fn check_site_summary(site: &Site) { - let library = site.library.read().unwrap(); - let orphans = library.get_all_orphan_pages(); - println!( - "-> Site content: {} pages ({} orphan), {} sections", - library.pages().len(), - orphans.len(), - library.sections().len() - 1, // -1 since we do not count the index as a section there - ); - - for orphan in orphans { - warn(&format!("Orphan page found: {}", orphan.path)); - } -} - -/// Display a warning in the console if there are ignored pages in the site -pub fn warn_about_ignored_pages(site: &Site) { - let library = site.library.read().unwrap(); - let ignored_pages: Vec<_> = library - .sections_values() - .iter() - .flat_map(|s| s.ignored_pages.iter().map(|k| library.get_page_by_key(*k).file.path.clone())) - .collect(); - - if !ignored_pages.is_empty() { - warn(&format!( - "{} page(s) ignored (missing date or weight in a sorted section):", - ignored_pages.len() - )); - for path in ignored_pages { - warn(&format!("- {}", path.display())); - } - } -} - -/// Print the time elapsed rounded to 1 decimal -pub fn report_elapsed_time(instant: Instant) { - let duration_ms = Duration::from_std(instant.elapsed()).unwrap().num_milliseconds() as f64; - - if duration_ms < 1000.0 { - success(&format!("Done in {}ms.\n", duration_ms)); - } else { - let duration_sec = duration_ms / 1000.0; - success(&format!("Done in {:.1}s.\n", ((duration_sec * 10.0).round() / 10.0))); - } -} - -/// Display an error message and the actual error(s) -pub fn unravel_errors(message: &str, error: &Error) { - if !message.is_empty() { - self::error(message); - } - self::error(&format!("Error: {}", error)); - let mut cause = error.source(); - while let Some(e) = cause { - self::error(&format!("Reason: {}", e)); - cause = e.source(); - } -} - -/// Check whether to output colors -fn has_color() -> bool { - let use_colors = 
env::var("CLICOLOR").unwrap_or_else(|_| "1".to_string()) != "0" - && env::var("NO_COLOR").is_err(); - let force_colors = env::var("CLICOLOR_FORCE").unwrap_or_else(|_| "0".to_string()) != "0"; - - force_colors || use_colors && atty::is(atty::Stream::Stdout) -} diff --git a/src/main.rs b/src/main.rs index 00991967cd..22fa101cec 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,124 +1,104 @@ -use std::env; use std::path::{Path, PathBuf}; use std::time::Instant; +use cli::{Cli, Command}; use utils::net::{get_available_port, port_is_available}; +use clap::Parser; +use time::UtcOffset; + mod cli; mod cmd; -mod console; +mod messages; mod prompt; -fn main() { - let matches = cli::build_cli().get_matches(); +fn get_config_file_path(dir: &Path, config_path: &Path) -> (PathBuf, PathBuf) { + let root_dir = dir + .ancestors() + .find(|a| a.join(&config_path).exists()) + .unwrap_or_else(|| panic!("could not find directory containing config file")); - let root_dir = match matches.value_of("root").unwrap() { - "." 
=> env::current_dir().unwrap(), - path => PathBuf::from(path) - .canonicalize() - .unwrap_or_else(|_| panic!("Cannot find root directory: {}", path)), - }; - let config_file = match matches.value_of("config") { - Some(path) => PathBuf::from(path) - .canonicalize() - .unwrap_or_else(|_| panic!("Cannot find config file: {}", path)), - None => root_dir.join("config.toml"), - }; + // if we got here we found root_dir so config file should exist so we can unwrap safely + let config_file = root_dir + .join(&config_path) + .canonicalize() + .unwrap_or_else(|_| panic!("could not find directory containing config file")); + (root_dir.to_path_buf(), config_file) +} - match matches.subcommand() { - ("init", Some(matches)) => { - let force = matches.is_present("force"); - match cmd::create_new_project(matches.value_of("name").unwrap(), force) { - Ok(()) => (), - Err(e) => { - console::unravel_errors("Failed to create the project", &e); - ::std::process::exit(1); - } - }; +fn main() { + let cli = Cli::parse(); + let cli_dir: PathBuf = cli.root.canonicalize().unwrap_or_else(|_| { + panic!("Could not find canonical path of root dir: {}", cli.root.display()) + }); + + match cli.command { + Command::Init { name, force } => { + if let Err(e) = cmd::create_new_project(&name, force) { + messages::unravel_errors("Failed to create the project", &e); + std::process::exit(1); + } } - ("build", Some(matches)) => { + Command::Build { base_url, output_dir, drafts } => { console::info("Building site..."); let start = Instant::now(); - let output_dir = matches.value_of("output_dir").map(|output_dir| Path::new(output_dir)); + let (root_dir, config_file) = get_config_file_path(&cli_dir, &cli.config); match cmd::build( &root_dir, &config_file, - matches.value_of("base_url"), - output_dir, - matches.is_present("drafts"), + base_url.as_deref(), + output_dir.as_deref(), + drafts, ) { - Ok(()) => console::report_elapsed_time(start), + Ok(()) => messages::report_elapsed_time(start), Err(e) => { - 
console::unravel_errors("Failed to build the site", &e); - ::std::process::exit(1); + messages::unravel_errors("Failed to build the site", &e); + std::process::exit(1); } - }; + } } - ("serve", Some(matches)) => { - let interface = matches.value_of("interface").unwrap_or("127.0.0.1"); - let mut port: u16 = match matches.value_of("port").unwrap_or("1111").parse() { - Ok(x) => x, - Err(_) => { - console::error("The request port needs to be an integer"); - ::std::process::exit(1); - } - }; - let open = matches.is_present("open"); - let include_drafts = matches.is_present("drafts"); - let fast = matches.is_present("fast"); - - // Default one + Command::Serve { interface, mut port, output_dir, base_url, drafts, open, fast } => { if port != 1111 && !port_is_available(port) { console::error("The requested port is not available"); - ::std::process::exit(1); + std::process::exit(1); } if !port_is_available(port) { - port = if let Some(p) = get_available_port(1111) { - p - } else { - console::error("No port available."); - ::std::process::exit(1); - } + port = get_available_port(1111).unwrap_or_else(|| { + console::error("No port available"); + std::process::exit(1); + }); } - let output_dir = matches.value_of("output_dir").map(|output_dir| Path::new(output_dir)); - let base_url = matches.value_of("base_url").unwrap_or("127.0.0.1"); + + let (root_dir, config_file) = get_config_file_path(&cli_dir, &cli.config); console::info("Building site..."); - match cmd::serve( + if let Err(e) = cmd::serve( &root_dir, - interface, + &interface, port, - output_dir, - base_url, + output_dir.as_deref(), + &base_url, &config_file, open, - include_drafts, + drafts, fast, + UtcOffset::current_local_offset().unwrap_or(UtcOffset::UTC), ) { - Ok(()) => (), - Err(e) => { - console::unravel_errors("", &e); - ::std::process::exit(1); - } - }; + messages::unravel_errors("Failed to serve the site", &e); + std::process::exit(1); + } } - ("check", Some(matches)) => { + Command::Check { drafts } => { 
console::info("Checking site..."); let start = Instant::now(); - match cmd::check( - &root_dir, - &config_file, - matches.value_of("base_path"), - matches.value_of("base_url"), - matches.is_present("drafts"), - ) { - Ok(()) => console::report_elapsed_time(start), + let (root_dir, config_file) = get_config_file_path(&cli_dir, &cli.config); + match cmd::check(&root_dir, &config_file, None, None, drafts) { + Ok(()) => messages::report_elapsed_time(start), Err(e) => { - console::unravel_errors("Failed to check the site", &e); - ::std::process::exit(1); + messages::unravel_errors("Failed to check the site", &e); + std::process::exit(1); } - }; + } } - _ => unreachable!(), } } diff --git a/src/messages.rs b/src/messages.rs new file mode 100644 index 0000000000..1ad4d7d410 --- /dev/null +++ b/src/messages.rs @@ -0,0 +1,79 @@ +use libs::time::Duration; +use std::convert::TryInto; +use std::time::Instant; + +use errors::Error; +use site::Site; + +/// Display in the console the number of pages/sections in the site +pub fn notify_site_size(site: &Site) { + let library = site.library.read().unwrap(); + println!( + "-> Creating {} pages ({} orphan) and {} sections", + library.pages.len(), + library.get_all_orphan_pages().len(), + library.sections.len() - 1, // -1 since we do not count the index as a section there + ); +} + +/// Display in the console only the number of pages/sections in the site +pub fn check_site_summary(site: &Site) { + let library = site.library.read().unwrap(); + let orphans = library.get_all_orphan_pages(); + println!( + "-> Site content: {} pages ({} orphan), {} sections", + library.pages.len(), + orphans.len(), + library.sections.len() - 1, // -1 since we do not count the index as a section there + ); + + for orphan in orphans { + console::warn(&format!("Orphan page found: {}", orphan.path)); + } +} + +/// Display a warning in the console if there are ignored pages in the site +pub fn warn_about_ignored_pages(site: &Site) { + let library = 
site.library.read().unwrap(); + let ignored_pages: Vec<_> = library + .sections + .values() + .flat_map(|s| s.ignored_pages.iter().map(|k| library.pages[k].file.path.clone())) + .collect(); + + if !ignored_pages.is_empty() { + console::warn(&format!( + "{} page(s) ignored (missing date or weight in a sorted section):", + ignored_pages.len() + )); + for path in ignored_pages { + console::warn(&format!("- {}", path.display())); + } + } +} + +/// Print the time elapsed rounded to 1 decimal +pub fn report_elapsed_time(instant: Instant) { + let duration: Duration = instant.elapsed().try_into().unwrap(); + let duration_ms = duration.whole_milliseconds() as f64; + + if duration_ms < 1000.0 { + console::success(&format!("Done in {}ms.\n", duration_ms)); + } else { + let duration_sec = duration_ms / 1000.0; + console::success(&format!("Done in {:.1}s.\n", ((duration_sec * 10.0).round() / 10.0))); + } +} + +/// Display an error message and the actual error(s) +pub fn unravel_errors(message: &str, error: &Error) { + if !message.is_empty() { + console::error(message); + } + console::error(&error.to_string()); + let mut cause = error.source(); + while let Some(e) = cause { + console::error(&format!("Reason: {}", e)); + cause = e.source(); + } +} diff --git a/src/prompt.rs b/src/prompt.rs index 23a993ecd1..2820f48f79 100644 --- a/src/prompt.rs +++ b/src/prompt.rs @@ -1,10 +1,9 @@ use std::io::{self, BufRead, Write}; use std::time::Duration; -use url::Url; +use libs::url::Url; -use crate::console; -use errors::Result; +use errors::{anyhow, Result}; /// Wait for user input and return what they typed fn read_line() -> Result { @@ -14,7 +13,7 @@ fn read_line() -> Result { lines .next() .and_then(|l| l.ok()) - .ok_or_else(|| "unable to read from stdin for confirmation".into()) + .ok_or_else(|| anyhow!("unable to read from stdin for confirmation")) } /// Ask a yes/no question to the user diff --git a/sublime/syntaxes/newlines.packdump b/sublime/syntaxes/newlines.packdump index 
14f7026990..26dbb77570 100644 Binary files a/sublime/syntaxes/newlines.packdump and b/sublime/syntaxes/newlines.packdump differ diff --git a/sublime/themes/all.themedump b/sublime/themes/all.themedump index 40aeff7c03..84e207cd0f 100644 Binary files a/sublime/themes/all.themedump and b/sublime/themes/all.themedump differ diff --git a/test_site/config.toml b/test_site/config.toml index 58f650ed13..21b9e74949 100644 --- a/test_site/config.toml +++ b/test_site/config.toml @@ -24,6 +24,7 @@ anchors = "on" [link_checker] skip_prefixes = [ "http://[2001:db8::]/", + "http://invaliddomain", ] skip_anchor_prefixes = [ diff --git a/test_site/content/posts/skip_prefixes.md b/test_site/content/posts/skip_prefixes.md new file mode 100644 index 0000000000..49f1ac301c --- /dev/null +++ b/test_site/content/posts/skip_prefixes.md @@ -0,0 +1,4 @@ ++++ ++++ + +[test skip 1](http://invaliddomain