diff --git a/.forgejo/workflows/publish.yaml b/.forgejo/workflows/publish.yaml index 04ab097..fc546f8 100644 --- a/.forgejo/workflows/publish.yaml +++ b/.forgejo/workflows/publish.yaml @@ -10,8 +10,7 @@ jobs: image: rust:slim steps: - name: Install action dependencies - run: | - apt-get install --no-install-recommends --update -y nodejs curl git + run: apt-get install --no-install-recommends --update -y nodejs curl git - name: Checkout code uses: actions/checkout@v6 @@ -22,13 +21,19 @@ jobs: run: | rustup component add llvm-tools-preview rustup component add --toolchain nightly rustfmt clippy + rustup target add x86_64-unknown-linux-gnu rustup target add x86_64-unknown-linux-musl - name: Setup additional tooling run: .forgejo/workflows/setup-tools.sh + - name: Setup CI user + run: | + useradd -m ci && chown -R ci:ci . + git config --global --add safe.directory "$PWD" + - name: Run all assessments - run: just verify + run: just ci verify - name: Build x64 glibc release binary run: just release-build x86_64-unknown-linux-gnu diff --git a/.forgejo/workflows/setup-tools.sh b/.forgejo/workflows/setup-tools.sh index cd38738..ff6e814 100755 --- a/.forgejo/workflows/setup-tools.sh +++ b/.forgejo/workflows/setup-tools.sh @@ -2,8 +2,8 @@ set -eu -JUST_VERSION="1.45.0" -JUST_SHA256SUM="dc3f958aaf8c6506dd90426e9b03f86dd15e74a6467ee0e54929f750af3d9e49" +JUST_VERSION="1.47.1" +JUST_SHA256SUM="3cb931ae25860f261ee373f32ede3b772ac91f14f588e4071576d3ffcf1a16fd" CARGO_LLVM_COV_VERSION="0.6.21" CARGO_LLVM_COV_SHA256SUM="57f491aedf7cdb261538ceb49cbb1ee9d27df7ca205a5e1a009caaf5cb911afb" CARGO_AUDIT_VERSION="0.22.1" diff --git a/.forgejo/workflows/verify.yaml b/.forgejo/workflows/verify.yaml index a8ebe71..2d4e590 100644 --- a/.forgejo/workflows/verify.yaml +++ b/.forgejo/workflows/verify.yaml @@ -34,17 +34,22 @@ jobs: - name: Setup additional tooling run: .forgejo/workflows/setup-tools.sh + - name: Setup CI user + run: | + useradd -m ci && chown -R ci:ci . 
+ git config --global --add safe.directory "$PWD" + - name: Build - run: just build + run: just ci build - name: Format - run: just format-assess + run: just ci format-assess - name: Lint - run: just lint-assess + run: just ci lint-assess - name: Cargo check - run: just check + run: just ci check - name: Test - run: just test + run: just ci test - name: Assess test coverage - run: just cover-assess + run: just ci cover-assess diff --git a/.justfile b/.justfile index 14705dd..92e5a80 100644 --- a/.justfile +++ b/.justfile @@ -360,7 +360,7 @@ alias sa := security-assess # Find TODOs [group: 'assess'] todos-assess: - ! rg -M 200 --max-columns-preview TODO src + ! grep -rn TODO src alias ta := todos-assess @@ -435,6 +435,12 @@ choose: alias ch := choose +[script, private] +ci recipe: + su ci -c "just {{ recipe }}" + +## VARIABLES + export CARGO_TERM_COLOR := 'always' musl_target := "x86_64-unknown-linux-musl" @@ -454,4 +460,7 @@ lockfile_version := ``` | grep version | cut -d '"' -f 2 ``` +## OPTIONS + set unstable +set lazy diff --git a/Cargo.lock b/Cargo.lock index 87d028d..edb0fee 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -259,7 +259,7 @@ dependencies = [ [[package]] name = "en" -version = "0.3.1-alpha" +version = "0.4.0-alpha" dependencies = [ "axum", "serde", @@ -854,9 +854,9 @@ dependencies = [ [[package]] name = "rustls-webpki" -version = "0.103.9" +version = "0.103.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7df23109aa6c1567d1c575b9952556388da57401e4ace1d15f79eedad0d8f53" +checksum = "df33b2b81ac578cabaf06b89b0631153a3f416b0a886e8a7a1707fb51abbd1ef" dependencies = [ "ring", "rustls-pki-types", @@ -1209,9 +1209,9 @@ checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" [[package]] name = "ureq" -version = "3.2.0" +version = "3.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fdc97a28575b85cfedf2a7e7d3cc64b3e11bd8ac766666318003abbacc7a21fc" +checksum = 
"dea7109cdcd5864d4eeb1b58a1648dc9bf520360d7af16ec26d0a9354bafcfc0" dependencies = [ "base64", "flate2", @@ -1220,15 +1220,15 @@ dependencies = [ "rustls", "rustls-pki-types", "ureq-proto", - "utf-8", + "utf8-zero", "webpki-roots", ] [[package]] name = "ureq-proto" -version = "0.5.3" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d81f9efa9df032be5934a46a068815a10a042b494b6a58cb0a1a97bb5467ed6f" +checksum = "e994ba84b0bd1b1b0cf92878b7ef898a5c1760108fe7b6010327e274917a808c" dependencies = [ "base64", "http", @@ -1237,10 +1237,10 @@ dependencies = [ ] [[package]] -name = "utf-8" -version = "0.7.6" +name = "utf8-zero" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" +checksum = "b8c0a043c9540bae7c578c88f91dda8bd82e59ae27c21baca69c8b191aaf5a6e" [[package]] name = "version_check" diff --git a/Cargo.toml b/Cargo.toml index d42bdc2..bac9f33 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "en" -version = "0.3.1-alpha" +version = "0.4.0-alpha" description = "A non-linear writing instrument." 
license = "AGPL-3.0-only" diff --git a/src/dev/test.rs b/src/dev/test.rs index a644e30..8a4ec15 100644 --- a/src/dev/test.rs +++ b/src/dev/test.rs @@ -161,6 +161,18 @@ mod tests { assert!(format!("{error}").contains(io_payload)); } + #[test] + fn display_contains_str_from_tera_error() { + let payload = "pA6B0LhiiDMNCl1J"; + let tera_payload = "5ob8H594dCAQ8pfk"; + let error = Error { + message: payload.to_string(), + inner_tera: Some(tera::Error::msg(tera_payload)), + inner_io: None, + }; + assert!(format!("{error}").contains(payload)); + assert!(format!("{error}").contains(tera_payload)); + } #[test] fn from_io_error() { let payload = "YgmTKBm3VtHt5h3x9"; @@ -169,6 +181,15 @@ mod tests { assert!(error.message.contains(payload)); } + + #[test] + fn from_tera_error() { + let payload = "XEB3dcvYuz0M1lYt"; + let tera_error = tera::Error::msg(payload); + let error = Error::from(tera_error); + + assert!(error.message.contains(payload)); + } } #[cfg(test)] diff --git a/src/graph.rs b/src/graph.rs index 7e24eb0..67a32b5 100644 --- a/src/graph.rs +++ b/src/graph.rs @@ -1,4 +1,4 @@ -use std::{collections::HashMap, path::PathBuf}; +use std::{collections::HashMap, io, path::PathBuf}; pub use edge::Edge; pub use meta::{Config, Meta}; @@ -43,6 +43,18 @@ pub struct Stats { } impl Graph { + fn welcome() -> Graph { + let toml = include_str!("../static/welcome.toml"); + let mut welcome_graph = match Graph::from_serial(toml, &Format::TOML) { + Ok(graph) => graph, + Err(error) => { + panic!("Welcome graph parsing must be infallible: {error:?}") + }, + }; + welcome_graph.modulate(); + welcome_graph + } + pub fn with_message(message: &str) -> Graph { let graph = Graph::default(); let mut messages = graph.meta.messages; @@ -69,12 +81,23 @@ impl Graph { /// Loads a Graph TOML file from CLI arguments or their defaults and /// returns a modulated Graph. /// + /// Loads a default graph with basic usage instructions if no file is found. 
+ /// /// Returns a graph with an error message if any errors are propagated. pub fn load() -> Graph { let result = Graph::load_file(None); match result { Ok(graph) => graph, - Err(error) => Graph::malformed(Some(&error)), + Err(error) => { + if error.not_found { + return Graph::welcome() + } + if let Some(message) = error.message { + Graph::malformed(Some(&message)) + } else { + Graph::malformed(None) + } + }, } } @@ -84,7 +107,7 @@ impl Graph { /// /// # Errors /// Propagates errors from `Graph::read_file`. - pub fn load_file(path: Option<&str>) -> Result<Graph, String> { + pub fn load_file(path: Option<&str>) -> Result<Graph, LoadError> { let mut graph = Graph::from_file(path)?; graph.modulate(); Ok(graph) } @@ -95,21 +118,24 @@ impl Graph { /// # Errors /// Returns Err if it can't read the contents of `in_path`. /// Propagates errors from `Graph::from_serial`. - pub fn from_file(in_path: Option<&str>) -> Result<Graph, String> { + pub fn from_file(in_path: Option<&str>) -> Result<Graph, LoadError> { let cli_path = Arguments::default().parse().graph_path; let path = in_path.map_or(cli_path, PathBuf::from); let toml_source = match std::fs::read_to_string(&path) { Ok(s) => s, - Err(e) => { + Err(error) => { log!( ERROR, - "Error reading path {}: {e}", + "Error reading path {}: {error}", path.as_path().display(), ); - return Err(format!( - "Failed reading file at {}", - path.as_path().display(), + return Err(LoadError::from_io_with_message( + &format!( + "Failed reading file at {}", + path.as_path().display(), + ), + error, )); }, }; @@ -489,12 +515,60 @@ impl Graph { } } +#[derive(Debug)] pub enum Format { TOML, JSON, Unsupported, } +#[derive(Debug)] +pub struct LoadError { + pub message: Option<String>, + pub not_found: bool, + pub io_error: Option<io::Error>, + pub serial_error: Option<SerialError>, +} + +impl LoadError { + fn from_io_with_message(message: &str, io_error: io::Error) -> LoadError { + LoadError { + message: Some(String::from(message)), + not_found: io_error.kind() == io::ErrorKind::NotFound, + io_error: Some(io_error), + serial_error: None, + } + 
} +} + +impl From<SerialError> for LoadError { + fn from(error: SerialError) -> LoadError { + LoadError { + message: Some(error.message.clone()), + not_found: false, + serial_error: Some(error), + io_error: None, + } + } +} + +impl From<io::Error> for LoadError { + fn from(error: io::Error) -> LoadError { + LoadError { + message: Some(error.to_string()), + not_found: error.kind() == io::ErrorKind::NotFound, + io_error: Some(error), + serial_error: None, + } + } +} + +#[derive(Serialize, Deserialize, Clone, Debug)] +pub struct SerialError { + pub cause: SerialErrorCause, + pub message: String, +} + #[derive(Serialize, Deserialize, Clone, Debug)] pub enum SerialErrorCause { UnsupportedFormat, @@ -511,12 +585,6 @@ impl std::fmt::Display for SerialErrorCause { } } -#[derive(Serialize, Deserialize, Clone, Debug)] -pub struct SerialError { - pub cause: SerialErrorCause, - pub message: String, -} - impl From<SerialError> for String { fn from(error: SerialError) -> String { format!("{}: {}", error.cause, error.message) @@ -1314,12 +1382,11 @@ mod serial_tests { use crate::dev::test::{Directories, Error}; #[test] - fn bad_graph_path() -> Result<(), Error> { + fn no_graph_fallback() -> Result<(), Error> { let _dirs = Directories::setup("bad_graph_path")?; let graph = Graph::load(); - let message = graph.meta.messages.first().unwrap(); - assert!(message.contains("Failed reading file at")); + assert_eq!(graph.nodes["GettingStarted"].title, "Getting Started"); Ok(()) } diff --git a/src/router/handlers/fixed.rs b/src/router/handlers/fixed.rs index 770d139..d9bd120 100644 --- a/src/router/handlers/fixed.rs +++ b/src/router/handlers/fixed.rs @@ -624,6 +624,7 @@ mod tests { use axum::http::status::StatusCode; use super::*; + use crate::router::handlers::mime::Mime; async fn wrap_serial(format: &str) -> Response { let state = GlobalState { @@ -724,7 +725,7 @@ } #[test] - fn not_found_fallback_error() { + fn not_found_asset_error() { let error = fallback("not_found.png", &Graph::default()).unwrap_err(); 
assert!(matches!(&error.kind, AssetErrorKind::NotFound)); @@ -733,15 +734,28 @@ mod tests { .contains("The file was not found in the searched path") ); } + + #[test] + fn assemble_from_blob() { + let asset = Asset::new(&[1, 0, 1], Mime::Pdf).unwrap(); + let response = assemble(asset, &Graph::default()); + let content_type = + response.headers().get(header::CONTENT_TYPE).unwrap(); + assert_eq!(content_type, "application/pdf"); + } } #[cfg(test)] -#[expect(clippy::panic_in_result_fn)] +#[cfg(unix)] +#[expect(clippy::panic_in_result_fn, clippy::unwrap_in_result)] mod serial_tests { - use std::{fs, os::unix::fs::PermissionsExt as _}; + use std::{fs, os::unix::fs::PermissionsExt as _, path::PathBuf}; use super::*; - use crate::dev::test::{Directories, Error}; + use crate::{ + dev::test::{Directories, Error}, + router::handlers::mime::Mime, + }; #[test] fn io_asset_error() -> Result<(), Error> { @@ -755,6 +769,9 @@ mod serial_tests { permissions.set_mode(0o200); fs::set_permissions(&file, permissions)?; + let new_permissions = fs::metadata(&file)?.permissions(); + assert_eq!(new_permissions.mode() & 0o777, 0o200); + let error = fallback("unreadable.png", &Graph::default()).unwrap_err(); assert!(matches!(&error.kind, AssetErrorKind::IO)); @@ -765,4 +782,98 @@ mod serial_tests { Ok(()) } + + #[test] + fn target_file_exists() -> Result<(), Error> { + let dirs = Directories::setup("target_file_exists")?; + + let assets = dirs.assets.clone(); + let file = assets.join("asset.woff2"); + + fs::write(&file, [1, 0, 1])?; + let asset = fallback("asset.woff2", &Graph::default()).unwrap(); + assert!(asset.text.is_none()); + assert!(asset.blob.is_some()); + assert!(matches!(asset.mime, Mime::Woff2)); + + Ok(()) + } + + #[test] + fn default_font_found_if_serving_enabled() -> Result<(), Error> { + let dirs = Directories::setup("font_found_if_serving_enabled")?; + + let assets = dirs.assets.clone(); + let relative_font_path = + PathBuf::from(FONTS[0].0.replace("assets/", "")); + let 
font_path = assets.join(&relative_font_path); + let font_dir = font_path.parent().expect("failed getting font dir"); + + println!("{font_dir:?}"); + fs::create_dir_all(font_dir)?; + fs::write(&font_path, [1, 0, 1])?; + let graph = Graph::from_serial( + "[meta.config]\nserve_fonts = true", + &Format::TOML, + ) + .expect("failed instantiating graph"); + println!("{font_path:?}"); + let asset = fallback(relative_font_path.to_str().unwrap(), &graph) + .expect("fallback failed"); + + assert!(asset.text.is_none()); + assert!(asset.blob.is_some()); + assert!(matches!(asset.mime, Mime::Woff2)); + + Ok(()) + } + + #[test] + fn custom_font_found_if_serving_enabled() -> Result<(), Error> { + let dirs = Directories::setup("font_found_if_serving_enabled")?; + + let assets = dirs.assets.clone(); + let relative_font_path = "fonts/custom.ttf"; + let font_path = assets.join(relative_font_path); + let font_dir = font_path.parent().unwrap(); + + fs::create_dir_all(font_dir)?; + fs::write(&font_path, [1, 0, 1])?; + let graph = Graph::from_serial( + "[meta.config]\nserve_fonts = true", + &Format::TOML, + ) + .expect("failed instantiating graph"); + let asset = + fallback(relative_font_path, &graph).expect("fallback failed"); + + assert!(asset.text.is_none()); + assert!(asset.blob.is_some()); + assert!(matches!(asset.mime, Mime::Ttf)); + + Ok(()) + } + + #[test] + fn font_not_found_if_serving_disabled() -> Result<(), Error> { + let dirs = Directories::setup("target_file_exists")?; + + let assets = dirs.assets.clone(); + let relative_font_path = + PathBuf::from(FONTS[0].0.replace("assets/", "")); + let font_path = assets.join(&relative_font_path); + let font_dir = font_path.parent().unwrap(); + + fs::create_dir_all(font_dir)?; + fs::write(&font_path, [1, 0, 1])?; + let graph = Graph::from_serial( + "[meta.config]\nserve_fonts = false", + &Format::TOML, + ) + .unwrap(); + let error = fallback(font_path.to_str().unwrap(), &graph).unwrap_err(); + assert!(matches!(error.kind, 
AssetErrorKind::NotFound)); + + Ok(()) + } } diff --git a/src/router/handlers/template.rs b/src/router/handlers/template.rs index 2fd7a83..eab2675 100644 --- a/src/router/handlers/template.rs +++ b/src/router/handlers/template.rs @@ -137,15 +137,14 @@ fn load_templates() -> Result { let root = PathBuf::from("templates"); let default_names: Vec<&str> = DEFAULTS.iter().map(|(n, _)| *n).collect(); - log!( - DEBUG, - "Reading templates from {}, canonical form {:?}", - root.display(), - root.canonicalize() - ); - match fs::read_dir(&root) { Ok(dir) => { + log!( + DEBUG, + "Reading templates from root directory '{}', canonically {:?}", + root.display(), + root.canonicalize() + ); for file_opt in dir { let file = file_opt?; let path = file.path(); @@ -168,6 +167,11 @@ fn load_templates() -> Result { } }, Err(error) => { + log!( + VERBOSE, + "A 'templates' directory was not found or is not accessible: \ + only built-in templates will be available" + ); if error.kind() != ErrorKind::NotFound { return Err(tera::Error::msg(error.to_string())) } diff --git a/src/syntax/content/parser/context.rs b/src/syntax/content/parser/context.rs index 6320a5d..e5010f6 100644 --- a/src/syntax/content/parser/context.rs +++ b/src/syntax/content/parser/context.rs @@ -77,4 +77,27 @@ mod tests { state.context.block = Block::List; super::close(&state, &mut vec![]); } + + #[test] + #[should_panic(expected = "End of input with open quote")] + fn open_quote_eoi() { + let mut state = State::default(); + state.context.block = Block::Quote; + super::close(&state, &mut vec![]); + } + + #[test] + #[should_panic(expected = "End of input with open table")] + fn open_table_eoi() { + let mut state = State::default(); + state.context.block = Block::Table; + super::close(&state, &mut vec![]); + } + + #[test] + fn open_verse_eoi() { + let mut state = State::default(); + state.context.block = Block::Verse; + super::close(&state, &mut vec![]); + } } diff --git a/src/syntax/content/parser/token/anchor.rs 
b/src/syntax/content/parser/token/anchor.rs index 54639df..1efecf9 100644 --- a/src/syntax/content/parser/token/anchor.rs +++ b/src/syntax/content/parser/token/anchor.rs @@ -261,4 +261,15 @@ mod tests { let anchor = Anchor::default(); assert_eq!(format!("{anchor}"), "Anchor -> "); } + + #[test] + fn flatten() { + let payload = "tpBTViYnldoTqDsB"; + let mut anchor = Anchor::default(); + anchor.text = String::from(payload); + assert_eq!(anchor.flatten(), payload); + + let token = Token::Anchor(Box::new(anchor)); + assert_eq!(token.flatten(), payload); + } } diff --git a/src/syntax/content/parser/token/bold.rs b/src/syntax/content/parser/token/bold.rs index 6053ad7..6ae041e 100644 --- a/src/syntax/content/parser/token/bold.rs +++ b/src/syntax/content/parser/token/bold.rs @@ -67,5 +67,8 @@ mod tests { fn flatten() { let bold = Bold::new(false); assert_eq!(bold.flatten(), ""); + + let token = Token::Bold(bold); + assert_eq!(token.flatten(), ""); } } diff --git a/src/syntax/content/parser/token/checkbox.rs b/src/syntax/content/parser/token/checkbox.rs index 61bc61a..9230af6 100644 --- a/src/syntax/content/parser/token/checkbox.rs +++ b/src/syntax/content/parser/token/checkbox.rs @@ -73,5 +73,8 @@ mod tests { fn flatten() { let checkbox = CheckBox::new(false); assert_eq!(checkbox.flatten(), ""); + + let token = Token::CheckBox(checkbox); + assert_eq!(token.flatten(), ""); } } diff --git a/src/syntax/content/parser/token/code.rs b/src/syntax/content/parser/token/code.rs index d11d5fe..8f82eba 100644 --- a/src/syntax/content/parser/token/code.rs +++ b/src/syntax/content/parser/token/code.rs @@ -62,4 +62,13 @@ mod tests { code.open = false; assert_eq!(format!("{}", Token::Code(code)), "Tk:Code [closed]"); } + + #[test] + fn flatten() { + let code = Code::new(true); + assert_eq!(code.flatten(), ""); + + let token = Token::Code(code); + assert_eq!(token.flatten(), ""); + } } diff --git a/src/syntax/content/parser/token/header.rs b/src/syntax/content/parser/token/header.rs 
index 1f5fe02..9de3b5d 100644 --- a/src/syntax/content/parser/token/header.rs +++ b/src/syntax/content/parser/token/header.rs @@ -322,4 +322,13 @@ mod tests { format!("Header [unknown L1 DOM ID {payload}]") ); } + + #[test] + fn flatten() { + let header = Header::new(Level::Two, true, Some("MNxqaFfIbCzw")); + assert_eq!(header.flatten(), ""); + + let token = Token::Header(header); + assert_eq!(token.flatten(), ""); + } } diff --git a/src/syntax/content/parser/token/item.rs b/src/syntax/content/parser/token/item.rs index 02b34ee..0e36144 100644 --- a/src/syntax/content/parser/token/item.rs +++ b/src/syntax/content/parser/token/item.rs @@ -53,11 +53,21 @@ mod tests { #[should_panic( expected = "Items should only be rendered by a list's render method" )] - fn render() { + fn token_render() { let item = Item::new("aCNuZwwzrt", None); item.render(); } + #[test] + #[should_panic( + expected = "Items should only be rendered by a list's render method" + )] + fn render() { + let item = Item::new("vuv3ipykTzuf", None); + let token = Token::Item(item); + token.render(); + } + #[test] fn probe() { let lexeme = Lexeme::new("bOa", "2R6", "4Mp"); @@ -89,5 +99,6 @@ mod tests { fn flatten() { let item = Item::new("", None); assert_eq!(item.flatten(), ""); + assert_eq!(Token::Item(item).flatten(), ""); } } diff --git a/src/syntax/content/parser/token/linebreak.rs b/src/syntax/content/parser/token/linebreak.rs index ea20a74..65ccc73 100644 --- a/src/syntax/content/parser/token/linebreak.rs +++ b/src/syntax/content/parser/token/linebreak.rs @@ -30,4 +30,10 @@ mod tests { fn token_display() { assert_eq!(format!("{}", Token::LineBreak(LineBreak)), "Tk:LineBreak"); } + + #[test] + fn flatten() { + assert_eq!(LineBreak.flatten(), "\n"); + assert_eq!(Token::LineBreak(LineBreak).flatten(), "\n"); + } } diff --git a/src/syntax/content/parser/token/list.rs b/src/syntax/content/parser/token/list.rs index db7aff1..1120b9d 100644 --- a/src/syntax/content/parser/token/list.rs +++ 
b/src/syntax/content/parser/token/list.rs @@ -232,4 +232,13 @@ mod tests { \n\n" ); } + + #[test] + fn flatten() { + let list = List::new(true); + assert_eq!(list.flatten(), "[List: 0 items]"); + + let token = Token::List(List::new(true)); + assert_eq!(token.flatten(), "[List: 0 items]"); + } } diff --git a/src/syntax/content/parser/token/literal.rs b/src/syntax/content/parser/token/literal.rs index e165d73..236ee41 100644 --- a/src/syntax/content/parser/token/literal.rs +++ b/src/syntax/content/parser/token/literal.rs @@ -45,4 +45,12 @@ mod tests { literal.text = String::from("TjY02"); assert_eq!(format!("{}", Token::Literal(literal)), "Tk:Literal TjY02"); } + + #[test] + fn flatten() { + let payload = "vJtsvWD7ErYB"; + let literal = Literal::lex(&Lexeme::new(payload, "", "")); + assert_eq!(literal.flatten(), payload); + assert_eq!(Token::Literal(literal).flatten(), payload); + } } diff --git a/src/syntax/content/parser/token/oblique.rs b/src/syntax/content/parser/token/oblique.rs index 7e4baf1..a156a19 100644 --- a/src/syntax/content/parser/token/oblique.rs +++ b/src/syntax/content/parser/token/oblique.rs @@ -73,5 +73,8 @@ mod tests { fn flatten() { let oblique = Oblique::new(false); assert_eq!(oblique.flatten(), ""); + + let token = Token::Oblique(oblique); + assert_eq!(token.flatten(), ""); } } diff --git a/src/syntax/content/parser/token/paragraph.rs b/src/syntax/content/parser/token/paragraph.rs index 3512320..c0756af 100644 --- a/src/syntax/content/parser/token/paragraph.rs +++ b/src/syntax/content/parser/token/paragraph.rs @@ -93,4 +93,15 @@ mod tests { "Tk:Paragraph [unknown]" ); } + + #[test] + fn flatten() { + let open = Paragraph::new(true); + let closed = Paragraph::new(false); + + assert_eq!(open.flatten(), ""); + assert_eq!(closed.flatten(), ""); + assert_eq!(Token::Paragraph(open).flatten(), ""); + assert_eq!(Token::Paragraph(closed).flatten(), ""); + } } diff --git a/src/syntax/content/parser/token/preformat.rs 
b/src/syntax/content/parser/token/preformat.rs index 666ad68..d950590 100644 --- a/src/syntax/content/parser/token/preformat.rs +++ b/src/syntax/content/parser/token/preformat.rs @@ -94,5 +94,8 @@ mod tests { fn flatten() { let preformat = PreFormat::new(false); assert_eq!(preformat.flatten(), ""); + + let token = Token::PreFormat(preformat); + assert_eq!(token.flatten(), ""); } } diff --git a/src/syntax/content/parser/token/quote.rs b/src/syntax/content/parser/token/quote.rs index a6d144d..397b41f 100644 --- a/src/syntax/content/parser/token/quote.rs +++ b/src/syntax/content/parser/token/quote.rs @@ -26,7 +26,9 @@ impl Parseable for Quote { lexeme.match_char('>') && lexeme.match_next_char(' ') } - fn lex(_lexeme: &Lexeme) -> Quote { Quote::default() } + fn lex(_lexeme: &Lexeme) -> Quote { + panic!("Attempt to lex a quote directly from a lexeme") + } fn render(&self) -> String { let opening = if let Some(url) = &self.url { @@ -47,7 +49,13 @@ impl Parseable for Quote { format!("\n{opening}\n{content}\n\n") } - fn flatten(&self) -> String { String::default() } + fn flatten(&self) -> String { + if let Some(citation) = &self.citation { + format!(r#""{}" -- {}"#, self.text, citation) + } else { + format!(r#""{}""#, self.text) + } + } } impl std::fmt::Display for Quote { @@ -63,3 +71,96 @@ impl std::fmt::Display for Quote { write!(f, "Quote [{}]", meta.trim()) } } + +#[cfg(test)] +mod tests { + use super::*; + use crate::syntax::content::parser::Token; + + #[test] + fn display() { + let mut quote_slim = Quote::default(); + quote_slim.text = "iXh0141J7B8P46Gv".to_string(); + + println!("{quote_slim}"); + assert!(format!("{quote_slim}").contains("Quote")); + assert!(!format!("{quote_slim}").contains("+url")); + assert!(!format!("{quote_slim}").contains("+citation")); + assert_eq!(format!("{}", Token::Quote(quote_slim)), "Tk:Quote []"); + + let mut quote_cited = Quote::default(); + quote_cited.text = "iXh0141J7B8P46Gv".to_string(); + quote_cited.citation = 
Some("k8Fy7htmvi2NG7yh".to_string()); + + println!("{quote_cited}"); + assert!(format!("{quote_cited}").contains("Quote")); + assert!(!format!("{quote_cited}").contains("+url")); + assert!(format!("{quote_cited}").contains("+citation")); + assert_eq!( + format!("{}", Token::Quote(quote_cited)), + "Tk:Quote [+citation]", + ); + + let mut quote_with_url = Quote::default(); + quote_with_url.text = "iXh0141J7B8P46Gv".to_string(); + quote_with_url.url = Some("CttVJU2IHDsjSjao".to_string()); + + println!("{quote_with_url}"); + assert!(format!("{quote_with_url}").contains("Quote")); + assert!(format!("{quote_with_url}").contains("+url")); + assert!(!format!("{quote_with_url}").contains("+citation")); + assert_eq!( + format!("{}", Token::Quote(quote_with_url)), + "Tk:Quote [+url]", + ); + + let mut quote_full = Quote::default(); + quote_full.text = "iXh0141J7B8P46Gv".to_string(); + quote_full.citation = Some("k8Fy7htmvi2NG7yh".to_string()); + quote_full.url = Some("CttVJU2IHDsjSjao".to_string()); + + println!("{quote_full}"); + assert!(format!("{quote_full}").contains("Quote")); + assert!(format!("{quote_full}").contains("+url")); + assert!(format!("{quote_full}").contains("+citation")); + assert_eq!( + format!("{}", Token::Quote(quote_full)), + "Tk:Quote [+url +citation]", + ); + } + + #[test] + fn flatten() { + assert_eq!(Quote::default().flatten(), r#""""#); + + let mut without_citation = Quote::default(); + let text = "AphyFDQHVbkOeaNw"; + without_citation.text = text.to_string(); + assert_eq!(without_citation.flatten(), format!(r#""{text}""#)); + + let without_citation_token = Token::Quote(without_citation); + assert_eq!(without_citation_token.flatten(), format!(r#""{text}""#)); + + let mut with_citation = Quote::default(); + let citation = "B35rcofYM0J7"; + with_citation.text = text.to_string(); + with_citation.citation = Some(citation.to_string()); + assert_eq!( + with_citation.flatten(), + format!(r#""{text}" -- {citation}"#) + ); + + let with_citation_token = 
Token::Quote(with_citation); + assert_eq!( + with_citation_token.flatten(), + format!(r#""{text}" -- {citation}"#) + ); + } + + #[test] + #[should_panic(expected = "Attempt to lex a quote directly from a lexeme")] + fn lex() { + let lexeme = Lexeme::new("z2UI", "FiCd", "rtq4"); + Quote::lex(&lexeme); + } +} diff --git a/src/syntax/content/parser/token/strike.rs b/src/syntax/content/parser/token/strike.rs index 2d98822..14b1dc4 100644 --- a/src/syntax/content/parser/token/strike.rs +++ b/src/syntax/content/parser/token/strike.rs @@ -69,5 +69,8 @@ mod tests { fn flatten() { let strike = Strike::new(false); assert_eq!(strike.flatten(), ""); + + let token = Token::Strike(strike); + assert_eq!(token.flatten(), ""); } } diff --git a/src/syntax/content/parser/token/table.rs b/src/syntax/content/parser/token/table.rs index 2eb29e3..f1dbae7 100644 --- a/src/syntax/content/parser/token/table.rs +++ b/src/syntax/content/parser/token/table.rs @@ -25,6 +25,7 @@ impl Table { } } + /// Counts the number of cells in the last row. 
pub fn last_row_count(&self) -> usize { if let Some(last) = self.contents.last() { last.len() @@ -37,7 +38,9 @@ impl Table { impl Parseable for Table { fn probe(lexeme: &Lexeme) -> bool { lexeme.match_char_sequence('%', '\n') } - fn lex(_lexeme: &Lexeme) -> Table { Table::default() } + fn lex(_lexeme: &Lexeme) -> Table { + panic!("Attempt to lex a table directly from a lexeme") + } fn render(&self) -> String { let mut xml = String::from("\n\n"); @@ -67,11 +70,100 @@ impl Parseable for Table { xml } - fn flatten(&self) -> String { String::default() } + fn flatten(&self) -> String { String::from("[Table]") } } impl std::fmt::Display for Table { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { - write!(f, "Table") + let headers_width = self.headers.len(); + let contents_height = self.contents.len(); + let contents_width = self.last_row_count(); + + let mut extra = String::default(); + if headers_width > 0 && contents_height > 0 { + extra = format!( + " [{contents_width}x{contents_height} +{headers_width} headers]" + ); + } else if headers_width > 0 { + extra = format!(" [+{headers_width} headers]"); + } else if contents_height > 0 { + extra = format!(" [{contents_width}x{contents_height}]"); + } + + write!(f, "Table{extra}") + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::syntax::content::parser::Token; + + #[test] + #[should_panic(expected = "Attempt to lex a table directly from a lexeme")] + fn lex() { + let lexeme = Lexeme::new("tp0h", "rrFt", "Qouf"); + Table::lex(&lexeme); + } + + #[test] + fn flatten() { + assert_eq!(Table::default().flatten(), "[Table]"); + assert_eq!(Token::Table(Table::default()).flatten(), "[Table]"); + } + + #[test] + fn display() { + use std::string::ToString; + + let mut table = Table::default(); + table.add_header("A"); + table.add_header("B"); + table.add_header("C"); + + let table_token = Token::Table(table.clone()); + assert_eq!(format!("{table}"), format!("Table [+3 headers]")); + 
assert_eq!(format!("{table_token}"), format!("Tk:Table [+3 headers]")); + + table.add_row( + ["1", "2", "3"] + .iter() + .map(ToString::to_string) + .collect::<Vec<String>>(), + ); + table.add_row( + ["4", "5", "6"] + .iter() + .map(ToString::to_string) + .collect::<Vec<String>>(), + ); + table.add_row( + ["7", "8", "9"] + .iter() + .map(ToString::to_string) + .collect::<Vec<String>>(), + ); + + let table_token2 = Token::Table(table.clone()); + assert_eq!(format!("{table}"), "Table [3x3 +3 headers]"); + assert_eq!(format!("{table_token2}"), "Tk:Table [3x3 +3 headers]"); + + let mut table2 = Table::default(); + table2.add_row( + ["1", "2", "3"] + .iter() + .map(ToString::to_string) + .collect::<Vec<String>>(), + ); + table2.add_row( + ["2", "4", "6"] + .iter() + .map(ToString::to_string) + .collect::<Vec<String>>(), + ); + + let table2_token = Token::Table(table2.clone()); + assert_eq!(format!("{table2}"), "Table [3x2]"); + assert_eq!(format!("{table2_token}"), "Tk:Table [3x2]"); } } diff --git a/src/syntax/content/parser/token/underline.rs b/src/syntax/content/parser/token/underline.rs index e8e2668..0cd565e 100644 --- a/src/syntax/content/parser/token/underline.rs +++ b/src/syntax/content/parser/token/underline.rs @@ -75,5 +75,8 @@ mod tests { fn flatten() { let underline = Underline::new(false); assert_eq!(underline.flatten(), ""); + + let token = Token::Underline(underline); + assert_eq!(token.flatten(), ""); } } diff --git a/src/syntax/content/parser/token/verse.rs b/src/syntax/content/parser/token/verse.rs index 0c0d696..f3f7910 100644 --- a/src/syntax/content/parser/token/verse.rs +++ b/src/syntax/content/parser/token/verse.rs @@ -55,6 +55,7 @@ impl std::fmt::Display for Verse { #[cfg(test)] mod tests { use super::*; + use crate::syntax::content::parser::Token; #[test] fn lexed_verse_is_empty() { @@ -63,9 +64,12 @@ } #[test] - fn flat_verse_is_empty() { + fn flatten() { let verse = Verse::new(true); assert!(verse.flatten().is_empty()); + + let token = Token::Verse(verse); + assert_eq!(token.flatten(), "");
} #[test] @@ -80,12 +84,18 @@ mod tests { #[test] fn display() { let open = Verse::new(true); + let open_token = Token::Verse(open.clone()); assert_eq!(format!("{open}"), "Verse [open]"); + assert_eq!(format!("{open_token}"), "Tk:Verse [open]"); let closed = Verse::new(false); + let closed_token = Token::Verse(closed.clone()); assert_eq!(format!("{closed}"), "Verse [closed]"); + assert_eq!(format!("{closed_token}"), "Tk:Verse [closed]"); let unknown = Verse::lex(&Lexeme::default()); + let unknown_token = Token::Verse(unknown.clone()); assert_eq!(format!("{unknown}"), "Verse [unknown]"); + assert_eq!(format!("{unknown_token}"), "Tk:Verse [unknown]"); } } diff --git a/static/welcome.toml b/static/welcome.toml new file mode 100644 index 0000000..7ff2d33 --- /dev/null +++ b/static/welcome.toml @@ -0,0 +1,17 @@ +[nodes.GettingStarted] +title = "Getting Started" +text = """ +## Welcome to en! +# +If you are seeing this, it's working! + +Now that you know how to run it, tell en how to find your graph file by adding a `--graph` option: + +` +en --graph my_graph.toml +` + +Alternatively, you can also add a `static` directory next to the en binary with a `graph.toml` file in it. + +To learn how to write your first graph and everything else about en, check out the |documentation|https://en.jutty.dev|. +"""