diff --git a/src/dev/test.rs b/src/dev/test.rs
index a644e30..8a4ec15 100644
--- a/src/dev/test.rs
+++ b/src/dev/test.rs
@@ -161,6 +161,18 @@ mod tests {
assert!(format!("{error}").contains(io_payload));
}
+ #[test]
+ fn display_contains_str_from_tera_error() {
+ let payload = "pA6B0LhiiDMNCl1J";
+ let tera_payload = "5ob8H594dCAQ8pfk";
+ let error = Error {
+ message: payload.to_string(),
+ inner_tera: Some(tera::Error::msg(tera_payload)),
+ inner_io: None,
+ };
+ assert!(format!("{error}").contains(payload));
+ assert!(format!("{error}").contains(tera_payload));
+ }
#[test]
fn from_io_error() {
let payload = "YgmTKBm3VtHt5h3x9";
@@ -169,6 +181,15 @@ mod tests {
assert!(error.message.contains(payload));
}
+
+ #[test]
+ fn from_tera_error() {
+ let payload = "XEB3dcvYuz0M1lYt";
+ let tera_error = tera::Error::msg(payload);
+ let error = Error::from(tera_error);
+
+ assert!(error.message.contains(payload));
+ }
}
#[cfg(test)]
diff --git a/src/router/handlers/fixed.rs b/src/router/handlers/fixed.rs
index 390e794..d9bd120 100644
--- a/src/router/handlers/fixed.rs
+++ b/src/router/handlers/fixed.rs
@@ -624,6 +624,7 @@ mod tests {
use axum::http::status::StatusCode;
use super::*;
+ use crate::router::handlers::mime::Mime;
async fn wrap_serial(format: &str) -> Response
{
let state = GlobalState {
@@ -733,16 +734,28 @@ mod tests {
.contains("The file was not found in the searched path")
);
}
+
+ #[test]
+ fn assemble_from_blob() {
+ let asset = Asset::new(&[1, 0, 1], Mime::Pdf).unwrap();
+ let response = assemble(asset, &Graph::default());
+ let content_type =
+ response.headers().get(header::CONTENT_TYPE).unwrap();
+ assert_eq!(content_type, "application/pdf");
+ }
}
#[cfg(test)]
#[cfg(unix)]
-#[expect(clippy::panic_in_result_fn)]
+#[expect(clippy::panic_in_result_fn, clippy::unwrap_in_result)]
mod serial_tests {
- use std::{fs, os::unix::fs::PermissionsExt as _};
+ use std::{fs, os::unix::fs::PermissionsExt as _, path::PathBuf};
use super::*;
- use crate::dev::test::{Directories, Error};
+ use crate::{
+ dev::test::{Directories, Error},
+ router::handlers::mime::Mime,
+ };
#[test]
fn io_asset_error() -> Result<(), Error> {
@@ -769,4 +782,98 @@ mod serial_tests {
Ok(())
}
+
+ #[test]
+ fn target_file_exists() -> Result<(), Error> {
+ let dirs = Directories::setup("target_file_exists")?;
+
+ let assets = dirs.assets.clone();
+ let file = assets.join("asset.woff2");
+
+ fs::write(&file, [1, 0, 1])?;
+ let asset = fallback("asset.woff2", &Graph::default()).unwrap();
+ assert!(asset.text.is_none());
+ assert!(asset.blob.is_some());
+ assert!(matches!(asset.mime, Mime::Woff2));
+
+ Ok(())
+ }
+
+ #[test]
+ fn default_font_found_if_serving_enabled() -> Result<(), Error> {
+ let dirs = Directories::setup("font_found_if_serving_enabled")?;
+
+ let assets = dirs.assets.clone();
+ let relative_font_path =
+ PathBuf::from(FONTS[0].0.replace("assets/", ""));
+ let font_path = assets.join(&relative_font_path);
+ let font_dir = font_path.parent().expect("failed getting font dir");
+
+
+ fs::create_dir_all(font_dir)?;
+ fs::write(&font_path, [1, 0, 1])?;
+ let graph = Graph::from_serial(
+ "[meta.config]\nserve_fonts = true",
+ &Format::TOML,
+ )
+ .expect("failed instantiating graph");
+
+ let asset = fallback(relative_font_path.to_str().unwrap(), &graph)
+ .expect("fallback failed");
+
+ assert!(asset.text.is_none());
+ assert!(asset.blob.is_some());
+ assert!(matches!(asset.mime, Mime::Woff2));
+
+ Ok(())
+ }
+
+ #[test]
+ fn custom_font_found_if_serving_enabled() -> Result<(), Error> {
+        let dirs = Directories::setup("custom_font_found_if_serving_enabled")?;
+
+ let assets = dirs.assets.clone();
+ let relative_font_path = "fonts/custom.ttf";
+ let font_path = assets.join(relative_font_path);
+ let font_dir = font_path.parent().unwrap();
+
+ fs::create_dir_all(font_dir)?;
+ fs::write(&font_path, [1, 0, 1])?;
+ let graph = Graph::from_serial(
+ "[meta.config]\nserve_fonts = true",
+ &Format::TOML,
+ )
+ .expect("failed instantiating graph");
+ let asset =
+ fallback(relative_font_path, &graph).expect("fallback failed");
+
+ assert!(asset.text.is_none());
+ assert!(asset.blob.is_some());
+ assert!(matches!(asset.mime, Mime::Ttf));
+
+ Ok(())
+ }
+
+ #[test]
+ fn font_not_found_if_serving_disabled() -> Result<(), Error> {
+        let dirs = Directories::setup("font_not_found_if_serving_disabled")?;
+
+ let assets = dirs.assets.clone();
+ let relative_font_path =
+ PathBuf::from(FONTS[0].0.replace("assets/", ""));
+ let font_path = assets.join(&relative_font_path);
+ let font_dir = font_path.parent().unwrap();
+
+ fs::create_dir_all(font_dir)?;
+ fs::write(&font_path, [1, 0, 1])?;
+ let graph = Graph::from_serial(
+ "[meta.config]\nserve_fonts = false",
+ &Format::TOML,
+ )
+ .unwrap();
+        let error = fallback(relative_font_path.to_str().unwrap(), &graph).unwrap_err();
+ assert!(matches!(error.kind, AssetErrorKind::NotFound));
+
+ Ok(())
+ }
}
diff --git a/src/syntax/content/parser/context.rs b/src/syntax/content/parser/context.rs
index 6320a5d..e5010f6 100644
--- a/src/syntax/content/parser/context.rs
+++ b/src/syntax/content/parser/context.rs
@@ -77,4 +77,27 @@ mod tests {
state.context.block = Block::List;
super::close(&state, &mut vec![]);
}
+
+ #[test]
+ #[should_panic(expected = "End of input with open quote")]
+ fn open_quote_eoi() {
+ let mut state = State::default();
+ state.context.block = Block::Quote;
+ super::close(&state, &mut vec![]);
+ }
+
+ #[test]
+ #[should_panic(expected = "End of input with open table")]
+ fn open_table_eoi() {
+ let mut state = State::default();
+ state.context.block = Block::Table;
+ super::close(&state, &mut vec![]);
+ }
+
+ #[test]
+ fn open_verse_eoi() {
+ let mut state = State::default();
+ state.context.block = Block::Verse;
+ super::close(&state, &mut vec![]);
+ }
}
diff --git a/src/syntax/content/parser/token/anchor.rs b/src/syntax/content/parser/token/anchor.rs
index 54639df..1efecf9 100644
--- a/src/syntax/content/parser/token/anchor.rs
+++ b/src/syntax/content/parser/token/anchor.rs
@@ -261,4 +261,15 @@ mod tests {
let anchor = Anchor::default();
assert_eq!(format!("{anchor}"), "Anchor -> ");
}
+
+ #[test]
+ fn flatten() {
+ let payload = "tpBTViYnldoTqDsB";
+ let mut anchor = Anchor::default();
+ anchor.text = String::from(payload);
+ assert_eq!(anchor.flatten(), payload);
+
+ let token = Token::Anchor(Box::new(anchor));
+ assert_eq!(token.flatten(), payload);
+ }
}
diff --git a/src/syntax/content/parser/token/bold.rs b/src/syntax/content/parser/token/bold.rs
index 6053ad7..6ae041e 100644
--- a/src/syntax/content/parser/token/bold.rs
+++ b/src/syntax/content/parser/token/bold.rs
@@ -67,5 +67,8 @@ mod tests {
fn flatten() {
let bold = Bold::new(false);
assert_eq!(bold.flatten(), "");
+
+ let token = Token::Bold(bold);
+ assert_eq!(token.flatten(), "");
}
}
diff --git a/src/syntax/content/parser/token/checkbox.rs b/src/syntax/content/parser/token/checkbox.rs
index 61bc61a..9230af6 100644
--- a/src/syntax/content/parser/token/checkbox.rs
+++ b/src/syntax/content/parser/token/checkbox.rs
@@ -73,5 +73,8 @@ mod tests {
fn flatten() {
let checkbox = CheckBox::new(false);
assert_eq!(checkbox.flatten(), "");
+
+ let token = Token::CheckBox(checkbox);
+ assert_eq!(token.flatten(), "");
}
}
diff --git a/src/syntax/content/parser/token/code.rs b/src/syntax/content/parser/token/code.rs
index d11d5fe..8f82eba 100644
--- a/src/syntax/content/parser/token/code.rs
+++ b/src/syntax/content/parser/token/code.rs
@@ -62,4 +62,13 @@ mod tests {
code.open = false;
assert_eq!(format!("{}", Token::Code(code)), "Tk:Code [closed]");
}
+
+ #[test]
+ fn flatten() {
+ let code = Code::new(true);
+ assert_eq!(code.flatten(), "");
+
+ let token = Token::Code(code);
+ assert_eq!(token.flatten(), "");
+ }
}
diff --git a/src/syntax/content/parser/token/header.rs b/src/syntax/content/parser/token/header.rs
index 1f5fe02..9de3b5d 100644
--- a/src/syntax/content/parser/token/header.rs
+++ b/src/syntax/content/parser/token/header.rs
@@ -322,4 +322,13 @@ mod tests {
format!("Header [unknown L1 DOM ID {payload}]")
);
}
+
+ #[test]
+ fn flatten() {
+ let header = Header::new(Level::Two, true, Some("MNxqaFfIbCzw"));
+ assert_eq!(header.flatten(), "");
+
+ let token = Token::Header(header);
+ assert_eq!(token.flatten(), "");
+ }
}
diff --git a/src/syntax/content/parser/token/item.rs b/src/syntax/content/parser/token/item.rs
index 02b34ee..0e36144 100644
--- a/src/syntax/content/parser/token/item.rs
+++ b/src/syntax/content/parser/token/item.rs
@@ -53,11 +53,21 @@ mod tests {
#[should_panic(
expected = "Items should only be rendered by a list's render method"
)]
- fn render() {
+ fn token_render() {
let item = Item::new("aCNuZwwzrt", None);
item.render();
}
+ #[test]
+ #[should_panic(
+ expected = "Items should only be rendered by a list's render method"
+ )]
+ fn render() {
+ let item = Item::new("vuv3ipykTzuf", None);
+ let token = Token::Item(item);
+ token.render();
+ }
+
#[test]
fn probe() {
let lexeme = Lexeme::new("bOa", "2R6", "4Mp");
@@ -89,5 +99,6 @@ mod tests {
fn flatten() {
let item = Item::new("", None);
assert_eq!(item.flatten(), "");
+ assert_eq!(Token::Item(item).flatten(), "");
}
}
diff --git a/src/syntax/content/parser/token/linebreak.rs b/src/syntax/content/parser/token/linebreak.rs
index ea20a74..65ccc73 100644
--- a/src/syntax/content/parser/token/linebreak.rs
+++ b/src/syntax/content/parser/token/linebreak.rs
@@ -30,4 +30,10 @@ mod tests {
fn token_display() {
assert_eq!(format!("{}", Token::LineBreak(LineBreak)), "Tk:LineBreak");
}
+
+ #[test]
+ fn flatten() {
+ assert_eq!(LineBreak.flatten(), "\n");
+ assert_eq!(Token::LineBreak(LineBreak).flatten(), "\n");
+ }
}
diff --git a/src/syntax/content/parser/token/list.rs b/src/syntax/content/parser/token/list.rs
index db7aff1..1120b9d 100644
--- a/src/syntax/content/parser/token/list.rs
+++ b/src/syntax/content/parser/token/list.rs
@@ -232,4 +232,13 @@ mod tests {
\n\n"
);
}
+
+ #[test]
+ fn flatten() {
+ let list = List::new(true);
+ assert_eq!(list.flatten(), "[List: 0 items]");
+
+ let token = Token::List(List::new(true));
+ assert_eq!(token.flatten(), "[List: 0 items]");
+ }
}
diff --git a/src/syntax/content/parser/token/literal.rs b/src/syntax/content/parser/token/literal.rs
index e165d73..236ee41 100644
--- a/src/syntax/content/parser/token/literal.rs
+++ b/src/syntax/content/parser/token/literal.rs
@@ -45,4 +45,12 @@ mod tests {
literal.text = String::from("TjY02");
assert_eq!(format!("{}", Token::Literal(literal)), "Tk:Literal TjY02");
}
+
+ #[test]
+ fn flatten() {
+ let payload = "vJtsvWD7ErYB";
+ let literal = Literal::lex(&Lexeme::new(payload, "", ""));
+ assert_eq!(literal.flatten(), payload);
+ assert_eq!(Token::Literal(literal).flatten(), payload);
+ }
}
diff --git a/src/syntax/content/parser/token/oblique.rs b/src/syntax/content/parser/token/oblique.rs
index 7e4baf1..a156a19 100644
--- a/src/syntax/content/parser/token/oblique.rs
+++ b/src/syntax/content/parser/token/oblique.rs
@@ -73,5 +73,8 @@ mod tests {
fn flatten() {
let oblique = Oblique::new(false);
assert_eq!(oblique.flatten(), "");
+
+ let token = Token::Oblique(oblique);
+ assert_eq!(token.flatten(), "");
}
}
diff --git a/src/syntax/content/parser/token/paragraph.rs b/src/syntax/content/parser/token/paragraph.rs
index 3512320..c0756af 100644
--- a/src/syntax/content/parser/token/paragraph.rs
+++ b/src/syntax/content/parser/token/paragraph.rs
@@ -93,4 +93,15 @@ mod tests {
"Tk:Paragraph [unknown]"
);
}
+
+ #[test]
+ fn flatten() {
+ let open = Paragraph::new(true);
+ let closed = Paragraph::new(false);
+
+ assert_eq!(open.flatten(), "");
+ assert_eq!(closed.flatten(), "");
+ assert_eq!(Token::Paragraph(open).flatten(), "");
+ assert_eq!(Token::Paragraph(closed).flatten(), "");
+ }
}
diff --git a/src/syntax/content/parser/token/preformat.rs b/src/syntax/content/parser/token/preformat.rs
index 666ad68..d950590 100644
--- a/src/syntax/content/parser/token/preformat.rs
+++ b/src/syntax/content/parser/token/preformat.rs
@@ -94,5 +94,8 @@ mod tests {
fn flatten() {
let preformat = PreFormat::new(false);
assert_eq!(preformat.flatten(), "");
+
+ let token = Token::PreFormat(preformat);
+ assert_eq!(token.flatten(), "");
}
}
diff --git a/src/syntax/content/parser/token/quote.rs b/src/syntax/content/parser/token/quote.rs
index a6d144d..397b41f 100644
--- a/src/syntax/content/parser/token/quote.rs
+++ b/src/syntax/content/parser/token/quote.rs
@@ -26,7 +26,9 @@ impl Parseable for Quote {
lexeme.match_char('>') && lexeme.match_next_char(' ')
}
- fn lex(_lexeme: &Lexeme) -> Quote { Quote::default() }
+ fn lex(_lexeme: &Lexeme) -> Quote {
+ panic!("Attempt to lex a quote directly from a lexeme")
+ }
fn render(&self) -> String {
let opening = if let Some(url) = &self.url {
@@ -47,7 +49,13 @@ impl Parseable for Quote {
format!("\n{opening}\n{content}\n\n")
}
- fn flatten(&self) -> String { String::default() }
+ fn flatten(&self) -> String {
+ if let Some(citation) = &self.citation {
+ format!(r#""{}" -- {}"#, self.text, citation)
+ } else {
+ format!(r#""{}""#, self.text)
+ }
+ }
}
impl std::fmt::Display for Quote {
@@ -63,3 +71,96 @@ impl std::fmt::Display for Quote {
write!(f, "Quote [{}]", meta.trim())
}
}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::syntax::content::parser::Token;
+
+ #[test]
+ fn display() {
+ let mut quote_slim = Quote::default();
+ quote_slim.text = "iXh0141J7B8P46Gv".to_string();
+
+ println!("{quote_slim}");
+ assert!(format!("{quote_slim}").contains("Quote"));
+ assert!(!format!("{quote_slim}").contains("+url"));
+ assert!(!format!("{quote_slim}").contains("+citation"));
+ assert_eq!(format!("{}", Token::Quote(quote_slim)), "Tk:Quote []");
+
+ let mut quote_cited = Quote::default();
+ quote_cited.text = "iXh0141J7B8P46Gv".to_string();
+ quote_cited.citation = Some("k8Fy7htmvi2NG7yh".to_string());
+
+ println!("{quote_cited}");
+ assert!(format!("{quote_cited}").contains("Quote"));
+ assert!(!format!("{quote_cited}").contains("+url"));
+ assert!(format!("{quote_cited}").contains("+citation"));
+ assert_eq!(
+ format!("{}", Token::Quote(quote_cited)),
+ "Tk:Quote [+citation]",
+ );
+
+ let mut quote_with_url = Quote::default();
+ quote_with_url.text = "iXh0141J7B8P46Gv".to_string();
+ quote_with_url.url = Some("CttVJU2IHDsjSjao".to_string());
+
+ println!("{quote_with_url}");
+ assert!(format!("{quote_with_url}").contains("Quote"));
+ assert!(format!("{quote_with_url}").contains("+url"));
+ assert!(!format!("{quote_with_url}").contains("+citation"));
+ assert_eq!(
+ format!("{}", Token::Quote(quote_with_url)),
+ "Tk:Quote [+url]",
+ );
+
+ let mut quote_full = Quote::default();
+ quote_full.text = "iXh0141J7B8P46Gv".to_string();
+ quote_full.citation = Some("k8Fy7htmvi2NG7yh".to_string());
+ quote_full.url = Some("CttVJU2IHDsjSjao".to_string());
+
+ println!("{quote_full}");
+ assert!(format!("{quote_full}").contains("Quote"));
+ assert!(format!("{quote_full}").contains("+url"));
+ assert!(format!("{quote_full}").contains("+citation"));
+ assert_eq!(
+ format!("{}", Token::Quote(quote_full)),
+ "Tk:Quote [+url +citation]",
+ );
+ }
+
+ #[test]
+ fn flatten() {
+ assert_eq!(Quote::default().flatten(), r#""""#);
+
+ let mut without_citation = Quote::default();
+ let text = "AphyFDQHVbkOeaNw";
+ without_citation.text = text.to_string();
+ assert_eq!(without_citation.flatten(), format!(r#""{text}""#));
+
+ let without_citation_token = Token::Quote(without_citation);
+ assert_eq!(without_citation_token.flatten(), format!(r#""{text}""#));
+
+ let mut with_citation = Quote::default();
+ let citation = "B35rcofYM0J7";
+ with_citation.text = text.to_string();
+ with_citation.citation = Some(citation.to_string());
+ assert_eq!(
+ with_citation.flatten(),
+ format!(r#""{text}" -- {citation}"#)
+ );
+
+ let with_citation_token = Token::Quote(with_citation);
+ assert_eq!(
+ with_citation_token.flatten(),
+ format!(r#""{text}" -- {citation}"#)
+ );
+ }
+
+ #[test]
+ #[should_panic(expected = "Attempt to lex a quote directly from a lexeme")]
+ fn lex() {
+ let lexeme = Lexeme::new("z2UI", "FiCd", "rtq4");
+ Quote::lex(&lexeme);
+ }
+}
diff --git a/src/syntax/content/parser/token/strike.rs b/src/syntax/content/parser/token/strike.rs
index 2d98822..14b1dc4 100644
--- a/src/syntax/content/parser/token/strike.rs
+++ b/src/syntax/content/parser/token/strike.rs
@@ -69,5 +69,8 @@ mod tests {
fn flatten() {
let strike = Strike::new(false);
assert_eq!(strike.flatten(), "");
+
+ let token = Token::Strike(strike);
+ assert_eq!(token.flatten(), "");
}
}
diff --git a/src/syntax/content/parser/token/table.rs b/src/syntax/content/parser/token/table.rs
index 2eb29e3..f1dbae7 100644
--- a/src/syntax/content/parser/token/table.rs
+++ b/src/syntax/content/parser/token/table.rs
@@ -25,6 +25,7 @@ impl Table {
}
}
+ /// Counts the number of cells in the last row.
pub fn last_row_count(&self) -> usize {
if let Some(last) = self.contents.last() {
last.len()
@@ -37,7 +38,9 @@ impl Table {
impl Parseable for Table {
fn probe(lexeme: &Lexeme) -> bool { lexeme.match_char_sequence('%', '\n') }
- fn lex(_lexeme: &Lexeme) -> Table { Table::default() }
+ fn lex(_lexeme: &Lexeme) -> Table {
+ panic!("Attempt to lex a table directly from a lexeme")
+ }
fn render(&self) -> String {
let mut xml = String::from("\n\n");
@@ -67,11 +70,100 @@ impl Parseable for Table {
xml
}
- fn flatten(&self) -> String { String::default() }
+ fn flatten(&self) -> String { String::from("[Table]") }
}
impl std::fmt::Display for Table {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
- write!(f, "Table")
+ let headers_width = self.headers.len();
+ let contents_height = self.contents.len();
+ let contents_width = self.last_row_count();
+
+ let mut extra = String::default();
+ if headers_width > 0 && contents_height > 0 {
+ extra = format!(
+ " [{contents_width}x{contents_height} +{headers_width} headers]"
+ );
+ } else if headers_width > 0 {
+ extra = format!(" [+{headers_width} headers]");
+ } else if contents_height > 0 {
+ extra = format!(" [{contents_width}x{contents_height}]");
+ }
+
+ write!(f, "Table{extra}")
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::syntax::content::parser::Token;
+
+ #[test]
+ #[should_panic(expected = "Attempt to lex a table directly from a lexeme")]
+ fn lex() {
+ let lexeme = Lexeme::new("tp0h", "rrFt", "Qouf");
+ Table::lex(&lexeme);
+ }
+
+ #[test]
+ fn flatten() {
+ assert_eq!(Table::default().flatten(), "[Table]");
+ assert_eq!(Token::Table(Table::default()).flatten(), "[Table]");
+ }
+
+ #[test]
+ fn display() {
+ use std::string::ToString;
+
+ let mut table = Table::default();
+ table.add_header("A");
+ table.add_header("B");
+ table.add_header("C");
+
+ let table_token = Token::Table(table.clone());
+        assert_eq!(format!("{table}"), "Table [+3 headers]");
+        assert_eq!(format!("{table_token}"), "Tk:Table [+3 headers]");
+
+ table.add_row(
+ ["1", "2", "3"]
+ .iter()
+ .map(ToString::to_string)
+            .collect::<Vec<_>>(),
+ );
+ table.add_row(
+ ["4", "5", "6"]
+ .iter()
+ .map(ToString::to_string)
+            .collect::<Vec<_>>(),
+ );
+ table.add_row(
+ ["7", "8", "9"]
+ .iter()
+ .map(ToString::to_string)
+            .collect::<Vec<_>>(),
+ );
+
+ let table_token2 = Token::Table(table.clone());
+ assert_eq!(format!("{table}"), "Table [3x3 +3 headers]");
+ assert_eq!(format!("{table_token2}"), "Tk:Table [3x3 +3 headers]");
+
+ let mut table2 = Table::default();
+ table2.add_row(
+ ["1", "2", "3"]
+ .iter()
+ .map(ToString::to_string)
+            .collect::<Vec<_>>(),
+ );
+ table2.add_row(
+ ["2", "4", "6"]
+ .iter()
+ .map(ToString::to_string)
+            .collect::<Vec<_>>(),
+ );
+
+ let table2_token = Token::Table(table2.clone());
+ assert_eq!(format!("{table2}"), "Table [3x2]");
+ assert_eq!(format!("{table2_token}"), "Tk:Table [3x2]");
}
}
diff --git a/src/syntax/content/parser/token/underline.rs b/src/syntax/content/parser/token/underline.rs
index e8e2668..0cd565e 100644
--- a/src/syntax/content/parser/token/underline.rs
+++ b/src/syntax/content/parser/token/underline.rs
@@ -75,5 +75,8 @@ mod tests {
fn flatten() {
let underline = Underline::new(false);
assert_eq!(underline.flatten(), "");
+
+ let token = Token::Underline(underline);
+ assert_eq!(token.flatten(), "");
}
}
diff --git a/src/syntax/content/parser/token/verse.rs b/src/syntax/content/parser/token/verse.rs
index 0c0d696..f3f7910 100644
--- a/src/syntax/content/parser/token/verse.rs
+++ b/src/syntax/content/parser/token/verse.rs
@@ -55,6 +55,7 @@ impl std::fmt::Display for Verse {
#[cfg(test)]
mod tests {
use super::*;
+ use crate::syntax::content::parser::Token;
#[test]
fn lexed_verse_is_empty() {
@@ -63,9 +64,12 @@ mod tests {
}
#[test]
- fn flat_verse_is_empty() {
+ fn flatten() {
let verse = Verse::new(true);
assert!(verse.flatten().is_empty());
+
+ let token = Token::Verse(verse);
+ assert_eq!(token.flatten(), "");
}
#[test]
@@ -80,12 +84,18 @@ mod tests {
#[test]
fn display() {
let open = Verse::new(true);
+ let open_token = Token::Verse(open.clone());
assert_eq!(format!("{open}"), "Verse [open]");
+ assert_eq!(format!("{open_token}"), "Tk:Verse [open]");
let closed = Verse::new(false);
+ let closed_token = Token::Verse(closed.clone());
assert_eq!(format!("{closed}"), "Verse [closed]");
+ assert_eq!(format!("{closed_token}"), "Tk:Verse [closed]");
let unknown = Verse::lex(&Lexeme::default());
+ let unknown_token = Token::Verse(unknown.clone());
assert_eq!(format!("{unknown}"), "Verse [unknown]");
+ assert_eq!(format!("{unknown_token}"), "Tk:Verse [unknown]");
}
}