Compare commits

...

10 commits

53 changed files with 1054 additions and 713 deletions

View file

@ -3,6 +3,7 @@ on:
paths:
- src/**
- tests/**
- static/graph.toml
- .forgejo/**
- Cargo.toml
- Cargo.lock
@ -27,7 +28,8 @@ jobs:
- name: Setup Rust toolchain
run: |
rustup component add rustfmt clippy llvm-tools-preview
rustup component add clippy llvm-tools-preview
rustup component add --toolchain nightly rustfmt
- name: Setup additional tooling
run: |

View file

@ -47,8 +47,8 @@ alias qr := quick-assess-run-watch
[private]
quick-test-cover:
{{ cover_cmd }} --no-report -- --skip 'serial_tests::'
{{ cover_cmd }} --no-report -- --test 'serial_tests::' --test-threads 1
{{ cover_cmd }} --no-report -- --skip 'serial_tests::'
{{ cover_cmd }} report --html
@{{ cover_cmd }} report | tail -1 | awk '{ print " [ Regions:", $4, "• Functions:", $7, "• Lines:", $10, "]" }'
@ -62,14 +62,14 @@ alias qo := quick-test-cover-watch
# Format all files
[group: 'develop']
format:
cargo fmt
cargo +nightly fmt
alias f := format
# Lint
[group: 'develop']
lint:
cargo clippy
cargo +nightly clippy
alias l := lint
@ -94,9 +94,16 @@ rustc-fix:
alias rf := rustc-fix
# Apply clippy lint fixes
[group: 'develop']
clippy-fix:
cargo +nightly clippy --fix --allow-dirty
alias cf := clippy-fix
# Apply all automatic fixes
[group: 'develop']
fix: rustc-fix format
fix: rustc-fix clippy-fix format
alias x := fix
@ -162,15 +169,16 @@ alias do := doc-open
# Assess formatting
[group: 'assess']
format-assess:
cargo fmt -- --check
cargo +nightly fmt -- --check
alias fc := format-assess
# Assess production lints
[group: 'assess']
lint-assess:
cargo clippy -- \
-D clippy::dbg_macro -D clippy::print_stdout -D clippy::print_stderr \
cargo +nightly clippy -- \
-D clippy::dbg_macro -D clippy::use_debug \
-D clippy::print_stdout -D clippy::print_stderr \
-D clippy::todo -D clippy::unimplemented -D clippy::unreachable
alias la := lint-assess
@ -185,8 +193,10 @@ alias c := check
# Run tests
[group: 'assess']
test:
cargo test -- --skip 'serial_tests::'
cargo test -- --test 'serial_tests::' --test-threads 1
cargo test --bin en
cargo test --doc
cargo test --lib -- --skip 'serial_tests::'
alias t := test
@ -200,8 +210,8 @@ alias oc := test-cover-clean
# Run tests with coverage
[group: 'assess']
test-cover: test-cover-clean
{{ cover_cmd }} --no-report -- --skip 'serial_tests::'
{{ cover_cmd }} --no-report -- --test 'serial_tests::' --test-threads 1
{{ cover_cmd }} --no-report -- --skip 'serial_tests::'
alias o := test-cover

View file

@ -1,25 +1,33 @@
match_block_trailing_comma = true
unstable_features = true
max_width = 80
reorder_imports = false
reorder_modules = false
inline_attribute_width = 40
skip_macro_invocations = ["concat"]
imports_granularity = "Crate"
group_imports = "StdExternalCrate"
fn_single_line = true
match_block_trailing_comma = true
use_field_init_shorthand = true
use_try_shorthand = true
hex_literal_case = "Lower"
where_single_line = true
condense_wildcard_suffixes = true
combine_control_expr = false
empty_item_single_line = true
reorder_impl_items = true
trailing_semicolon = false
# blank_lines_lower_bound = 1
# not stabilized yet
# where_single_line = true
# overflow_delimited_expr = true
# normalize_doc_attributes = true
# normalize_comments = true
# inline_attribute_width = 40
# imports_granularity = "Crate"
# hex_literal_case = "Lower"
# group_imports = "StdExternalCrate"
# format_strings = true
# force_multiline_blocks = true
# error_on_unformatted = true
# error_on_line_overflow = true
# condense_wildcard_suffixes = true
# doc_comment_code_block_width = 70
# format_code_in_doc_comments = true
# wrap_comments = true
wrap_comments = true
normalize_comments = true
normalize_doc_attributes = true
format_code_in_doc_comments = true
doc_comment_code_block_width = 70
error_on_unformatted = true
error_on_line_overflow = true
ignore = [
"tests/mocks",
]

View file

@ -34,101 +34,205 @@ let_underscore= { level = "warn", priority = 10 }
nonstandard-style = "warn"
future-incompatible = "warn"
keyword-idents = "warn"
non_ascii_idents = "warn"
[lints.clippy]
# levels: allow, warn, deny, forbid
manual_non_exhaustive = "allow"
collapsible_if = "allow"
collapsible_else_if = "allow"
field_reassign_with_default = "allow"
# pedantic
allow_attributes = "warn"
arithmetic_side_effects = "warn"
as_conversions = "warn"
as_pointer_underscore = "warn"
as_underscore = "warn"
assigning_clones = "warn"
borrow_as_ptr = "warn"
branches_sharing_code = "warn"
case_sensitive_file_extension_comparisons = "warn"
cast_lossless = "warn"
cast_possible_truncation = "warn"
cast_possible_wrap = "warn"
cast_ptr_alignment = "warn"
cast_precision_loss = "warn"
cast_sign_loss = "warn"
checked_conversions = "warn"
clear_with_drain = "warn"
cloned_instead_of_copied = "warn"
coerce_container_to_any = "warn"
collapsible_else_if = "allow"
collapsible_if = "allow"
collection_is_never_read = "warn"
comparison_chain = "warn"
copy_iterator = "warn"
default_trait_access = "warn"
deref_by_slicing = "warn"
derive_partial_eq_without_eq = "warn"
doc_broken_link = "warn"
doc_comment_double_space_linebreaks = "warn"
doc_include_without_cfg = "warn"
doc_link_code = "warn"
doc_link_with_quotes = "warn"
doc_markdown = "warn"
doc_paragraphs_missing_punctuation = "warn"
duration_suboptimal_units = "warn"
empty_drop = "warn"
empty_enum_variants_with_brackets = "warn"
empty_enums = "warn"
empty_structs_with_brackets = "warn"
equatable_if_let = "warn"
error_impl_error = "warn"
exit = "warn"
expect_used = "warn"
expl_impl_clone_on_copy = "warn"
explicit_deref_methods = "warn"
explicit_into_iter_loop = "warn"
explicit_iter_loop = "warn"
fallible_impl_from = "warn"
field_reassign_with_default = "allow"
filetype_is_file = "warn"
filter_map_next = "warn"
flat_map_option = "warn"
float_cmp = "warn"
float_cmp_const = "warn"
fn_to_numeric_cast_any = "warn"
format_collect = "warn"
format_push_string = "warn"
from_iter_instead_of_collect = "warn"
get_unwrap = "warn"
if_not_else = "warn"
if_then_some_else_none = "warn"
ignore_without_reason = "warn"
ignored_unit_patterns = "warn"
implicit_clone = "warn"
implicit_hasher = "warn"
imprecise_flops = "warn"
inconsistent_struct_constructor = "warn"
index_refutable_slice = "warn"
indexing_slicing = "warn"
inefficient_to_string = "warn"
infinite_loop = "warn"
integer_division = "warn"
integer_division_remainder_used = "warn"
into_iter_without_iter = "warn"
invalid_upcast_comparisons = "warn"
ip_constant = "warn"
iter_filter_is_ok = "warn"
iter_filter_is_some = "warn"
iter_not_returning_iterator = "warn"
iter_on_empty_collections = "warn"
iter_on_single_items = "warn"
iter_with_drain = "warn"
iter_without_into_iter = "warn"
large_digit_groups = "warn"
large_futures = "warn"
large_stack_arrays = "warn"
large_types_passed_by_value = "warn"
let_underscore_must_use = "warn"
linkedlist = "warn"
literal_string_with_formatting_args = "warn"
lossy_float_literal = "warn"
macro_use_imports = "warn"
manual_assert = "warn"
manual_ilog2 = "warn"
manual_instant_elapsed = "warn"
manual_is_power_of_two = "warn"
manual_is_variant_and = "warn"
manual_let_else = "warn"
manual_midpoint = "warn"
manual_non_exhaustive = "allow"
manual_string_new = "warn"
many_single_char_names = "warn"
map_unwrap_or = "warn"
map_err_ignore = "warn"
map_with_unused_argument_over_ranges = "warn"
match_bool = "warn"
match_same_arms = "warn"
match_wild_err_arm = "warn"
match_wildcard_for_single_variants = "warn"
maybe_infinite_iter = "warn"
mem_forget = "warn"
mismatching_type_param_order = "warn"
missing_assert_message = "warn"
missing_asserts_for_indexing = "warn"
missing_const_for_fn = "warn"
missing_errors_doc = "warn"
missing_fields_in_debug = "warn"
missing_panics_doc = "warn"
mixed_read_write_in_expression = "warn"
mod_module_files = "warn"
module_name_repetitions = "warn"
multiple_inherent_impl = "warn"
mut_mut = "warn"
mutex_atomic = "warn"
mutex_integer = "warn"
naive_bytecount = "warn"
needless_collect = "warn"
needless_continue = "warn"
needless_for_each = "warn"
needless_pass_by_ref_mut = "warn"
needless_pass_by_value = "warn"
needless_raw_string_hashes = "warn"
needless_raw_strings = "warn"
needless_type_cast = "warn"
no_effect_underscore_binding = "warn"
non_send_fields_in_send_ty = "warn"
non_std_lazy_statics = "warn"
non_zero_suggestions = "warn"
nonstandard_macro_braces = "warn"
option_as_ref_cloned = "warn"
option_option = "warn"
ptr_as_ptr = "warn"
ptr_cast_constness = "warn"
panic_in_result_fn = "warn"
path_buf_push_overwrite = "warn"
pathbuf_init_then_push = "warn"
pub_underscore_fields = "warn"
pub_without_shorthand = "warn"
range_minus_one = "warn"
range_plus_one = "warn"
rc_buffer = "warn"
rc_mutex = "warn"
read_zero_byte_vec = "warn"
redundant_clone = "warn"
redundant_closure_for_method_calls = "warn"
ref_as_ptr = "warn"
redundant_pub_crate = "warn"
redundant_test_prefix = "warn"
redundant_type_annotations = "warn"
ref_binding_to_reference = "warn"
ref_option = "warn"
ref_option_ref = "warn"
renamed_function_params = "warn"
rest_pat_in_fully_bound_structs = "warn"
return_and_then = "warn"
return_self_not_must_use = "warn"
same_functions_in_if_condition = "warn"
same_length_and_capacity = "warn"
same_name_method = "warn"
search_is_some = "warn"
self_only_used_in_recursion = "warn"
semicolon_if_nothing_returned = "warn"
semicolon_inside_block = "warn"
set_contains_or_insert = "warn"
shadow_reuse = "warn"
shadow_same = "warn"
shadow_unrelated = "warn"
should_panic_without_expect = "warn"
similar_names = "warn"
single_char_pattern = "warn"
single_match_else = "warn"
single_option_map = "warn"
stable_sort_primitive = "warn"
str_split_at_newline = "warn"
string_add = "warn"
string_add_assign = "warn"
string_lit_as_bytes = "warn"
string_lit_chars_any = "warn"
string_slice = "warn"
struct_field_names = "warn"
suboptimal_flops = "warn"
suspicious_operation_groupings = "warn"
suspicious_xor_used_as_pow = "warn"
tests_outside_test_module = "warn"
too_long_first_doc_paragraph = "warn"
trait_duplication_in_bounds = "warn"
trivially_copy_pass_by_ref = "warn"
try_err = "warn"
tuple_array_conversions = "warn"
type_repetition_in_bounds = "warn"
unchecked_time_subtraction = "warn"
unicode_not_nfc = "warn"
uninlined_format_args = "warn"
@ -136,72 +240,34 @@ unnecessary_box_returns = "warn"
unnecessary_debug_formatting = "warn"
unnecessary_join = "warn"
unnecessary_literal_bound = "warn"
unnecessary_self_imports = "warn"
unnecessary_semicolon = "warn"
unnecessary_struct_initialization = "warn"
unnecessary_wraps = "warn"
unneeded_field_pattern = "warn"
unnested_or_patterns = "warn"
unreadable_literal = "warn"
unsafe_derive_deserialize = "warn"
unseparated_literal_suffix = "warn"
unused_async = "warn"
unused_peekable = "warn"
unused_result_ok = "warn"
unused_rounding = "warn"
unused_self = "warn"
unused_trait_names = "warn"
unwrap_in_result = "warn"
unwrap_used = "warn"
used_underscore_binding = "warn"
used_underscore_items = "warn"
useless_let_if_seq = "warn"
verbose_file_reads = "warn"
volatile_composites = "warn"
wildcard_enum_match_arm = "warn"
wildcard_imports = "warn"
zero_sized_map_values = "warn"
# restrictive
arithmetic_side_effects = "warn"
as_conversions = "warn"
as_pointer_underscore = "warn"
as_underscore = "warn"
deref_by_slicing = "warn"
empty_drop = "warn"
empty_enum_variants_with_brackets = "warn"
error_impl_error = "warn"
exit = "warn"
expect_used = "warn"
filetype_is_file = "warn"
float_cmp_const = "warn"
fn_to_numeric_cast_any = "warn"
if_then_some_else_none = "warn"
indexing_slicing = "warn"
infinite_loop = "warn"
integer_division = "warn"
integer_division_remainder_used = "warn"
let_underscore_must_use = "warn"
let_underscore_untyped = "warn"
lossy_float_literal = "warn"
map_err_ignore = "warn"
map_with_unused_argument_over_ranges = "warn"
missing_assert_message = "warn"
missing_asserts_for_indexing = "warn"
mixed_read_write_in_expression = "warn"
module_name_repetitions = "warn"
multiple_inherent_impl = "warn"
needless_raw_strings = "warn"
non_zero_suggestions = "warn"
panic_in_result_fn = "warn"
pathbuf_init_then_push = "warn"
pub_without_shorthand = "warn"
redundant_test_prefix = "warn"
redundant_type_annotations = "warn"
renamed_function_params = "warn"
rest_pat_in_fully_bound_structs = "warn"
return_and_then = "warn"
same_name_method = "warn"
semicolon_outside_block = "warn"
shadow_reuse = "warn"
shadow_same = "warn"
shadow_unrelated = "warn"
string_add = "warn"
string_lit_chars_any = "warn"
unnecessary_self_imports = "warn"
unneeded_field_pattern = "warn"
unseparated_literal_suffix = "warn"
unused_result_ok = "warn"
unused_trait_names = "warn"
unwrap_used = "warn"
verbose_file_reads = "warn"
wildcard_enum_match_arm = "warn"
# cargo
negative_feature_names = "warn"
redundant_feature_names = "warn"
multiple_crate_versions = "warn"
wildcard_dependencies = "warn"

View file

@ -1,24 +1,24 @@
use std::{collections::HashMap, path::PathBuf};
use serde::{Serialize, Deserialize};
pub use edge::Edge;
pub use meta::{Config, Meta};
pub use node::Node;
use serde::{Deserialize, Serialize};
use crate::syntax::{
command::Arguments,
content::{
self,
parser::{flatten, Token, token::Anchor},
use crate::{
prelude::*,
syntax::{
command::Arguments,
content::{
self,
parser::{Token, flatten, token::Anchor},
},
},
};
use crate::prelude::*;
pub use {
node::Node,
edge::Edge,
meta::{Meta, Config},
};
pub mod node;
pub mod edge;
pub mod meta;
pub mod node;
#[derive(Serialize, Deserialize, Clone, Default, PartialEq, Eq, Debug)]
pub struct Graph {
@ -78,7 +78,7 @@ impl Graph {
}
}
/// Takes a file path to a TOML file and returns a modulated Graph
/// Takes a file path to a TOML file and returns a modulated Graph.
///
/// If `path` is None, it will fallback to CLI arguments or their defaults.
///
@ -99,11 +99,18 @@ impl Graph {
let cli_path = Arguments::default().parse().graph_path;
let path = in_path.map_or(cli_path, PathBuf::from);
let toml_source = match std::fs::read_to_string(path) {
let toml_source = match std::fs::read_to_string(&path) {
Ok(s) => s,
Err(e) => {
log!(ERROR, "Failed reading {e}");
return Err("Failed reading file at {path}".to_string());
log!(
ERROR,
"Error reading path {}: {e}",
path.as_path().display(),
);
return Err(format!(
"Failed reading file at {}",
path.as_path().display(),
));
},
};
@ -192,7 +199,7 @@ impl Graph {
tlog!(&instant, "Parsed configuration");
}
/// Construct a `HashMap` with incoming connections (reversed edges)
/// Construct a `HashMap` with incoming connections (reversed edges).
fn map_incoming(&mut self) {
for node in self.nodes.clone().into_values() {
for edge in node.connections.clone().values() {
@ -372,7 +379,7 @@ impl Graph {
}
}
/// Increments detached node statistics for the given node ID
/// Increments detached node statistics for the given node ID.
///
/// Performs checked arithmetic to the following effect:
/// - Stats will saturate at `u32::MAX`
@ -392,14 +399,14 @@ impl Graph {
}
pub fn find_node(&self, query: &str) -> QueryResult {
let collapsed_query = query.trim().replace(" ", "");
let collapsed_query = query.trim().replace(' ', "");
if query == collapsed_query {
log!(VERBOSE, "Chasing candidate for query {query}");
} else {
log!(
VERBOSE,
"Chasing candidate for query {query}, collapsed {collapsed_query}"
"Chasing candidate: query {query}, collapsed {collapsed_query}"
);
}

View file

@ -1,4 +1,4 @@
use serde::{Serialize, Deserialize};
use serde::{Deserialize, Serialize};
#[derive(Serialize, Deserialize, Clone, Default, PartialEq, Eq, Debug)]
pub struct Edge {

View file

@ -1,6 +1,6 @@
use crate::prelude::*;
use serde::{Deserialize, Serialize};
use serde::{Serialize, Deserialize};
use crate::prelude::*;
#[derive(Serialize, Deserialize, Clone, PartialEq, Eq, Debug)]
pub struct Meta {
@ -105,15 +105,9 @@ impl Default for Config {
}
// See: https://github.com/serde-rs/serde/issues/368
fn mktrue() -> bool {
true
}
fn mkfalse() -> bool {
false
}
fn mk8() -> u16 {
8
}
const fn mktrue() -> bool { true }
const fn mkfalse() -> bool { false }
const fn mk8() -> u16 { 8 }
#[derive(Serialize, Deserialize, Clone, PartialEq, Eq, Debug)]
pub struct Version {
@ -154,7 +148,7 @@ impl Version {
Version::from_text(env!("CARGO_PKG_VERSION"))
}
/// Parses a string into a Version struct
/// Parses a string into a Version struct.
///
/// It is expected for the version string to contain exactly three
/// dot-separated numeric values with an optional leading `v` character.
@ -314,9 +308,8 @@ impl std::fmt::Display for VersionErrorCause {
#[cfg(test)]
mod tests {
use crate::graph::Graph;
use super::*;
use crate::graph::Graph;
#[test]
fn empty_footer_text() {

View file

@ -1,6 +1,6 @@
use std::collections::HashMap;
use serde::{Serialize, Deserialize};
use serde::{Deserialize, Serialize};
use super::edge::Edge;
@ -118,7 +118,9 @@ mod tests {
assert_eq!(
format!("{node}"),
format!(
"Node 404 [title:'Not Found' text:15l summary:{} redirect:{redirect}]",
"Node 404 [title:'Not Found' \
text:15l summary:{} \
redirect:{redirect}]",
summary.len(),
)
);

View file

@ -1,16 +1,17 @@
use std::{sync, time};
pub mod prelude {
pub use crate::log;
pub use crate::tlog;
pub use crate::log::Level::*;
pub use crate::log::now;
pub use crate::{
log,
log::{Level::*, now},
tlog, write_log,
};
}
pub mod graph;
pub mod log;
pub mod router;
pub mod syntax;
pub mod log;
pub static ONSET: sync::LazyLock<time::Instant> =
sync::LazyLock::new(time::Instant::now);

View file

@ -4,7 +4,7 @@ pub use level::*;
mod level;
/// Strings in this slice suppress logging if found in the stack trace
/// Strings in this slice suppress logging if found in the stack trace.
pub const EXCLUSIONS: &[&str] = &["en::graph::Graph::parse_config"];
#[derive(Debug)]
@ -32,7 +32,7 @@ impl Data {
let path = make_display_path(captured_path, &env_level);
let is_silent = env_level <= Level::SILENT;
let message_level_is_within_env_level = message_level <= env_level;
let level_within_env_level = message_level <= env_level;
let excluded_in_code =
!EXCLUSIONS.iter().all(|&s| !trace_string.contains(s));
let excluded_by_env =
@ -41,17 +41,17 @@ impl Data {
filter.is_empty() || captured_path.contains(&filter);
let should_log = !is_silent
&& message_level_is_within_env_level
&& level_within_env_level
&& !excluded_in_code
&& !excluded_by_env
&& matches_filter;
#[allow(clippy::print_stderr)]
#[expect(clippy::print_stderr)]
if env_level == Level::META {
eprintln!(
"Log decision for message from {path}: {should_log} given\n\
is_silent: {is_silent} (expected false)\n\
message_level_is_within_env_level: {message_level_is_within_env_level}\n\
level_within_env_level: {level_within_env_level}\n\
excluded_in_code: {excluded_in_code} (expected false)\n\
excluded_by_env: {excluded_by_env} (expected false)\n\
matches_filter: {matches_filter}\n\
@ -79,7 +79,7 @@ pub fn env_level() -> Level {
}
}
#[allow(clippy::print_stderr)]
#[expect(clippy::print_stderr)]
pub fn print_state() {
let env_level = env_level();
let version = env!("CARGO_PKG_VERSION");
@ -90,7 +90,7 @@ pub fn print_state() {
}
}
#[allow(clippy::print_stderr)]
#[expect(clippy::print_stderr)]
pub fn timed(past: &Instant, message: &str) -> Instant {
let now = Instant::now();
let env_level = env_level();
@ -118,11 +118,9 @@ macro_rules! tlog {
}};
}
pub fn now() -> Instant {
Instant::now()
}
pub fn now() -> Instant { Instant::now() }
#[allow(clippy::print_stderr)]
#[expect(clippy::print_stderr, clippy::use_debug)]
pub fn elog(function: &str, message: &str) {
eprintln!("{:?} [{function}] {message}", crate::ONSET.elapsed());
}
@ -211,13 +209,22 @@ pub fn wrap(s: &str) -> String {
}
}
fn escape(s: &str) -> String {
s.escape_debug().collect()
}
fn escape(s: &str) -> String { s.escape_debug().collect() }
symbolize(&quote(&escape(s)))
}
#[macro_export]
macro_rules! write_log {
($buffer:expr, $format_string:expr $(, $format_args:expr)* $(,)?) => {{
use std::fmt::Write as _;
let result = write!($buffer, $format_string $(, $format_args)*);
if let Err(error) = result {
log!(ERROR, "Unexpected error writing into {}: ${error}", $buffer);
}
}};
}
#[cfg(test)]
mod tests {
use super::*;
@ -243,7 +250,7 @@ mod tests {
}
fn run_in_debug_level(level: &str) {
#[allow(unsafe_code)]
#[expect(unsafe_code)]
unsafe {
std::env::set_var("DEBUG", level);
log!("Debug is set to {level}");

View file

@ -3,7 +3,7 @@ use std::{backtrace, io, panic};
use en::{ONSET, graph::Graph, log, prelude::*, syntax};
#[tokio::main]
#[allow(clippy::print_stderr, clippy::print_stdout)]
#[expect(clippy::print_stderr, clippy::print_stdout, clippy::use_debug)]
async fn main() -> io::Result<()> {
log::print_state();
let mut instant = now();

View file

@ -3,13 +3,13 @@ use axum::{Router, routing::get};
use crate::graph::Graph;
mod handlers {
pub mod graph;
pub mod template;
pub mod raw;
pub mod navigation;
pub mod fixed;
pub mod error;
pub mod fixed;
pub mod graph;
pub mod mime;
pub mod navigation;
pub mod raw;
pub mod template;
}
#[derive(Clone)]
@ -49,11 +49,6 @@ pub fn new(graph: Graph) -> Router {
#[cfg(test)]
mod tests {
use crate::{
graph::{Graph, Config, Meta},
};
use super::*;
use axum::{
body::Body,
http::{Request, StatusCode},
@ -61,6 +56,9 @@ mod tests {
};
use tower::ServiceExt as _;
use super::*;
use crate::graph::{Config, Graph, Meta};
async fn request(uri: &str, config: Option<&Config>) -> Response<Body> {
let default_graph = Graph::load();
let graph = Graph {

View file

@ -68,10 +68,8 @@ pub async fn not_found(State(state): State<GlobalState>) -> Response<Body> {
#[cfg(test)]
mod tests {
use axum::{
http::{StatusCode},
extract::State,
};
use axum::{extract::State, http::StatusCode};
use super::*;
#[tokio::test]

View file

@ -1,16 +1,13 @@
use axum::{
body::Body,
extract::{Path, State},
http::{HeaderValue, Response, StatusCode, header},
{
body::Body,
extract::{Path, State},
},
};
use crate::prelude::*;
use crate::{
graph::{Format, Graph, SerialErrorCause},
router::{GlobalState, handlers},
router::handlers::mime::Mime,
prelude::*,
router::{GlobalState, handlers, handlers::mime::Mime},
};
pub async fn file(

View file

@ -1,7 +1,8 @@
use axum::{
extract::State,
response::IntoResponse as _,
{body::Body, extract::Path, http::Response, response::Redirect},
body::Body,
extract::{Path, State},
http::Response,
response::{IntoResponse as _, Redirect},
};
use crate::{
@ -36,26 +37,20 @@ pub async fn node(
"node",
&context,
if found { 500 } else { 404 },
Some(
format!(
"Failed to generate page for node {} (ID {}).",
node.title, id
)
.to_owned(),
),
Some(format!(
"Failed to generate page for node {} (ID {}).",
node.title, id
)),
!found,
)
}
#[cfg(test)]
mod tests {
use axum::{
http::{HeaderName, StatusCode},
};
use crate::graph::{Format, Graph};
use axum::http::{HeaderName, StatusCode};
use super::*;
use crate::graph::{Format, Graph};
async fn wrap_node(query: &str) -> Response<Body> {
let state = GlobalState {

View file

@ -1,4 +1,6 @@
use axum::{Form, body::Body, extract::State, http::Response, response::Redirect};
use axum::{
Form, body::Body, extract::State, http::Response, response::Redirect,
};
use crate::{
prelude::*,
@ -31,7 +33,7 @@ pub async fn data(State(state): State<GlobalState>) -> Response<Body> {
let mut detached_pairs: Vec<(String, u32)> =
state.graph.stats.detached.clone().into_iter().collect();
detached_pairs.sort_by(|a, b| b.1.cmp(&a.1));
detached_pairs.sort_by_key(|b| std::cmp::Reverse(b.1));
let mut context = tera::Context::default();
context.insert("graph", &state.graph);
@ -58,9 +60,9 @@ pub struct Query {
#[cfg(test)]
mod tests {
use axum::http::StatusCode;
use crate::graph::Graph;
use super::*;
use crate::graph::Graph;
async fn wrap_page(path: &str) -> Response<Body> {
let state = GlobalState {

View file

@ -1,6 +1,6 @@
use axum::{
body::Body,
http::{header, HeaderValue, Response, StatusCode},
http::{HeaderValue, Response, StatusCode, header},
};
use crate::prelude::*;

View file

@ -1,6 +1,6 @@
use axum::{
body::Body,
http::{header, Response, StatusCode},
http::{Response, StatusCode, header},
};
use crate::{
@ -8,7 +8,7 @@ use crate::{
router::{GlobalState, handlers::raw::make_response},
};
/// Assembles a response containing the graph as its only context
/// Assembles a response containing the graph as its only context.
///
/// The template name **must not** contain the extension.
#[expect(clippy::unused_async)]
@ -38,7 +38,7 @@ pub(in crate::router::handlers) fn with_context(
make_response(&body, status_code, &[(header::CONTENT_TYPE, "text/html")])
}
/// Renders a template into a String and error code
/// Renders a template into a String and error code.
///
/// The template name **must not** contain the extension (e.g. `.html`).
pub(in crate::router::handlers) fn render(
@ -135,9 +135,8 @@ fn emergency_wrap(error: &tera::Error) -> String {
#[cfg(test)]
mod tests {
use crate::graph::Graph;
use super::*;
use crate::graph::Graph;
#[test]
fn by_filename_forced_error() {

View file

@ -7,7 +7,7 @@ use crate::prelude::*;
static FIRST_PARSE: AtomicBool = AtomicBool::new(true);
#[derive(Clone, Debug, PartialEq)]
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Arguments {
pub hostname: String,
pub port: u16,

View file

@ -1,6 +1,6 @@
use std::mem::discriminant;
use parser::{Token, Lexeme};
use parser::{Lexeme, Token};
use crate::graph::Graph;
@ -43,9 +43,7 @@ impl TokenOutput {
}
}
pub fn parse(text: &str, graph: &Graph) -> String {
parser::read(text, graph)
}
pub fn parse(text: &str, graph: &Graph) -> String { parser::read(text, graph) }
pub fn rich_parse(text: &str, graph: &Graph) -> TokenOutput {
parser::rich_read(text, graph)
@ -53,9 +51,8 @@ pub fn rich_parse(text: &str, graph: &Graph) -> TokenOutput {
#[cfg(test)]
mod tests {
use crate::syntax::content::parser::token::{Bold, Oblique};
use super::*;
use crate::syntax::content::parser::token::{Bold, Oblique};
#[test]
fn only() {

View file

@ -1,15 +1,18 @@
use crate::{prelude::*, graph::Graph, syntax::content::TokenOutput};
use context::{Block, Inline};
pub use lexeme::Lexeme;
use lexer::{LEXMAP, lex};
pub use {lexeme::Lexeme, token::Token, state::State};
pub use state::State;
pub use token::Token;
use crate::{graph::Graph, prelude::*, syntax::content::TokenOutput};
pub mod token;
pub mod lexer;
pub mod lexeme;
pub mod segment;
pub mod context;
pub mod lexeme;
pub mod lexer;
pub mod point;
pub mod segment;
pub mod state;
pub mod token;
fn parse(tokens: &[Token]) -> String {
tokens.iter().map(Token::render).collect::<String>()
@ -30,7 +33,7 @@ pub(super) fn rich_read(input: &str, graph: &Graph) -> TokenOutput {
}
/// Apply end-to-end point and inline parsing for nested formatting, such as
/// inside the display text of anchors and list items
/// inside the display text of anchors and list items.
pub fn format(input: &str, graph: &Graph) -> (String, Vec<Token>) {
let tokens = lex(input, LEXMAP, graph, false).tokens;
(parse(&tokens), tokens)
@ -46,16 +49,10 @@ pub fn flatten(input: &str, graph: &Graph) -> String {
#[cfg(test)]
mod tests {
use crate::{
graph::Graph,
syntax::content::parser::{token::header::Level},
};
use super::*;
use crate::{graph::Graph, syntax::content::parser::token::header::Level};
fn read_noconfig(input: &str) -> String {
read(input, &Graph::default())
}
fn read_noconfig(input: &str) -> String { read(input, &Graph::default()) }
#[test]
fn empty_render_is_empty() {
@ -65,7 +62,10 @@ mod tests {
#[test]
fn mixed_sample() {
let en = "`this |test|` tries ## to |brea|k|: things";
let html = r#"<p><code>this |test|</code> tries ## to <a class="detached" title="" href="/node/k">brea</a>: things</p>"#;
let html = concat!(
r#"<p><code>this |test|</code> tries ## to <a "#,
r#"class="detached" title="" href="/node/k">brea</a>: things</p>"#,
);
assert_eq!(read_noconfig(en), html);
}

View file

@ -3,9 +3,9 @@ use crate::syntax::content::parser::{
token::{Header, Paragraph, PreFormat, Verse},
};
pub mod anchor;
pub mod block;
pub mod inline;
pub mod anchor;
pub mod list;
pub mod quote;
pub mod table;
@ -68,7 +68,7 @@ pub fn close(state: &State, tokens: &mut Vec<Token>) {
#[cfg(test)]
mod tests {
use crate::syntax::content::parser::{context::Block, State};
use crate::syntax::content::parser::{State, context::Block};
#[test]
#[should_panic(expected = "End of input with open list")]

View file

@ -1,7 +1,7 @@
use crate::{
prelude::*,
syntax::content::parser::{context::Inline, Lexeme, State, Token},
graph::Graph,
prelude::*,
syntax::content::parser::{Lexeme, State, Token, context::Inline},
};
/// Handles open anchor contexts until an anchor token is fully parsed.
@ -61,7 +61,7 @@ pub fn parse(
&& !lexeme.match_next_char('|')
{
log!(VERBOSE, "End: Plural anchor");
candidate.set_destination(Some(&candidate.text().clone()));
candidate.set_destination(Some(&candidate.text()));
candidate.text_push("s");
if lexeme.last() {
push(None, tokens, state, graph);
@ -73,7 +73,7 @@ pub fn parse(
if candidate.text().contains(':') {
candidate.set_external(true);
}
push(Some(&candidate.text().clone()), tokens, state, graph);
push(Some(&candidate.text()), tokens, state, graph);
} else {
push(Some(&buffer.destination.clone()), tokens, state, graph);
}
@ -162,11 +162,9 @@ fn push(
#[cfg(test)]
mod tests {
use crate::{syntax::content::parser, graph::Graph};
use crate::{graph::Graph, syntax::content::parser};
fn read(input: &str) -> String {
parser::read(input, &Graph::default())
}
fn read(input: &str) -> String { parser::read(input, &Graph::default()) }
#[test]
fn flanking() {
@ -188,7 +186,10 @@ mod tests {
fn flanking_with_trailing_comma_and_space() {
assert_eq!(
read("|Node|, at"),
r#"<p><a class="detached" title="" href="/node/Node">Node</a>, at</p>"#
concat!(
r#"<p><a class="detached" title="" "#,
r#"href="/node/Node">Node</a>, at</p>"#,
)
);
}
@ -204,7 +205,8 @@ mod tests {
fn needless_three_pipe_anchor() {
assert_eq!(
read("|Node|Destination|"),
r#"<p><a class="detached" title="" href="/node/Destination">Node</a></p>"#
concat!(r#"<p><a class="detached" title="" "#,
r#"href="/node/Destination">Node</a></p>"#)
);
}
@ -212,7 +214,10 @@ mod tests {
fn nonleading_second_pipe() {
assert_eq!(
read("Go to Node|Destination|, here"),
r#"<p>Go to <a class="detached" title="" href="/node/Destination">Node</a>, here</p>"#,
concat!(
r#"<p>Go to <a class="detached" title="" "#,
r#"href="/node/Destination">Node</a>, here</p>"#
),
);
}
@ -220,7 +225,9 @@ mod tests {
fn anchor_to_node_s() {
assert_eq!(
read("The |letter s|s|'s node: |s|!"),
r#"<p>The <a class="detached" title="" href="/node/s">letter s</a>'s node: <a class="detached" title="" href="/node/s">s</a>!</p>"#
concat!(r#"<p>The <a class="detached" title="" "#,
r#"href="/node/s">letter s</a>'s node: "#,
r#"<a class="detached" title="" href="/node/s">s</a>!</p>"#)
);
}
@ -228,7 +235,10 @@ mod tests {
fn nonleading_plural_anchor() {
assert_eq!(
read("The flower|s bloomed"),
r#"<p>The <a class="detached" title="" href="/node/flower">flowers</a> bloomed</p>"#
concat!(
r#"<p>The <a class="detached" title="" "#,
r#"href="/node/flower">flowers</a> bloomed</p>"#,
)
);
}
@ -236,7 +246,9 @@ mod tests {
fn leading_plural_anchor() {
assert_eq!(
read("Interfaces are |element|s of |system|s."),
r#"<p>Interfaces are <a class="detached" title="" href="/node/element">elements</a> of <a class="detached" title="" href="/node/system">systems</a>.</p>"#
concat!(r#"<p>Interfaces are <a class="detached" title="" "#,
r#"href="/node/element">elements</a> of <a class="detached" "#,
r#"title="" href="/node/system">systems</a>.</p>"#)
);
}
@ -244,7 +256,11 @@ mod tests {
fn leading_multiword_anchor() {
assert_eq!(
read("interactions are |basic elements| of systems"),
r#"<p>interactions are <a class="detached" title="" href="/node/basic elements">basic elements</a> of systems</p>"#
concat!(
r#"<p>interactions are <a class="detached" title="""#,
r#" href="/node/basic elements">basic elements</a> "#,
r#"of systems</p>"#,
),
);
}
@ -252,7 +268,9 @@ mod tests {
fn explicit_end_of_destination() {
assert_eq!(
read("interactions are |basic elements|BasicElements| of systems"),
r#"<p>interactions are <a class="detached" title="" href="/node/BasicElements">basic elements</a> of systems</p>"#
concat!(r#"<p>interactions are <a class="detached" title="" "#,
r#"href="/node/BasicElements">basic elements</a> of "#,
r#"systems</p>"#)
);
}
@ -260,7 +278,11 @@ mod tests {
fn explicit_end_of_external_destination() {
assert_eq!(
read("this |anchor example|https://example.com| is external"),
r#"<p>this <a class="external" title="" href="https://example.com">anchor example</a> is external</p>"#
concat!(
r#"<p>this <a class="external" title="" "#,
r#"href="https://example.com">anchor example</a> is "#,
r#"external</p>"#
)
);
}
@ -276,7 +298,10 @@ mod tests {
fn external_anchor_destination_at_eoi() {
assert_eq!(
read("a b|https://example.com"),
r#"<p>a <a class="external" title="" href="https://example.com">b</a></p>"#
concat!(
r#"<p>a <a class="external" title="" "#,
r#"href="https://example.com">b</a></p>"#,
)
);
}
@ -284,7 +309,10 @@ mod tests {
fn nonleading_plural_anchor_at_eoi() {
assert_eq!(
read("element|s"),
r#"<p><a class="detached" title="" href="/node/element">elements</a></p>"#
concat!(
r#"<p><a class="detached" title="" "#,
r#"href="/node/element">elements</a></p>"#,
)
);
}
@ -292,17 +320,22 @@ mod tests {
fn leading_plural_anchor_at_eoi() {
assert_eq!(
read("|element|s"),
r#"<p><a class="detached" title="" href="/node/element">elements</a></p>"#
concat!(
r#"<p><a class="detached" title="" "#,
r#"href="/node/element">elements</a></p>"#,
)
);
}
#[test]
fn http_external_anchor() {
assert_eq!(
read(
"a |false dichotomy|https://en.wikipedia.org/wiki/False_dilemma|."
read("a |false dichotomy|https://wikipedia.org/False_dilemma|."),
concat!(
r#"<p>a <a class="external" title="" "#,
r#"href="https://wikipedia.org/False_dilemma">"#,
r#"false dichotomy</a>.</p>"#,
),
r#"<p>a <a class="external" title="" href="https://en.wikipedia.org/wiki/False_dilemma">false dichotomy</a>.</p>"#
);
}
@ -315,7 +348,8 @@ mod tests {
"at rustup.rs",
)),
concat!(
r#"<p><a class="external" title="" href="https://rustup.rs/">Rust toolchain</a>"#,
r#"<p><a class="external" title="" "#,
r#"href="https://rustup.rs/">Rust toolchain</a>"#,
"\n",
"at rustup.rs</p>",
)
@ -326,7 +360,11 @@ mod tests {
fn http_external_anchor_leading_no_third_then_space() {
assert_eq!(
read("|Rust toolchain|https://rustup.rs/ at rustup.rs"),
r#"<p><a class="external" title="" href="https://rustup.rs/">Rust toolchain</a> at rustup.rs</p>"#
concat!(
r#"<p><a class="external" title="" "#,
r#"href="https://rustup.rs/">Rust toolchain</a> "#,
r#"at rustup.rs</p>"#,
),
);
}
@ -334,7 +372,10 @@ mod tests {
fn http_external_anchor_leading_no_third_then_eoi() {
assert_eq!(
read("|Rust toolchain|https://rustup.rs/"),
r#"<p><a class="external" title="" href="https://rustup.rs/">Rust toolchain</a></p>"#
concat!(
r#"<p><a class="external" title="" "#,
r#"href="https://rustup.rs/">Rust toolchain</a></p>"#,
)
);
}
@ -344,7 +385,10 @@ mod tests {
read("\n|SomeAnchor|\n"),
concat!(
"\n",
r#"<p><a class="detached" title="" href="/node/SomeAnchor">SomeAnchor</a></p>"#
concat!(
r#"<p><a class="detached" title="" "#,
r#"href="/node/SomeAnchor">SomeAnchor</a></p>"#,
)
),
);
}
@ -354,9 +398,11 @@ mod tests {
assert_eq!(
read("|SomeAnchor|\n|SomeOtherAnchor|\n"),
concat!(
r#"<p><a class="detached" title="" href="/node/SomeAnchor">SomeAnchor</a>"#,
r#"<p><a class="detached" title="" "#,
r#"href="/node/SomeAnchor">SomeAnchor</a>"#,
"\n",
r#"<a class="detached" title="" href="/node/SomeOtherAnchor">SomeOtherAnchor</a></p>"#
r#"<a class="detached" title="" "#,
r#"href="/node/SomeOtherAnchor">SomeOtherAnchor</a></p>"#,
)
);
}
@ -366,10 +412,12 @@ mod tests {
assert_eq!(
read("|SomeAnchor|\n\n|SomeOtherAnchor|\n"),
concat!(
r#"<p><a class="detached" title="" href="/node/SomeAnchor">SomeAnchor</a></p>"#,
r#"<p><a class="detached" title="" "#,
r#"href="/node/SomeAnchor">SomeAnchor</a></p>"#,
"\n",
"\n",
r#"<p><a class="detached" title="" href="/node/SomeOtherAnchor">SomeOtherAnchor</a></p>"#
r#"<p><a class="detached" title="" "#,
r#"href="/node/SomeOtherAnchor">SomeOtherAnchor</a></p>"#,
),
);
}
@ -378,7 +426,10 @@ mod tests {
fn trailing_anchor() {
assert_eq!(
read("see acks|acks"),
r#"<p>see <a class="detached" title="" href="/node/acks">acks</a></p>"#
concat!(
r#"<p>see <a class="detached" title="" "#,
r#"href="/node/acks">acks</a></p>"#,
)
);
}
@ -388,8 +439,9 @@ mod tests {
read("\nsee acks|acks\n"),
concat!(
"\n",
r#"<p>see <a class="detached" title="" href="/node/acks">acks</a></p>"#
)
r#"<p>see <a class="detached" title="" "#,
r#"href="/node/acks">acks</a></p>"#,
),
);
}
@ -417,7 +469,10 @@ mod tests {
fn anchor_with_trailing_single_quote() {
assert_eq!(
read("the |lion|'s mouth"),
r#"<p>the <a class="detached" title="" href="/node/lion">lion</a>'s mouth</p>"#,
concat!(
r#"<p>the <a class="detached" title="" "#,
r#"href="/node/lion">lion</a>'s mouth</p>"#,
)
);
}
@ -425,7 +480,10 @@ mod tests {
fn anchor_with_trailing_double_quote() {
assert_eq!(
read(r#"the "|real|" motive"#),
r#"<p>the "<a class="detached" title="" href="/node/real">real</a>" motive</p>"#,
concat!(
r#"<p>the "<a class="detached" title="" "#,
r#"href="/node/real">real</a>" motive</p>"#,
)
);
}
@ -433,7 +491,10 @@ mod tests {
fn anchor_with_trailing_parenthesis() {
assert_eq!(
read("this (though |true|) was questioned"),
r#"<p>this (though <a class="detached" title="" href="/node/true">true</a>) was questioned</p>"#,
concat!(
r#"<p>this (though <a class="detached" title="" "#,
r#"href="/node/true">true</a>) was questioned</p>"#,
)
);
}
@ -441,7 +502,10 @@ mod tests {
fn anchor_with_leading_single_quote() {
assert_eq!(
read("the 'real|Reality' motive"),
r#"<p>the '<a class="detached" title="" href="/node/Reality">real</a>' motive</p>"#,
concat!(
r#"<p>the '<a class="detached" title="" "#,
r#"href="/node/Reality">real</a>' motive</p>"#,
)
);
}
@ -449,7 +513,10 @@ mod tests {
fn anchor_with_leading_double_quote() {
assert_eq!(
read(r#"the "real|Reality" motive"#),
r#"<p>the "<a class="detached" title="" href="/node/Reality">real</a>" motive</p>"#,
concat!(
r#"<p>the "<a class="detached" title="" "#,
r#"href="/node/Reality">real</a>" motive</p>"#,
)
);
}
@ -457,7 +524,10 @@ mod tests {
fn anchor_with_leading_parenthesis() {
assert_eq!(
read("her (last|Surname) name"),
r#"<p>her (<a class="detached" title="" href="/node/Surname">last</a>) name</p>"#,
concat!(
r#"<p>her (<a class="detached" title="" "#,
r#"href="/node/Surname">last</a>) name</p>"#,
)
);
}
@ -465,7 +535,10 @@ mod tests {
fn anchor_with_internal_apostrophe() {
assert_eq!(
read("the |lion's mouth|album was released"),
r#"<p>the <a class="detached" title="" href="/node/album">lion's mouth</a> was released</p>"#
concat!(
r#"<p>the <a class="detached" title="" "#,
r#"href="/node/album">lion's mouth</a> was released</p>"#,
)
);
}
@ -473,7 +546,10 @@ mod tests {
fn nonleading_anchor_with_internal_apostrophe() {
assert_eq!(
read("they decided to stay at Jane's|YellowHouse that night"),
r#"<p>they decided to stay at <a class="detached" title="" href="/node/YellowHouse">Jane's</a> that night</p>"#
concat!(
r#"<p>they decided to stay at <a class="detached" title="" "#,
r#"href="/node/YellowHouse">Jane's</a> that night</p>"#,
)
);
}
@ -481,7 +557,10 @@ mod tests {
fn nonleading_anchor_with_internal_apostrophe_at_eoi() {
assert_eq!(
read("they decided to stay at Jane's|YellowHouse"),
r#"<p>they decided to stay at <a class="detached" title="" href="/node/YellowHouse">Jane's</a></p>"#
concat!(
r#"<p>they decided to stay at <a class="detached" "#,
r#"title="" href="/node/YellowHouse">Jane's</a></p>"#,
)
);
}
@ -489,7 +568,10 @@ mod tests {
fn nonleading_anchor_with_internal_apostrophe_at_soi() {
assert_eq!(
read("Jane's|YellowHouse that night"),
r#"<p><a class="detached" title="" href="/node/YellowHouse">Jane's</a> that night</p>"#
concat!(
r#"<p><a class="detached" title="" "#,
r#"href="/node/YellowHouse">Jane's</a> that night</p>"#,
)
);
}
@ -497,7 +579,10 @@ mod tests {
fn anchor_with_internal_double_quotes() {
assert_eq!(
read(r#"the |"real"|Truth motive"#),
r#"<p>the <a class="detached" title="" href="/node/Truth">"real"</a> motive</p>"#,
concat!(
r#"<p>the <a class="detached" title="" "#,
r#"href="/node/Truth">"real"</a> motive</p>"#,
)
);
}
@ -505,7 +590,10 @@ mod tests {
fn anchor_with_internal_double_quotes_wrapping_spaced_words() {
assert_eq!(
read(r#"the |"bare reality"|Ideology they believed"#),
r#"<p>the <a class="detached" title="" href="/node/Ideology">"bare reality"</a> they believed</p>"#,
concat!(
r#"<p>the <a class="detached" title="" "#,
r#"href="/node/Ideology">"bare reality"</a> they believed</p>"#,
)
);
}
@ -513,7 +601,10 @@ mod tests {
fn anchor_with_internal_parenthesis() {
assert_eq!(
read("her |last (name)|Surname was Amad"),
r#"<p>her <a class="detached" title="" href="/node/Surname">last (name)</a> was Amad</p>"#,
concat!(
r#"<p>her <a class="detached" title="" "#,
r#"href="/node/Surname">last (name)</a> was Amad</p>"#,
)
);
}
@ -521,7 +612,11 @@ mod tests {
fn anchor_with_internal_parenthesis_wrapping_spaced_words() {
assert_eq!(
read("this |truth (though questionable) was fine|Absurd to them "),
r#"<p>this <a class="detached" title="" href="/node/Absurd">truth (though questionable) was fine</a> to them</p>"#
concat!(
r#"<p>this <a class="detached" title="" "#,
r#"href="/node/Absurd">truth (though questionable) was "#,
r#"fine</a> to them</p>"#,
)
);
}
}

View file

@ -8,7 +8,7 @@ use crate::{
parser::{
Block, Lexeme, State, Token,
token::{
Header, List, LineBreak, Literal, Paragraph, PreFormat, Quote,
Header, LineBreak, List, Literal, Paragraph, PreFormat, Quote,
Table, Verse,
},
},
@ -112,7 +112,7 @@ pub fn parse(
iterator.next();
return true;
} else if lexeme.match_char('\n') {
tokens.push(Token::LineBreak(LineBreak::default()));
tokens.push(Token::LineBreak(LineBreak));
return true;
}
},
@ -124,16 +124,14 @@ pub fn parse(
mod tests {
use crate::{
syntax::content::parser::{
self, Block, Token, context, State,
token::{Header, header::Level, PreFormat},
},
graph::Graph,
syntax::content::parser::{
self, Block, State, Token, context,
token::{Header, PreFormat, header::Level},
},
};
fn read(input: &str) -> String {
parser::read(input, &Graph::default())
}
fn read(input: &str) -> String { parser::read(input, &Graph::default()) }
#[test]
fn pre() {

View file

@ -1,17 +1,16 @@
use std::{iter::Peekable, slice::Iter};
use crate::{
graph::Graph,
prelude::*,
syntax::content::{
Parseable as _,
parser::{
Lexeme, State,
Inline, Lexeme, State, Token, context,
state::AnchorBuffer,
Inline, context, Token,
token::{Anchor, Code, Literal},
},
},
graph::Graph,
};
pub fn parse(
@ -49,11 +48,10 @@ pub fn parse(
log!(VERBOSE, "Inline Context: Code -> None on {lexeme}");
state.context.inline = Inline::None;
tokens.push(Token::Code(Code::new(false)));
return true;
} else {
tokens.push(Token::Literal(Literal::lex(lexeme)));
return true;
}
return true;
},
Inline::Anchor => {
if context::anchor::parse(lexeme, state, tokens, graph) {

View file

@ -1,11 +1,11 @@
use std::{iter::Peekable, slice::Iter};
use crate::{
graph::Graph,
prelude::*,
syntax::content::parser::{
context::Block, Token, Lexeme, State, state, token::Item, format,
Lexeme, State, Token, context::Block, format, state, token::Item,
},
graph::Graph,
};
/// Handles open list contexts until a list is fully parsed.
@ -30,7 +30,7 @@ pub fn parse(
let candidate = &mut buffer.candidate;
let item_candidate = &mut buffer.item_candidate;
#[allow(clippy::wildcard_enum_match_arm)]
#[expect(clippy::wildcard_enum_match_arm)]
match state.context.block {
Block::List => {
if lexeme.match_char(' ') && item_candidate.depth.is_none() {
@ -88,13 +88,11 @@ pub fn parse(
#[cfg(test)]
mod tests {
use crate::{
syntax::content::parser::{self, context::list::parse, Lexeme, State},
graph::Graph,
syntax::content::parser::{self, Lexeme, State, context::list::parse},
};
fn read(input: &str) -> String {
parser::read(input, &Graph::default())
}
fn read(input: &str) -> String { parser::read(input, &Graph::default()) }
#[test]
fn unordered_list() {

View file

@ -26,7 +26,7 @@ pub fn parse(
let buffer = &mut state.buffers.quote;
let candidate = &mut buffer.candidate;
#[allow(clippy::wildcard_enum_match_arm)]
#[expect(clippy::wildcard_enum_match_arm)]
match state.context.block {
Block::Quote => {
if Quote::probe_end(lexeme) {

View file

@ -32,7 +32,7 @@ pub fn parse(
parsed_text
};
#[allow(clippy::wildcard_enum_match_arm)]
#[expect(clippy::wildcard_enum_match_arm)]
match state.context.block {
Block::Table => {
if Table::probe_end(lexeme) {
@ -63,7 +63,6 @@ pub fn parse(
log!(VERBOSE, "Adding row: found newline on {lexeme}");
if !buffer.cell.is_empty() {
if buffer.in_header {
log!(VERBOSE, "Adding unterminated header: {lexeme}");
candidate.add_header(&parse_text(&buffer.cell));
@ -86,7 +85,6 @@ pub fn parse(
buffer.in_header = false;
buffer.in_cell = false;
candidate.add_row(vec![]);
} else if lexeme.match_char_triple(' ', '!', ' ') {
buffer.in_header = true;
if !buffer.cell.trim().is_empty() {
@ -116,3 +114,334 @@ pub fn parse(
}
true
}
#[cfg(test)]
mod tests {
use crate::{graph::Graph, syntax::content::parser};
fn read(input: &str) -> String { parser::read(input, &Graph::default()) }
fn read_loaded(input: &str) -> String {
parser::read(input, &Graph::load())
}
#[test]
fn single_row() {
assert_eq!(
read(concat!("%", "\n", "a | b | c", "\n", "%", "\n")),
concat!(
"\n",
"<table>", "\n",
" <tr>", "\n",
" <td>a</td>", "\n",
" <td>b</td>", "\n",
" <td>c</td>", "\n",
" </tr>", "\n",
"</table>", "\n",
)
);
}
#[test]
fn two_rows() {
assert_eq!(
read(concat!(
"%", "\n",
"a | b | c", "\n",
"d | e | f", "\n",
"%", "\n",
)),
concat!(
"\n",
"<table>", "\n",
" <tr>", "\n",
" <td>a</td>", "\n",
" <td>b</td>", "\n",
" <td>c</td>", "\n",
" </tr>", "\n",
" <tr>", "\n",
" <td>d</td>", "\n",
" <td>e</td>", "\n",
" <td>f</td>", "\n",
" </tr>", "\n",
"</table>", "\n",
)
);
}
#[test]
fn three_rows() {
assert_eq!(
read(concat!(
"%", "\n",
"a | b | c", "\n",
"d | e | f", "\n",
"g | h | i", "\n",
"%", "\n",
)),
concat!(
"\n",
"<table>", "\n",
" <tr>", "\n",
" <td>a</td>", "\n",
" <td>b</td>", "\n",
" <td>c</td>", "\n",
" </tr>", "\n",
" <tr>", "\n",
" <td>d</td>", "\n",
" <td>e</td>", "\n",
" <td>f</td>", "\n",
" </tr>", "\n",
" <tr>", "\n",
" <td>g</td>", "\n",
" <td>h</td>", "\n",
" <td>i</td>", "\n",
" </tr>", "\n",
"</table>", "\n",
)
);
}
#[test]
fn with_header() {
assert_eq!(
read(concat!(
"%", "\n",
"hA ! hB ! hC", "\n",
"a | b | c", "\n",
"d | e | f", "\n",
"%", "\n",
)),
concat!(
"\n",
"<table>", "\n",
" <tr>", "\n",
" <th>hA</th>", "\n",
" <th>hB</th>", "\n",
" <th>hC</th>", "\n",
" </tr>", "\n",
" <tr>", "\n",
" <td>a</td>", "\n",
" <td>b</td>", "\n",
" <td>c</td>", "\n",
" </tr>", "\n",
" <tr>", "\n",
" <td>d</td>", "\n",
" <td>e</td>", "\n",
" <td>f</td>", "\n",
" </tr>", "\n",
"</table>", "\n",
)
);
}
#[test]
fn with_anchor() {
assert_eq!(
read(concat!(
"%", "\n",
"a | |Node| | c", "\n",
"%", "\n",
)),
concat!(
"\n",
"<table>", "\n",
" <tr>", "\n",
" <td>a</td>", "\n",
r#" <td><a class="detached" title="" "#,
r#"href="/node/Node">Node</a></td>"#, "\n",
" <td>c</td>", "\n",
" </tr>", "\n",
"</table>", "\n",
)
);
}
#[test]
fn with_loaded_anchor() {
assert_eq!(
read_loaded(concat!(
"%", "\n",
"a | |Node| | c", "\n",
"%", "\n",
)),
concat!(
"\n",
"<table>", "\n",
" <tr>", "\n",
" <td>a</td>", "\n",
r#" <td><a class="attached" title="A node is defined "#,
r#"in your graph file starting with a table header of the "#,
r#"form:" href="/node/Node">Node</a></td>"#, "\n",
" <td>c</td>", "\n",
" </tr>", "\n",
"</table>", "\n",
)
);
}
#[test]
fn no_leading_delimiters() {
assert_eq!(
read_loaded(concat!(
"%", "\n",
"a ! b ! c !", "\n",
"d | e | f |", "\n",
"g | h | i |", "\n",
"%", "\n",
)),
concat!(
"\n",
"<table>", "\n",
" <tr>", "\n",
" <th>a</th>", "\n",
" <th>b</th>", "\n",
" <th>c</th>", "\n",
" </tr>", "\n",
" <tr>", "\n",
" <td>d</td>", "\n",
" <td>e</td>", "\n",
" <td>f</td>", "\n",
" </tr>", "\n",
" <tr>", "\n",
" <td>g</td>", "\n",
" <td>h</td>", "\n",
" <td>i</td>", "\n",
" </tr>", "\n",
"</table>", "\n",
)
);
}
#[test]
fn no_trailing_delimiters() {
assert_eq!(
read_loaded(concat!(
"%", "\n",
" ! a ! b ! c", "\n",
" | d | e | f", "\n",
" | g | h | i", "\n",
"%", "\n",
)),
concat!(
"\n",
"<table>", "\n",
" <tr>", "\n",
" <th>a</th>", "\n",
" <th>b</th>", "\n",
" <th>c</th>", "\n",
" </tr>", "\n",
" <tr>", "\n",
" <td>d</td>", "\n",
" <td>e</td>", "\n",
" <td>f</td>", "\n",
" </tr>", "\n",
" <tr>", "\n",
" <td>g</td>", "\n",
" <td>h</td>", "\n",
" <td>i</td>", "\n",
" </tr>", "\n",
"</table>", "\n",
)
);
}
#[test]
fn with_leading_and_trailing_delimiters() {
assert_eq!(
read_loaded(concat!(
"%", "\n",
" ! a ! b ! c !", "\n",
" | d | e | f |", "\n",
" | g | h | i |", "\n",
"%", "\n",
)),
concat!(
"\n",
"<table>", "\n",
" <tr>", "\n",
" <th>a</th>", "\n",
" <th>b</th>", "\n",
" <th>c</th>", "\n",
" </tr>", "\n",
" <tr>", "\n",
" <td>d</td>", "\n",
" <td>e</td>", "\n",
" <td>f</td>", "\n",
" </tr>", "\n",
" <tr>", "\n",
" <td>g</td>", "\n",
" <td>h</td>", "\n",
" <td>i</td>", "\n",
" </tr>", "\n",
"</table>", "\n",
)
);
}
#[test]
fn no_flanking_delimiters() {
assert_eq!(
read_loaded(concat!(
"%", "\n",
"a ! b ! c", "\n",
"d | e | f", "\n",
"g | h | i", "\n",
"%", "\n",
)),
concat!(
"\n",
"<table>", "\n",
" <tr>", "\n",
" <th>a</th>", "\n",
" <th>b</th>", "\n",
" <th>c</th>", "\n",
" </tr>", "\n",
" <tr>", "\n",
" <td>d</td>", "\n",
" <td>e</td>", "\n",
" <td>f</td>", "\n",
" </tr>", "\n",
" <tr>", "\n",
" <td>g</td>", "\n",
" <td>h</td>", "\n",
" <td>i</td>", "\n",
" </tr>", "\n",
"</table>", "\n",
)
);
}
#[test]
fn with_indent() {
assert_eq!(
read_loaded(concat!(
"%", "\n",
" ! a ! b ! c !", "\n",
" | d | e | f |", "\n",
" | g | h | i |", "\n",
"%", "\n",
)),
concat!(
"\n",
"<table>", "\n",
" <tr>", "\n",
" <th>a</th>", "\n",
" <th>b</th>", "\n",
" <th>c</th>", "\n",
" </tr>", "\n",
" <tr>", "\n",
" <td>d</td>", "\n",
" <td>e</td>", "\n",
" <td>f</td>", "\n",
" </tr>", "\n",
" <tr>", "\n",
" <td>g</td>", "\n",
" <td>h</td>", "\n",
" <td>i</td>", "\n",
" </tr>", "\n",
"</table>", "\n",
)
);
}
}

View file

@ -1,6 +1,6 @@
use std::fmt;
use crate::{syntax::content::parser::segment::delimiter::Delimiters};
use crate::syntax::content::parser::segment::delimiter::Delimiters;
#[derive(Clone, Debug, Default)]
pub struct Lexeme {
@ -22,25 +22,15 @@ impl Lexeme {
}
}
pub fn text(&self) -> String {
self.text.clone()
}
pub fn text(&self) -> String { self.text.clone() }
pub fn next(&self) -> String {
self.next.clone()
}
pub fn next(&self) -> String { self.next.clone() }
pub fn last(&self) -> bool {
self.last
}
pub const fn last(&self) -> bool { self.last }
pub fn first(&self) -> bool {
self.first
}
pub const fn first(&self) -> bool { self.first }
pub fn mutate_text(&mut self, new: &str) {
self.text = new.to_string();
}
pub fn mutate_text(&mut self, new: &str) { self.text = new.to_string(); }
pub fn as_char(&self) -> Option<char> {
if self.text.chars().count() == 1 {
@ -141,9 +131,7 @@ impl Lexeme {
.is_some_and(|c| delimiters.is_delimiter(c))
}
pub fn next_first_char(&self) -> Option<char> {
self.next.chars().nth(0)
}
pub fn next_first_char(&self) -> Option<char> { self.next.chars().nth(0) }
pub fn match_first_char(&self, query: char) -> bool {
self.text.chars().nth(0).is_some_and(|c| c == query)
@ -158,7 +146,7 @@ impl Lexeme {
}
/// # Panics
/// Panics if number of chars for a single lexeme exceeds `i32::MAX`
/// Panics if number of chars for a single lexeme exceeds `i32::MAX`.
pub fn count_char(&self, c: char) -> i32 {
let count = self.text().chars().filter(|&n| n == c).count();
match i32::try_from(count) {

View file

@ -1,13 +1,14 @@
use crate::{
prelude::*,
graph::Graph,
prelude::*,
syntax::content::{
TokenOutput, Parseable as _, LexMap,
LexMap, Parseable as _, TokenOutput,
parser::{
context,
lexeme::Lexeme,
token::{Token, LineBreak, Literal},
point, segment,
state::State,
segment, context, point,
token::{LineBreak, Literal, Token},
},
},
};

View file

@ -17,9 +17,9 @@ pub fn parse(
tokens: &mut Vec<Token>,
iterator: &mut Peekable<Iter<'_, Lexeme>>,
) -> bool {
if let super::context::Block::PreFormat = state.context.block {
return false;
} else if let super::context::Inline::Code = state.context.inline {
if matches!(state.context.block, super::context::Block::PreFormat)
|| matches!(state.context.inline, super::context::Inline::Code)
{
return false;
}
@ -58,17 +58,18 @@ pub fn parse(
#[cfg(test)]
mod tests {
use crate::{syntax::content::parser, graph::Graph};
use crate::{graph::Graph, syntax::content::parser};
fn read(input: &str) -> String {
parser::read(input, &Graph::default())
}
fn read(input: &str) -> String { parser::read(input, &Graph::default()) }
#[test]
fn oblique_anchor() {
assert_eq!(
read("w _|S|_ w"),
r#"<p>w <em><a class="detached" title="" href="/node/S">S</a></em> w</p>"#
concat!(
r#"<p>w <em><a class="detached" title="" "#,
r#"href="/node/S">S</a></em> w</p>"#,
)
);
}
@ -76,7 +77,8 @@ mod tests {
fn oblique_anchor_with_trailing_comma() {
assert_eq!(
read("w _|S|_, w"),
r#"<p>w <em><a class="detached" title="" href="/node/S">S</a></em>, w</p>"#
concat!(r#"<p>w <em><a class="detached" title="" "#,
r#"href="/node/S">S</a></em>, w</p>"#)
);
}
@ -84,9 +86,17 @@ mod tests {
fn oblique() {
assert_eq!(
read(
"_|this anchor is oblique|o as are these literals_ but not these _just these_, not this _and these with an |anc80r| again_"
"_|this anchor is oblique|o as are these literals_ but not \
these _just these_, not this _and these with an |anc80r| \
again_"
),
r#"<p><em><a class="detached" title="" href="/node/o">this anchor is oblique</a> as are these literals</em> but not these <em>just these</em>, not this <em>and these with an <a class="detached" title="" href="/node/anc80r">anc80r</a> again</em></p>"#
concat!(
r#"<p><em><a class="detached" title="" href="/node/o">"#,
r#"this anchor is oblique</a> as are these literals</em> "#,
r#"but not these <em>just these</em>, not this <em>and these "#,
r#"with an <a class="detached" title="" "#,
r#"href="/node/anc80r">anc80r</a> again</em></p>"#,
)
);
}

View file

@ -1,6 +1,4 @@
pub fn segment(text: &str) -> Vec<String> {
delimiter::atomize(text)
}
pub fn segment(text: &str) -> Vec<String> { delimiter::atomize(text) }
pub mod delimiter {
@ -146,7 +144,8 @@ pub mod delimiter {
fn atomize_flankign_sentence() {
assert_eq!(
atomize(
"about_colors: the colors _amber_, _orange_ and _yellow mustard_ to `jane_bishop@mail.com`."
"about_colors: the colors _amber_, _orange_ and \
_yellow mustard_ to `jane_bishop@mail.com`."
),
vec![
"about_colors",
@ -188,7 +187,8 @@ pub mod delimiter {
#[test]
fn atomize_words() {
let actual = atomize(
" justification for the actions of those who hold authority inevitably dwindles ",
" justification for the actions of those who hold \
authority inevitably dwindles ",
);
let expected = vec![
" ",
@ -285,7 +285,8 @@ pub mod delimiter {
#[test]
fn atomize_pipes_and_ticks() {
let actual = atomize(
"every other |time| as `it could or |perhaps somehow|then or now| it was` perceived",
"every other |time| as `it could or |perhaps somehow|then or \
now| it was` perceived",
);
let expected = vec![
"every",

View file

@ -101,24 +101,24 @@ mod tests {
#[test]
fn anchor_buffer_display_with_text_set() {
let mut buffer = AnchorBuffer::default();
buffer.text = String::from("mX8Z7yWmsK");
buffer.text = String::from("mX8Z7sK");
println!("{buffer:#?}");
println!("{buffer}");
assert_eq!(
format!("{buffer}"),
r#"AnchorBuffer [text: "mX8Z7yWmsK"] >> Anchor <empty> -> <unknown>"#
r#"AnchorBuffer [text: "mX8Z7sK"] >> Anchor <empty> -> <unknown>"#
);
}
#[test]
fn anchor_buffer_display_with_destination_set() {
let mut buffer = AnchorBuffer::default();
buffer.destination = String::from("VP2aqGngAq");
buffer.destination = String::from("VP2gAq");
println!("{buffer:#?}");
println!("{buffer}");
assert_eq!(
format!("{buffer}"),
r#"AnchorBuffer [, dest: "VP2aqGngAq"] >> Anchor <empty> -> <unknown>"#
r#"AnchorBuffer [, dest: "VP2gAq"] >> Anchor <empty> -> <unknown>"#
);
}
@ -131,7 +131,10 @@ mod tests {
println!("{buffer}");
assert_eq!(
format!("{buffer}"),
r#"AnchorBuffer [text: "ECJrzgkBHg", dest: "9dy6gQ2g3E"] >> Anchor <empty> -> <unknown>"#
concat!(
r#"AnchorBuffer [text: "ECJrzgkBHg", dest: "9dy6gQ2g3E"] "#,
r#">> Anchor <empty> -> <unknown>"#,
)
);
}
}

View file

@ -1,4 +1,4 @@
use crate::syntax::content::{Parseable as _};
use crate::syntax::content::Parseable as _;
pub mod anchor;
pub mod bold;
@ -18,12 +18,23 @@ pub mod table;
pub mod underline;
pub mod verse;
pub use {
anchor::Anchor, bold::Bold, checkbox::CheckBox, code::Code, header::Header,
item::Item, linebreak::LineBreak, list::List, literal::Literal,
oblique::Oblique, paragraph::Paragraph, preformat::PreFormat, quote::Quote,
strike::Strike, table::Table, underline::Underline, verse::Verse,
};
pub use anchor::Anchor;
pub use bold::Bold;
pub use checkbox::CheckBox;
pub use code::Code;
pub use header::Header;
pub use item::Item;
pub use linebreak::LineBreak;
pub use list::List;
pub use literal::Literal;
pub use oblique::Oblique;
pub use paragraph::Paragraph;
pub use preformat::PreFormat;
pub use quote::Quote;
pub use strike::Strike;
pub use table::Table;
pub use underline::Underline;
pub use verse::Verse;
#[derive(Debug, Eq, PartialEq, Clone)]
pub enum Token {

View file

@ -1,6 +1,6 @@
use crate::{
syntax::content::{Parseable, parser::Lexeme},
graph::Node,
syntax::content::{Parseable, parser::Lexeme},
};
#[derive(Default, Debug, Clone, Eq, PartialEq)]
@ -15,58 +15,42 @@ pub struct Anchor {
}
impl Anchor {
pub fn text(&self) -> String {
self.text.clone()
}
pub fn text(&self) -> String { self.text.clone() }
pub fn set_text(&mut self, text: &str) {
self.text = String::from(text);
}
pub fn set_text(&mut self, text: &str) { self.text = String::from(text); }
pub fn text_push(&mut self, text: &str) {
self.text.push_str(text);
}
pub fn text_push(&mut self, text: &str) { self.text.push_str(text); }
pub fn destination(&self) -> Option<String> {
self.destination.clone()
}
pub fn destination(&self) -> Option<String> { self.destination.clone() }
pub fn set_destination(&mut self, destination: Option<&str>) {
self.destination = destination.map(str::to_string);
self.route();
}
pub fn balanced(&self) -> bool {
self.balanced
}
pub const fn balanced(&self) -> bool { self.balanced }
pub fn set_balanced(&mut self, balanced: bool) {
pub const fn set_balanced(&mut self, balanced: bool) {
self.balanced = balanced;
}
pub fn external(&self) -> bool {
self.external
}
pub const fn external(&self) -> bool { self.external }
pub fn set_external(&mut self, external: bool) {
pub const fn set_external(&mut self, external: bool) {
self.external = external;
}
pub fn set_leading(&mut self, leading: bool) {
pub const fn set_leading(&mut self, leading: bool) {
self.leading = leading;
}
pub fn node(&self) -> Option<Node> {
self.node.clone()
}
pub fn node(&self) -> Option<Node> { self.node.clone() }
pub fn set_node(&mut self, node: &Node) {
self.node = Some(node.to_owned());
}
pub fn node_id(&self) -> Option<String> {
self.node_id.clone()
}
pub fn node_id(&self) -> Option<String> { self.node_id.clone() }
pub fn set_node_id(&mut self, id: &str) {
self.node_id = Some(id.to_owned());
@ -74,7 +58,7 @@ impl Anchor {
fn route(&mut self) {
self.destination = if let Some(destination) = self.destination.clone() {
if destination.contains(":") || destination.contains("/") {
if destination.contains(':') || destination.contains('/') {
Some(destination)
} else if destination.is_empty() && self.text.is_empty() {
None
@ -105,7 +89,8 @@ impl Parseable for Anchor {
fn render(&self) -> String {
let Some(destination) = &self.destination else {
panic!(
"Attempt to render anchor {self:#?} without knowing its destination."
"Attempt to render anchor {self:#?} without knowing \
its destination."
)
};
@ -131,9 +116,7 @@ impl Parseable for Anchor {
)
}
fn flatten(&self) -> String {
self.text.clone()
}
fn flatten(&self) -> String { self.text.clone() }
}
impl std::fmt::Display for Anchor {
@ -177,9 +160,8 @@ impl std::fmt::Display for Anchor {
#[cfg(test)]
mod tests {
use crate::syntax::content::parser::Token;
use super::*;
use crate::syntax::content::parser::Token;
#[test]
fn render_anchor() {
@ -188,7 +170,10 @@ mod tests {
anchor.set_destination(Some("AnchorDest"));
assert_eq!(
anchor.render(),
r#"<a class="detached" title="" href="/node/AnchorDest">AnchorText</a>"#
concat!(
r#"<a class="detached" title="" "#,
r#"href="/node/AnchorDest">AnchorText</a>"#,
)
);
}
@ -196,9 +181,7 @@ mod tests {
#[should_panic(
expected = "Attempt to lex an anchor directly from a lexeme"
)]
fn lex() {
Anchor::lex(&Lexeme::default());
}
fn lex() { Anchor::lex(&Lexeme::default()); }
#[test]
#[should_panic(expected = "without knowing its destination")]

View file

@ -1,6 +1,4 @@
use crate::{
syntax::content::{Parseable, Lexeme},
};
use crate::syntax::content::{Lexeme, Parseable};
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Bold {
@ -8,15 +6,11 @@ pub struct Bold {
}
impl Bold {
pub fn new(open: bool) -> Bold {
Bold { open }
}
pub const fn new(open: bool) -> Bold { Bold { open } }
}
impl Parseable for Bold {
fn probe(lexeme: &Lexeme) -> bool {
lexeme.text() == "*"
}
fn probe(lexeme: &Lexeme) -> bool { lexeme.text() == "*" }
fn lex(_lexeme: &Lexeme) -> Bold {
panic!("Attempt to lex a bold tag directly from a lexeme")
@ -30,9 +24,7 @@ impl Parseable for Bold {
}
}
fn flatten(&self) -> String {
String::default()
}
fn flatten(&self) -> String { String::default() }
}
impl std::fmt::Display for Bold {
@ -44,9 +36,8 @@ impl std::fmt::Display for Bold {
#[cfg(test)]
mod tests {
use crate::syntax::content::parser::Token;
use super::*;
use crate::syntax::content::parser::Token;
#[test]
fn render() {
@ -61,9 +52,7 @@ mod tests {
#[should_panic(
expected = "Attempt to lex a bold tag directly from a lexeme"
)]
fn lex() {
Bold::lex(&Lexeme::default());
}
fn lex() { Bold::lex(&Lexeme::default()); }
#[test]
fn token_display() {

View file

@ -1,6 +1,4 @@
use crate::{
syntax::content::{Parseable, Lexeme},
};
use crate::syntax::content::{Lexeme, Parseable};
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct CheckBox {
@ -8,9 +6,7 @@ pub struct CheckBox {
}
impl CheckBox {
pub fn new(checked: bool) -> CheckBox {
CheckBox { checked }
}
pub const fn new(checked: bool) -> CheckBox { CheckBox { checked } }
}
impl Parseable for CheckBox {
@ -34,9 +30,7 @@ impl Parseable for CheckBox {
format!(r#"<input type="checkbox"{toggle}/>"#)
}
fn flatten(&self) -> String {
String::default()
}
fn flatten(&self) -> String { String::default() }
}
impl std::fmt::Display for CheckBox {
@ -48,9 +42,8 @@ impl std::fmt::Display for CheckBox {
#[cfg(test)]
mod tests {
use crate::syntax::content::parser::Token;
use super::*;
use crate::syntax::content::parser::Token;
#[test]
fn render() {

View file

@ -1,6 +1,4 @@
use crate::{
syntax::content::{Parseable, Lexeme},
};
use crate::syntax::content::{Lexeme, Parseable};
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Code {
@ -8,15 +6,11 @@ pub struct Code {
}
impl Code {
pub fn new(open: bool) -> Code {
Code { open }
}
pub const fn new(open: bool) -> Code { Code { open } }
}
impl Parseable for Code {
fn probe(lexeme: &Lexeme) -> bool {
lexeme.text() == "`"
}
fn probe(lexeme: &Lexeme) -> bool { lexeme.text() == "`" }
fn lex(_lexeme: &Lexeme) -> Code {
panic!("Attempt to lex a code tag directly from a lexeme")
@ -30,9 +24,7 @@ impl Parseable for Code {
}
}
fn flatten(&self) -> String {
String::default()
}
fn flatten(&self) -> String { String::default() }
}
impl std::fmt::Display for Code {
@ -44,9 +36,8 @@ impl std::fmt::Display for Code {
#[cfg(test)]
mod tests {
use crate::syntax::content::parser::Token;
use super::*;
use crate::syntax::content::parser::Token;
#[test]
fn render() {
@ -61,9 +52,7 @@ mod tests {
#[should_panic(
expected = "Attempt to lex a code tag directly from a lexeme"
)]
fn lex() {
Code::lex(&Lexeme::default());
}
fn lex() { Code::lex(&Lexeme::default()); }
#[test]
fn token_display() {

View file

@ -1,15 +1,14 @@
use std::{
collections::{HashMap, hash_map::Entry},
fmt::Display,
};
use crate::{
prelude::*,
graph::Config,
syntax::content::{Parseable, Lexeme},
prelude::*,
syntax::content::{Lexeme, Parseable},
};
use std::fmt::Display;
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Header {
open: Option<bool>,
@ -33,7 +32,7 @@ impl Header {
) -> String {
let base_id = if !config.ascii_dom_ids || next_lexeme.next().is_ascii()
{
next_lexeme.next().clone()
next_lexeme.next()
} else {
String::from("h")
};
@ -61,7 +60,7 @@ impl Header {
}
}
pub fn level(&self) -> u8 {
pub const fn level(&self) -> u8 {
match self.level {
Level::One => 1,
Level::Two => 2,
@ -113,9 +112,7 @@ impl Parseable for Header {
}
}
fn flatten(&self) -> String {
String::default()
}
fn flatten(&self) -> String { String::default() }
}
impl std::fmt::Display for Header {
@ -150,7 +147,7 @@ pub enum Level {
}
impl Level {
fn from_u8(u: u8) -> Level {
const fn from_u8(u: u8) -> Level {
if u <= 1 {
Level::One
} else if u == 2 {
@ -195,9 +192,8 @@ impl Display for Level {
#[cfg(test)]
mod tests {
use crate::syntax::content::parser::Token;
use super::*;
use crate::syntax::content::parser::Token;
#[test]
fn make_id() {

View file

@ -1,4 +1,4 @@
use crate::syntax::content::{Parseable, Lexeme};
use crate::syntax::content::{Lexeme, Parseable};
#[derive(Default, Debug, Clone, Eq, PartialEq)]
pub struct Item {
@ -7,9 +7,7 @@ pub struct Item {
}
impl Parseable for Item {
fn probe(_: &Lexeme) -> bool {
false
}
fn probe(_: &Lexeme) -> bool { false }
fn lex(_: &Lexeme) -> Item {
panic!("Attempt to lex an item directly from a lexeme")
@ -19,9 +17,7 @@ impl Parseable for Item {
panic!("Items should only be rendered by a list's render method")
}
fn flatten(&self) -> String {
String::default()
}
fn flatten(&self) -> String { String::default() }
}
impl Item {
@ -50,9 +46,8 @@ impl std::fmt::Display for Item {
#[cfg(test)]
mod tests {
use crate::syntax::content::parser::Token;
use super::*;
use crate::syntax::content::parser::Token;
#[test]
#[should_panic(

View file

@ -1,26 +1,18 @@
use crate::{
syntax::content::{Parseable, parser::Lexeme},
};
use crate::syntax::content::{Parseable, parser::Lexeme};
#[derive(Default, Debug, Clone, Eq, PartialEq)]
pub struct LineBreak {}
pub struct LineBreak;
impl Parseable for LineBreak {
fn probe(lexeme: &Lexeme) -> bool {
lexeme.match_char('<') && lexeme.match_next_char('\n')
}
fn lex(_lexeme: &Lexeme) -> LineBreak {
LineBreak {}
}
fn lex(_lexeme: &Lexeme) -> LineBreak { LineBreak {} }
fn render(&self) -> String {
String::from("<br>")
}
fn render(&self) -> String { String::from("<br>") }
fn flatten(&self) -> String {
String::from('\n')
}
fn flatten(&self) -> String { String::from('\n') }
}
impl std::fmt::Display for LineBreak {
@ -31,9 +23,8 @@ impl std::fmt::Display for LineBreak {
#[cfg(test)]
mod tests {
use crate::syntax::content::parser::Token;
use super::*;
use crate::syntax::content::parser::Token;
#[test]
fn token_display() {

View file

@ -27,8 +27,8 @@ impl Parseable for List {
/// - Strict division is performed but related panics are unreachable given
/// the guarantees described in `List::scale_indent`
/// - Saturates subtractions from indent levels at zero. This is not
/// unreachable, but a difference of zero is a no-op considering it
/// would cause an iteration of zero times (over an empty range).
/// unreachable, but a difference of zero is a no-op considering it would
/// cause an iteration of zero times (over an empty range).
fn render(&self) -> String {
let tag = if self.ordered { "ol" } else { "ul" };
let mut output = String::new();
@ -43,19 +43,19 @@ impl Parseable for List {
.unwrap_or(0)
.strict_div(scale);
output.push_str(&format!("<li>{}", item.text));
write_log!(output, "<li>{}", item.text);
if next_level > level {
// open nested lists
for _ in 0..(next_level.saturating_sub(level)) {
output.push_str(&format!("<{tag}>\n"));
write_log!(output, "<{tag}>\n");
}
} else {
// close current item
output.push_str("</li>");
// close nested lists
for _ in 0..(level.saturating_sub(next_level)) {
output.push_str(&format!("</{tag}></li>"));
write_log!(output, "</{tag}></li>");
}
output.push('\n');
}
@ -70,7 +70,7 @@ impl Parseable for List {
}
impl List {
pub fn new(ordered: bool) -> List {
pub const fn new(ordered: bool) -> List {
List {
ordered,
items: vec![],
@ -122,9 +122,8 @@ impl std::fmt::Display for List {
#[cfg(test)]
mod tests {
use crate::syntax::content::parser::Token;
use super::*;
use crate::syntax::content::parser::Token;
#[test]
fn render_flat_list() {
@ -203,7 +202,7 @@ mod tests {
fn token_display() {
let list = List::new(false);
assert_eq!(
format!("{}", Token::List(list.clone())),
format!("{}", Token::List(list)),
"Tk:List [0 unordered items]"
);
}

View file

@ -16,13 +16,9 @@ impl Parseable for Literal {
}
}
fn render(&self) -> String {
self.text.clone()
}
fn render(&self) -> String { self.text.clone() }
fn flatten(&self) -> String {
self.text.clone()
}
fn flatten(&self) -> String { self.text.clone() }
}
impl std::fmt::Display for Literal {
@ -33,9 +29,8 @@ impl std::fmt::Display for Literal {
#[cfg(test)]
mod tests {
use crate::syntax::content::parser::Token;
use super::*;
use crate::syntax::content::parser::Token;
#[test]
fn token_display() {

View file

@ -1,6 +1,4 @@
use crate::{
syntax::content::{Parseable, Lexeme},
};
use crate::syntax::content::{Lexeme, Parseable};
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Oblique {
@ -8,15 +6,11 @@ pub struct Oblique {
}
impl Oblique {
pub fn new(open: bool) -> Oblique {
Oblique { open }
}
pub const fn new(open: bool) -> Oblique { Oblique { open } }
}
impl Parseable for Oblique {
fn probe(lexeme: &Lexeme) -> bool {
lexeme.text() == "_"
}
fn probe(lexeme: &Lexeme) -> bool { lexeme.text() == "_" }
fn lex(_lexeme: &Lexeme) -> Oblique {
panic!("Attempt to lex an oblique tag directly from a lexeme")
@ -30,9 +24,7 @@ impl Parseable for Oblique {
}
}
fn flatten(&self) -> String {
String::default()
}
fn flatten(&self) -> String { String::default() }
}
impl std::fmt::Display for Oblique {
@ -44,9 +36,8 @@ impl std::fmt::Display for Oblique {
#[cfg(test)]
mod tests {
use crate::syntax::content::parser::Token;
use super::*;
use crate::syntax::content::parser::Token;
#[test]
fn render() {
@ -61,9 +52,7 @@ mod tests {
#[should_panic(
expected = "Attempt to lex an oblique tag directly from a lexeme"
)]
fn lex() {
Oblique::lex(&Lexeme::default());
}
fn lex() { Oblique::lex(&Lexeme::default()); }
#[test]
fn token_display() {

View file

@ -6,9 +6,7 @@ pub struct Paragraph {
}
impl Paragraph {
pub fn new(open: bool) -> Paragraph {
Paragraph { open: Some(open) }
}
pub const fn new(open: bool) -> Paragraph { Paragraph { open: Some(open) } }
pub fn probe_end(lexeme: &Lexeme) -> bool {
lexeme.match_char('\n') && lexeme.match_next_char('\n')
@ -21,9 +19,7 @@ impl Parseable for Paragraph {
!lexeme.is_whitespace()
}
fn lex(_lexeme: &Lexeme) -> Paragraph {
Paragraph { open: None }
}
fn lex(_lexeme: &Lexeme) -> Paragraph { Paragraph { open: None } }
fn render(&self) -> String {
if let Some(open) = self.open {
@ -39,9 +35,7 @@ impl Parseable for Paragraph {
}
}
fn flatten(&self) -> String {
String::default()
}
fn flatten(&self) -> String { String::default() }
}
impl std::fmt::Display for Paragraph {
@ -62,9 +56,8 @@ impl std::fmt::Display for Paragraph {
#[cfg(test)]
mod tests {
use crate::syntax::content::parser::Token;
use super::*;
use crate::syntax::content::parser::Token;
#[test]
fn lex() {
@ -73,9 +66,8 @@ mod tests {
}
#[test]
#[should_panic(
expected = "Attempt to render a paragraph tag while open state is unknown"
)]
#[should_panic(expected = "Attempt to render a paragraph tag while \
open state is unknown")]
fn render_state_unknown() {
let p = Paragraph::lex(&Lexeme::default());
drop(p.render());

View file

@ -1,6 +1,4 @@
use crate::{
syntax::content::{Parseable, Lexeme},
};
use crate::syntax::content::{Lexeme, Parseable};
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct PreFormat {
@ -8,9 +6,7 @@ pub struct PreFormat {
}
impl PreFormat {
pub fn new(open: bool) -> PreFormat {
PreFormat { open: Some(open) }
}
pub const fn new(open: bool) -> PreFormat { PreFormat { open: Some(open) } }
}
impl std::fmt::Display for PreFormat {
@ -29,9 +25,7 @@ impl Parseable for PreFormat {
lexeme.match_first_char('`') && (lexeme.next() == "\n" || lexeme.last())
}
fn lex(_lexeme: &Lexeme) -> PreFormat {
PreFormat { open: None }
}
fn lex(_lexeme: &Lexeme) -> PreFormat { PreFormat { open: None } }
fn render(&self) -> String {
if let Some(o) = self.open {
@ -47,16 +41,13 @@ impl Parseable for PreFormat {
}
}
fn flatten(&self) -> String {
String::default()
}
fn flatten(&self) -> String { String::default() }
}
#[cfg(test)]
mod tests {
use crate::syntax::content::parser::Token;
use super::*;
use crate::syntax::content::parser::Token;
#[test]
fn lex() {
@ -68,9 +59,8 @@ mod tests {
}
#[test]
#[should_panic(
expected = "Attempt to render a preformat tag while open state is unknown"
)]
#[should_panic(expected = "Attempt to render a preformat tag while \
open state is unknown")]
fn render() {
let from_empty_lexeme = PreFormat::lex(&Lexeme::default());
from_empty_lexeme.render();

View file

@ -26,9 +26,7 @@ impl Parseable for Quote {
lexeme.match_char('>') && lexeme.match_next_char(' ')
}
fn lex(_lexeme: &Lexeme) -> Quote {
Quote::default()
}
fn lex(_lexeme: &Lexeme) -> Quote { Quote::default() }
fn render(&self) -> String {
let opening = if let Some(url) = &self.url {
@ -49,9 +47,7 @@ impl Parseable for Quote {
format!("\n{opening}\n{content}\n</blockquote>\n")
}
fn flatten(&self) -> String {
String::default()
}
fn flatten(&self) -> String { String::default() }
}
impl std::fmt::Display for Quote {

View file

@ -1,6 +1,4 @@
use crate::{
syntax::content::{Parseable, Lexeme},
};
use crate::syntax::content::{Lexeme, Parseable};
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Strike {
@ -8,9 +6,7 @@ pub struct Strike {
}
impl Strike {
pub fn new(open: bool) -> Strike {
Strike { open }
}
pub const fn new(open: bool) -> Strike { Strike { open } }
}
impl Parseable for Strike {
@ -27,9 +23,7 @@ impl Parseable for Strike {
String::from(tag)
}
fn flatten(&self) -> String {
String::default()
}
fn flatten(&self) -> String { String::default() }
}
impl std::fmt::Display for Strike {
@ -41,9 +35,8 @@ impl std::fmt::Display for Strike {
#[cfg(test)]
mod tests {
use crate::syntax::content::parser::Token;
use super::*;
use crate::syntax::content::parser::Token;
#[test]
fn render() {
@ -58,9 +51,7 @@ mod tests {
#[should_panic(
expected = "Attempt to lex a strike tag directly from a lexeme"
)]
fn lex() {
Strike::lex(&Lexeme::default());
}
fn lex() { Strike::lex(&Lexeme::default()); }
#[test]
fn token_display() {

View file

@ -15,9 +15,7 @@ impl Table {
self.headers.push(header.trim().to_string());
}
pub fn add_row(&mut self, row: Vec<String>) {
self.contents.push(row);
}
pub fn add_row(&mut self, row: Vec<String>) { self.contents.push(row); }
pub fn add_cell(&mut self, content: &str) {
if let Some(last) = self.contents.last_mut() {
@ -37,13 +35,9 @@ impl Table {
}
impl Parseable for Table {
fn probe(lexeme: &Lexeme) -> bool {
lexeme.match_char_sequence('%', '\n')
}
fn probe(lexeme: &Lexeme) -> bool { lexeme.match_char_sequence('%', '\n') }
fn lex(_lexeme: &Lexeme) -> Table {
Table::default()
}
fn lex(_lexeme: &Lexeme) -> Table { Table::default() }
fn render(&self) -> String {
let mut xml = String::from("\n<table>\n");
@ -73,9 +67,7 @@ impl Parseable for Table {
xml
}
fn flatten(&self) -> String {
String::default()
}
fn flatten(&self) -> String { String::default() }
}
impl std::fmt::Display for Table {

View file

@ -1,6 +1,4 @@
use crate::{
syntax::content::{Parseable, Lexeme},
};
use crate::syntax::content::{Lexeme, Parseable};
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Underline {
@ -8,9 +6,7 @@ pub struct Underline {
}
impl Underline {
pub fn new(open: bool) -> Underline {
Underline { open }
}
pub const fn new(open: bool) -> Underline { Underline { open } }
}
impl Parseable for Underline {
@ -30,9 +26,7 @@ impl Parseable for Underline {
}
}
fn flatten(&self) -> String {
String::default()
}
fn flatten(&self) -> String { String::default() }
}
impl std::fmt::Display for Underline {
@ -44,9 +38,8 @@ impl std::fmt::Display for Underline {
#[cfg(test)]
mod tests {
use crate::syntax::content::parser::Token;
use super::*;
use crate::syntax::content::parser::Token;
#[test]
fn render() {
@ -61,9 +54,7 @@ mod tests {
#[should_panic(
expected = "Attempt to lex an underline tag directly from a lexeme"
)]
fn lex() {
Underline::lex(&Lexeme::default());
}
fn lex() { Underline::lex(&Lexeme::default()); }
#[test]
fn token_display() {

View file

@ -7,7 +7,7 @@ pub struct Verse {
}
impl Verse {
pub fn new(open: bool) -> Verse {
pub const fn new(open: bool) -> Verse {
Verse {
open: Some(open),
citation: None,
@ -43,9 +43,7 @@ impl Parseable for Verse {
}
}
fn flatten(&self) -> String {
String::default()
}
fn flatten(&self) -> String { String::default() }
}
impl std::fmt::Display for Verse {

View file

@ -314,7 +314,7 @@ b
c
`
You still get "a b c" as a result. This is the case for paragraphs, but not for lists, verse blocks and preformatted text. Blockquotes support both modes.
You still get "a b c" as a result. This is the case for paragraphs, but not for lists, verse blocks, tables and preformatted text. Blockquotes support both modes.
This is useful when editing your text, allowing you to break some thoughts and special syntax without losing control over where your paragraph ends, particularly when handling huge paragraphs.
@ -418,7 +418,7 @@ If you have a more complex citation, you can use multiple lines starting with `-
-- Benedita da Silva,
-- |Speech on the Federal Senate|https://www25.senado.leg.br/web/atividade/pronunciamentos/-/p/pronunciamento/165765|,
-- March 3rd, 1995, <
-- International Day for the Elimination of Racial Discrimination.
-- International Day for the Elimination of Racial Discrimination
`
> Dois grandes mitos dominam a história oficial do Brasil:
@ -426,7 +426,7 @@ If you have a more complex citation, you can use multiple lines starting with `-
-- Benedita da Silva,
-- |Speech on the Federal Senate|https://www25.senado.leg.br/web/atividade/pronunciamentos/-/p/pronunciamento/165765|,
-- March 3rd, 1995, <
-- Dia Internacional para a Eliminação da Discriminação Racial.
-- International Day for the Elimination of Racial Discrimination
The first URL found in your citation will be used as the blockquote element's `cite` value.
@ -456,6 +456,50 @@ Lines starting with a `+` character will create numbered lists instead:
+ ni
+ san
### Tables
Tables are blocks delimited by a sole `%` on its own line:
`
%
Country ! Capital
Colombia | Bogotá
Belgium | Brussels
Palestine | Jerusalem
Zambia | Lusaka
%
`
%
Country ! Capital
Colombia | Bogotá
Belgium | Brussels
Palestine | Jerusalem
Zambia | Lusaka
%
Table cells are delimited by either a `!` for headers or `|` for common cells. These delimiters must be surrounded by at least one space to each side and are optional at the first and last position of each line.
This means you can use any of the following formats:
`
%
middle | only
tail | only |
| lead | only
| fully | wrapped |
%
`
%
middle | only
tail | only |
| lead | only
| fully | wrapped |
%
Because at least one space is required around each delimiter, you must indent the table inside the surrounding `%` markers by at least one space.
## Rendering unformatted text
The backtick character `\\`` can be used to render unformatted blocks and inline text:
@ -482,25 +526,23 @@ Finally, you can precede any character with a `\\\\` to fully _escape_ that char
## Raw HTML
If you need some more advanced feature that is not supported directly by en's markup syntax, you can always just write plain HTML and it will be passed along. For example, you could render a table:
If you need some more advanced feature that is not supported directly by en's markup syntax, you can always just write plain HTML and it will be passed along. For example, you could render a form:
`
&lt;table&gt;
&lt;tr&gt;
&lt;td&gt; Hi, &lt;/td&gt;
&lt;td&gt; *HTML*! &lt;/td&gt;
&lt;/tr&gt;
&lt;/table&gt;
&lt;form style="text-align: center;"&gt;
&lt;label for="name"&gt; *__Name__* &lt;/label&gt;
&lt;input type="text" id="name"/&gt;
&lt;input type="submit"/&gt;
&lt;/form&gt;
`
<table>
<tr>
<td> Hi, </td>
<td> *HTML*! </td>
</tr>
</table>
<form style="text-align: center;">
<label for="name"> *__Name__* </label>
<input type="text" id="name"/>
<input type="submit"/>
</form>
Notice that, as shown in this example, you can mix en syntax and HTML. You might want to add a space between your HTML tags and en special syntax so the boundary is clearer, but otherwise they don't tend to overlap since the symbols most used in HTML are not special in en.
Notice that, as shown in this example, you can mix en syntax and HTML. You might want to add a space between your HTML tags and en special syntax so the boundary is clearer, but otherwise they don't tend to overlap since the symbols most used in HTML are not special in en with the exception of `<`, which is interpreted specially only at the end of lines.
If you want to avoid either one of these syntaxes from being interpreted specially, you should escape the relevant characters as explained in the previous section.
"""
@ -723,11 +765,7 @@ text = """
- [ ] Invert where redirects are set
- [x] Formatting
- [x] Blockquotes
- [ ] Tables
- `%` block
- newline for rows
- indented, space-surrounded `!` wrap for headers
- indented, space-surrounded `|` wrap for cells
- [x] Tables
- [x] Nested formatting
- [x] Headers
- [x] Preformatted blocks

View file

@ -1,4 +1,5 @@
:root {
color-scheme: light dark;
--base-font-size: 1em;
}
@ -15,6 +16,8 @@ body {
line-height: 1.75;
box-sizing: border-box;
min-width: 0;
background: light-dark(#eee, #222);
color: light-dark(#000, #f1e9e5);
}
main {
@ -61,14 +64,14 @@ code:hover {
}
pre, code {
background: light-dark(#e0e0e0, #333);
font-family: mono, monospace;
background: #e0e0e0;
border: solid 2px #d0d0d0;
border: solid 2px light-dark(#d0d0d0, #434343);
border-radius: 6px;
}
a {
color: #0f6366;
color: light-dark(#0f6366, #1dd7d7);
text-decoration: underline dotted #159b9b;
text-decoration-thickness: 2.5px;
text-underline-offset: 3px;
@ -78,27 +81,27 @@ a {
a.attached:hover,
#nav-main a:hover
{
color: #117c7c;
color: light-dark(#117c7c, #00ffff);
text-shadow: 0px 0px 22px #10afaf;
transition: 1500ms;
}
a.detached {
color: #595959;
text-decoration-color: #444444;
color: light-dark(#595959, #acacac);
text-decoration-color: light-dark(#444444, #777);
}
a.external {
color: #1958a7;
text-decoration-color: #2A7CDF;
color: light-dark(#1958a7, #2fbae4);
text-decoration-color: light-dark(#2A7CDF, #46c1e7);
text-decoration-style: solid;
text-decoration-thickness: 1.5px;
transition: 1500ms;
}
a.external:hover {
color: #0393b2;
text-decoration-color: #1ed4f1;
color: light-dark(#0393b2, #74e5ff);
text-decoration-color: light-dark(#1ed4f1, #aeffff);
transition: 1500ms;
}
@ -106,7 +109,7 @@ a:visited,
a.detached:visited,
a.external:visited
{
text-decoration-color: #bbb;
text-decoration-color: light-dark(#bbb, #999);
transition: 1500ms;
}
@ -143,14 +146,14 @@ span.root-label {
span.id-label {
font-family: mono, monospace;
background: #e0e0e0;
border: solid 1px #d0d0d0;
background: light-dark(#e0e0e0, #444);
border: solid 1px light-dark(#d0d0d0, #666);
}
span.hidden-label {
background: #888;
color: #eee;
border: solid 1px #d0d0d0;
background: light-dark(#888, #000);
color: light-dark(#eee, #969696);
border: solid 1px light-dark(#d0d0d0, #555);
}
h1 {
@ -276,8 +279,8 @@ button
border-radius: 5px;
padding: 5px 8px;
margin-right: 3px;
background: #eeeeff00;
border-color: #216767;
background: light-dark(#eeeeff00, #002020);
border-color: light-dark(#216767, #138e8e);
border-width: 0.5px;
transition: 1500ms;
}
@ -287,7 +290,7 @@ input[type="submit"]:hover,
select:hover,
button:hover
{
border-color: #36a9a9;
border-color: light-dark(#36a9a9, #00ffff);
box-shadow: 2px 2px #36a9a9ee;
}
@ -315,9 +318,9 @@ table {
}
table th {
background: #099;
background: light-dark(#099, #002929);
color: #fff;
border-color: #222;
border-color: light-dark(#222, #666);
}
td, th {
@ -362,92 +365,9 @@ p.verse {
}
@media (prefers-color-scheme: dark) {
* {
background: #222222;
color: #f1e9e5;
}
pre, code {
background: #333333;
border: solid 1px #434343;
}
a {
color: #1dd7d7;
transition: 1500ms;
}
a.attached:hover,
#nav-main a:hover
{
color: #00ffff;
transition: 1500ms;
}
a.external {
color: #2fbae4;
text-decoration-color: #46c1e7;
transition: 1500ms;
}
a.external:hover {
color: #74e5ff;
text-decoration-color: #aeffff;
transition: 1500ms;
}
a.detached {
color: #acacac;
text-decoration-color: #777;
transition: 1500ms;
}
span.id-label {
background: #444;
border-color: #666;
}
span.hidden-label {
background: #000;
border-color: #555;
color: #969696;
}
a:visited,
a.detached:visited,
a.external:visited
{
text-decoration-color: #999;
transition: 1500ms;
}
input[type="text"],
input[type="submit"],
select,
button
{
background: #002020;
border-color: #138e8e;
}
input[type="text"]:hover,
input[type="submit"]:hover,
select:hover,
button:hover
{
border-color: #00ffff;
}
span.root-label {
border-width: 1px;
}
table th {
background: #002929;
border-color: #666;
}
}
@media (max-width: 600px) {