Commit
Merge branch 'main' into test/unordered-and-ordered-list
Signed-off-by: Titus <[email protected]>
wooorm authored Nov 25, 2022
2 parents 3080f45 + eea185e commit 90b5aef
Showing 85 changed files with 164 additions and 182 deletions.
1 change: 0 additions & 1 deletion Untitled.txt
@@ -6,7 +6,6 @@ micromark.js: `atLineEnding` in html (text) should always eat arbitrary whitespa
 ```rs
 // ---------------------
 // Useful helper:
-extern crate std;
 use std::println;
 use alloc::string::String;
 
4 changes: 1 addition & 3 deletions benches/bench.rs
@@ -1,6 +1,4 @@
-#[macro_use]
-extern crate criterion;
-use criterion::{BenchmarkId, Criterion};
+use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion};
 use std::fs;
 
 fn readme(c: &mut Criterion) {
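Note: most of the changes in this commit delete `extern crate` declarations. Since the Rust 2018 edition, crates listed in `Cargo.toml` are in scope without them, and `#[macro_use]` is replaced by naming the macros in a regular `use`. A minimal sketch of the resulting bench-file shape — the `criterion` items are the crate's real API, but the benchmark body is an illustrative assumption, not the repository's actual `bench.rs`:

```rs
// Rust 2018+: macros arrive through a normal `use`;
// no `extern crate` or `#[macro_use]` needed.
use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion};

fn readme(c: &mut Criterion) {
    // Hypothetical benchmark body: time `to_html` on a small input.
    let doc = "# hello *world*";
    c.bench_with_input(BenchmarkId::new("to_html", "small"), &doc, |b, d| {
        b.iter(|| markdown::to_html(d));
    });
}

criterion_group!(benches, readme);
criterion_main!(benches);
```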
2 changes: 0 additions & 2 deletions build.rs
@@ -1,4 +1,3 @@
-extern crate reqwest;
 use regex::Regex;
 use std::fs;
 
@@ -67,7 +66,6 @@ async fn commonmark() {
 // > 👉 **Important**: this module is generated by `build.rs`.
 // > It is generate from the latest CommonMark website.
-extern crate markdown;
 use markdown::{{to_html_with_options, CompileOptions, Options}};
 use pretty_assertions::assert_eq;
2 changes: 0 additions & 2 deletions examples/lib.rs
@@ -1,5 +1,3 @@
-extern crate markdown;
-
 fn main() -> Result<(), String> {
     // Turn on debugging.
     // You can show it with `RUST_LOG=debug cargo run --example lib`
6 changes: 1 addition & 5 deletions fuzz/fuzz_targets/markdown.rs
@@ -1,14 +1,10 @@
 #![no_main]
 use libfuzzer_sys::fuzz_target;
-extern crate markdown;
 
 fuzz_target!(|data: &[u8]| {
     if let Ok(s) = std::str::from_utf8(data) {
         let _ = markdown::to_html(s);
-        let _ = markdown::to_html_with_options(
-            s,
-            &markdown::Options::gfm()
-        );
+        let _ = markdown::to_html_with_options(s, &markdown::Options::gfm());
         let _ = markdown::to_mdast(s, &markdown::ParseOptions::default());
         let _ = markdown::to_mdast(s, &markdown::ParseOptions::gfm());
     }
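Note: collapsing the `to_html_with_options` call onto one line is a pure reformat; behavior is unchanged. By cargo-fuzz convention the target name matches the file name, so this harness would typically be run with `cargo fuzz run markdown` (the target name here is inferred from `fuzz_targets/markdown.rs`).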
6 changes: 0 additions & 6 deletions readme.md
@@ -90,8 +90,6 @@ cargo add [email protected]
 ## Use
 
 ```rs
-extern crate markdown;
-
 fn main() {
     println!("{}", markdown::to_html("## Hello, *world*!"));
 }
@@ -106,8 +104,6 @@ Yields:
 Extensions (in this case GFM):
 
 ```rs
-extern crate markdown;
-
 fn main() -> Result<(), String> {
     println!(
         "{}",
@@ -136,8 +132,6 @@ Yields:
 Syntax tree ([mdast][]):
 
 ```rs
-extern crate markdown;
-
 fn main() -> Result<(), String> {
     println!(
         "{:?}",
1 change: 0 additions & 1 deletion src/configuration.rs
@@ -1236,7 +1236,6 @@ impl Options {
 
 #[cfg(test)]
 mod tests {
-    extern crate std;
     use super::*;
     use crate::util::mdx::Signal;
     use alloc::format;
23 changes: 12 additions & 11 deletions src/construct/attention.rs
@@ -92,8 +92,10 @@ use alloc::{vec, vec::Vec};
 struct Sequence {
     /// Marker as a byte (`u8`) used in this sequence.
     marker: u8,
-    /// The depth in events where this sequence resides.
-    balance: usize,
+    /// We track whether sequences are in balanced events, and where those
+    /// events start, so that one attention doesn’t start in say, one link, and
+    /// end in another.
+    stack: Vec<usize>,
     /// The index into events where this sequence’s `Enter` currently resides.
     index: usize,
     /// The (shifted) point where this sequence starts.
@@ -172,7 +174,7 @@ pub fn resolve(tokenizer: &mut Tokenizer) -> Option<Subresult> {
             // An opener matching our closer:
             if sequence_open.open
                 && sequence_close.marker == sequence_open.marker
-                && sequence_close.balance == sequence_open.balance
+                && sequence_close.stack == sequence_open.stack
             {
                 // If the opening can close or the closing can open,
                 // and the close size *is not* a multiple of three,
@@ -219,23 +221,20 @@ pub fn resolve(tokenizer: &mut Tokenizer) -> Option<Subresult> {
     }
 
     tokenizer.map.consume(&mut tokenizer.events);
-
     None
 }
 
 /// Get sequences.
 fn get_sequences(tokenizer: &mut Tokenizer) -> Vec<Sequence> {
     let mut index = 0;
-    let mut balance = 0;
+    let mut stack = vec![];
     let mut sequences = vec![];
 
     while index < tokenizer.events.len() {
         let enter = &tokenizer.events[index];
 
-        if enter.kind == Kind::Enter {
-            balance += 1;
-
-            if enter.name == Name::AttentionSequence {
+        if enter.name == Name::AttentionSequence {
+            if enter.kind == Kind::Enter {
                 let end = index + 1;
                 let exit = &tokenizer.events[end];
 
@@ -255,7 +254,7 @@ fn get_sequences(tokenizer: &mut Tokenizer) -> Vec<Sequence> {
 
                 sequences.push(Sequence {
                     index,
-                    balance,
+                    stack: stack.clone(),
                     start_point: enter.point.clone(),
                     end_point: exit.point.clone(),
                     size: exit.point.index - enter.point.index,
@@ -272,8 +271,10 @@ fn get_sequences(tokenizer: &mut Tokenizer) -> Vec<Sequence> {
                     marker,
                 });
             }
+        } else if enter.kind == Kind::Enter {
+            stack.push(index);
         } else {
-            balance -= 1;
+            stack.pop();
        }
 
         index += 1;
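Note: the substantive change in this file replaces the `balance: usize` depth counter with a `stack: Vec<usize>` of the indices of currently open events. A depth alone cannot tell two sibling constructs apart, so an emphasis opener in one link could previously pair with a closer in another link at the same depth; comparing stacks rejects that. A self-contained sketch of the idea, with a stand-in event list rather than the crate's internals:

```rs
fn main() {
    // (enter?, name) pairs standing in for tokenizer events.
    let events = [
        (true, "link"),      // 0: enter first link
        (true, "attention"), // 1: `*` sequence inside the first link
        (false, "link"),     // 2: exit first link
        (true, "link"),      // 3: enter second link
        (true, "attention"), // 4: `*` sequence inside the second link
        (false, "link"),     // 5: exit second link
    ];

    let mut stack: Vec<usize> = Vec::new();
    let mut seen = Vec::new();

    for (index, (enter, name)) in events.iter().enumerate() {
        if *name == "attention" {
            // Like `Sequence { stack: stack.clone(), .. }` in the commit.
            seen.push(stack.clone());
        } else if *enter {
            stack.push(index);
        } else {
            stack.pop();
        }
    }

    // Same depth (a `balance` of 1 for both)...
    assert_eq!(seen[0].len(), seen[1].len());
    // ...but different surroundings, so the sequences must not pair up.
    assert_ne!(seen[0], seen[1]);
}
```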
2 changes: 1 addition & 1 deletion src/construct/document.rs
@@ -423,7 +423,7 @@ pub fn flow_end(tokenizer: &mut Tokenizer) -> State {
     if !document_lazy_continuation_current && !child.events.is_empty() {
         let before = skip::opt_back(&child.events, child.events.len() - 1, &[Name::LineEnding]);
         let name = &child.events[before].name;
-        if name == &Name::Content {
+        if name == &Name::Content || name == &Name::HeadingSetextUnderline {
             document_lazy_continuation_current = true;
         }
     }
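Note: `HeadingSetextUnderline` now also counts when tracking whether the current line lazily continues the previous one, matching the `heading_setext.rs` change further down. An illustration of the kind of input involved — a paragraph in a block quote followed by a lazy line that looks like a setext underline (`to_html` is the crate's real entry point; the exact output is deliberately not asserted here):

```rs
fn main() {
    // The lazy `-` line after the block quote exercises this code path.
    println!("{}", markdown::to_html("> a\n-"));
}
```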
2 changes: 1 addition & 1 deletion src/construct/gfm_autolink_literal.rs
@@ -334,7 +334,7 @@ pub fn www_prefix_inside(tokenizer: &mut Tokenizer) -> State {
 /// ```
 pub fn www_prefix_after(tokenizer: &mut Tokenizer) -> State {
     // If there is *anything*, we can link.
-    if tokenizer.current == None {
+    if tokenizer.current.is_none() {
         State::Nok
     } else {
         State::Ok
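Note: here and in several files below, `tokenizer.current == None` becomes `tokenizer.current.is_none()`. The `==` form only compiles when the wrapped type is `PartialEq` and is what clippy's `partialeq_to_none` lint flags; `is_none()`/`is_some()` work for any `Option<T>` and state the intent directly. A quick illustration:

```rs
fn main() {
    let current: Option<u8> = Some(b'[');

    // Compiles only because `u8: PartialEq`; clippy flags this form.
    assert!(!(current == None));

    // Equivalent, works for any `Option<T>`, and reads as intended.
    assert!(!current.is_none());
    assert!(current.is_some());
}
```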
1 change: 1 addition & 0 deletions src/construct/gfm_table.rs
@@ -883,6 +883,7 @@ pub fn resolve(tokenizer: &mut Tokenizer) -> Option<Subresult> {
         flush_table_end(tokenizer, last_table_end, last_table_has_body);
     }
 
+    tokenizer.map.consume(&mut tokenizer.events);
     None
 }
 
4 changes: 2 additions & 2 deletions src/construct/gfm_task_list_item_check.rs
@@ -61,7 +61,7 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
         .tokenize_state
         .document_at_first_paragraph_of_list_item
         && tokenizer.current == Some(b'[')
-        && tokenizer.previous == None
+        && tokenizer.previous.is_none()
     {
         tokenizer.enter(Name::GfmTaskListItemCheck);
         tokenizer.enter(Name::GfmTaskListItemMarker);
@@ -149,7 +149,7 @@ pub fn after(tokenizer: &mut Tokenizer) -> State {
 /// ```
 pub fn after_space_or_tab(tokenizer: &mut Tokenizer) -> State {
     // End of paragraph, after whitespace, after check, is not okay.
-    if tokenizer.current == None {
+    if tokenizer.current.is_none() {
         State::Nok
     } else {
         State::Ok
1 change: 1 addition & 0 deletions src/construct/heading_atx.rs
@@ -280,5 +280,6 @@ pub fn resolve(tokenizer: &mut Tokenizer) -> Option<Subresult> {
         index += 1;
     }
 
+    tokenizer.map.consume(&mut tokenizer.events);
     None
 }
8 changes: 2 additions & 6 deletions src/construct/heading_setext.rs
@@ -92,13 +92,12 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
         && !tokenizer.pierce
         // Require a paragraph before.
         && (!tokenizer.events.is_empty()
-            && tokenizer.events[skip::opt_back(
+            && matches!(tokenizer.events[skip::opt_back(
                 &tokenizer.events,
                 tokenizer.events.len() - 1,
                 &[Name::LineEnding, Name::SpaceOrTab],
             )]
-            .name
-                == Name::Content)
+            .name, Name::Content | Name::HeadingSetextUnderline))
     {
         tokenizer.enter(Name::HeadingSetextUnderline);
 
@@ -185,8 +184,6 @@ pub fn after(tokenizer: &mut Tokenizer) -> State {
 
 /// Resolve heading (setext).
 pub fn resolve(tokenizer: &mut Tokenizer) -> Option<Subresult> {
-    tokenizer.map.consume(&mut tokenizer.events);
-
     let mut enter = skip::to(&tokenizer.events, 0, &[Name::HeadingSetextUnderline]);
 
     while enter < tokenizer.events.len() {
@@ -280,6 +277,5 @@ pub fn resolve(tokenizer: &mut Tokenizer) -> Option<Subresult> {
     }
 
     tokenizer.map.consume(&mut tokenizer.events);
-
     None
 }
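Note: besides also accepting `HeadingSetextUnderline` before an underline, the condition moves from a chained `==` comparison to the `matches!` macro, which tests one expression against a pattern with alternatives and returns a `bool`. A small illustration with a stand-in enum, not the crate's `Name`:

```rs
#[derive(PartialEq)]
enum Name {
    Content,
    HeadingSetextUnderline,
    LineEnding,
}

fn main() {
    let name = Name::HeadingSetextUnderline;

    // Before: equality tests; extending them means chaining `||`.
    let _old = name == Name::Content || name == Name::HeadingSetextUnderline;

    // After: one pattern with alternatives; also works without `PartialEq`.
    let new = matches!(name, Name::Content | Name::HeadingSetextUnderline);
    assert!(new);
}
```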
1 change: 0 additions & 1 deletion src/construct/label_end.rs
@@ -669,7 +669,6 @@ pub fn resolve(tokenizer: &mut Tokenizer) -> Option<Subresult> {
     mark_as_data(tokenizer, &starts);
 
     tokenizer.map.consume(&mut tokenizer.events);
-
     None
 }
 
1 change: 1 addition & 0 deletions src/construct/list_item.rs
@@ -469,5 +469,6 @@ pub fn resolve(tokenizer: &mut Tokenizer) -> Option<Subresult> {
         index += 1;
     }
 
+    tokenizer.map.consume(&mut tokenizer.events);
     None
 }
2 changes: 1 addition & 1 deletion src/construct/mdx_esm.rs
@@ -216,7 +216,7 @@ fn parse_esm(tokenizer: &mut Tokenizer) -> State {
             State::Error(format!("{}:{}: {}", point.line, point.column, message))
         }
         MdxSignal::Eof(message) => {
-            if tokenizer.current == None {
+            if tokenizer.current.is_none() {
                 State::Error(format!(
                     "{}:{}: {}",
                     tokenizer.point.line, tokenizer.point.column, message
3 changes: 1 addition & 2 deletions src/construct/partial_data.rs
@@ -74,8 +74,6 @@ pub fn inside(tokenizer: &mut Tokenizer) -> State {
 
 /// Merge adjacent data events.
 pub fn resolve(tokenizer: &mut Tokenizer) -> Option<Subresult> {
-    tokenizer.map.consume(&mut tokenizer.events);
-
     let mut index = 0;
 
     // Loop through events and merge adjacent data events.
@@ -107,5 +105,6 @@ pub fn resolve(tokenizer: &mut Tokenizer) -> Option<Subresult> {
         index += 1;
     }
 
+    tokenizer.map.consume(&mut tokenizer.events);
     None
 }
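Note: this is the pattern repeated across the resolvers in this commit: `tokenizer.map.consume(&mut tokenizer.events)` moves to the very end (or is added where it was missing), so a resolver queues edits against a stable event list and applies them once before returning. A self-contained mock of that shape — the `EditMap`, `add`, and `consume` names mirror the crate's, but the types here are stand-ins:

```rs
struct EditMap {
    /// (index, how many events to remove, what to insert).
    edits: Vec<(usize, usize, Vec<&'static str>)>,
}

impl EditMap {
    fn add(&mut self, index: usize, remove: usize, add: Vec<&'static str>) {
        self.edits.push((index, remove, add));
    }

    fn consume(&mut self, events: &mut Vec<&'static str>) {
        // Apply edits back to front so earlier indices stay valid.
        self.edits.sort_by_key(|edit| edit.0);
        while let Some((index, remove, add)) = self.edits.pop() {
            events.splice(index..index + remove, add);
        }
    }
}

fn main() {
    let mut events = vec!["data", "data", "line-ending"];
    let mut map = EditMap { edits: vec![] };

    // Scan with stable indices: queue a merge instead of mutating in place.
    map.add(0, 2, vec!["data(merged)"]);

    // One application at the end, as the resolvers now do.
    map.consume(&mut events);
    assert_eq!(events, ["data(merged)", "line-ending"]);
}
```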
2 changes: 1 addition & 1 deletion src/construct/partial_mdx_jsx.rs
@@ -1119,7 +1119,7 @@ fn crash(tokenizer: &Tokenizer, at: &str, expect: &str) -> State {
         "{}:{}: Unexpected {} {}, expected {}",
         tokenizer.point.line,
         tokenizer.point.column,
-        format_char_opt(if tokenizer.current == None {
+        format_char_opt(if tokenizer.current.is_none() {
             None
         } else {
             char_after_index(tokenizer.parse_state.bytes, tokenizer.point.index)
4 changes: 2 additions & 2 deletions src/construct/partial_whitespace.rs
@@ -67,8 +67,6 @@ use alloc::vec;
 
 /// Resolve whitespace.
 pub fn resolve_whitespace(tokenizer: &mut Tokenizer, hard_break: bool, trim_whole: bool) {
-    tokenizer.map.consume(&mut tokenizer.events);
-
     let mut index = 0;
 
     while index < tokenizer.events.len() {
@@ -86,6 +84,8 @@ pub fn resolve_whitespace(tokenizer: &mut Tokenizer, hard_break: bool, trim_whol
 
         index += 1;
     }
+
+    tokenizer.map.consume(&mut tokenizer.events);
 }
 
 /// Trim a [`Data`][Name::Data] event.
1 change: 1 addition & 0 deletions src/construct/text.rs
@@ -259,5 +259,6 @@ pub fn resolve(tokenizer: &mut Tokenizer) -> Option<Subresult> {
         resolve_gfm_autolink_literal(tokenizer);
     }
 
+    tokenizer.map.consume(&mut tokenizer.events);
     None
 }
2 changes: 1 addition & 1 deletion src/mdast.rs
@@ -250,7 +250,7 @@ impl ToString for Node {
             | Node::Image(_)
             | Node::ImageReference(_)
             | Node::ThematicBreak(_)
-            | Node::Definition(_) => "".into(),
+            | Node::Definition(_) => String::new(),
         }
     }
 }
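Note: `String::new()` replaces `"".into()` here and in `src/to_html.rs` below; it names the resulting type instead of leaning on inference through `Into`, and neither form allocates for the empty string. For illustration:

```rs
fn main() {
    // Both produce an empty `String`, but the second states the type
    // directly instead of inferring it through `Into`.
    let a: String = "".into();
    let b = String::new();
    assert_eq!(a, b);
    assert_eq!(b.capacity(), 0); // `String::new` performs no allocation.
}
```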
4 changes: 2 additions & 2 deletions src/subtokenize.rs
@@ -96,7 +96,7 @@ pub fn subtokenize(
         debug_assert_eq!(event.kind, Kind::Enter);
 
         // No need to enter linked events again.
-        if link.previous == None
+        if link.previous.is_none()
             && (filter.is_none() || &link.content == filter.as_ref().unwrap())
         {
             // Index into `events` pointing to a chunk.
@@ -148,7 +148,7 @@ pub fn subtokenize(
         let link_curr = enter.link.as_ref().expect("expected link");
         debug_assert_eq!(enter.kind, Kind::Enter);
 
-        if link_curr.previous != None {
+        if link_curr.previous.is_some() {
             tokenizer.define_skip(enter.point.clone());
         }
 
2 changes: 1 addition & 1 deletion src/to_html.rs
@@ -685,7 +685,7 @@ fn on_enter_paragraph(context: &mut CompileContext) {
 /// Handle [`Enter`][Kind::Enter]:[`Resource`][Name::Resource].
 fn on_enter_resource(context: &mut CompileContext) {
     context.buffer(); // We can have line endings in the resource, ignore them.
-    context.media_stack.last_mut().unwrap().destination = Some("".into());
+    context.media_stack.last_mut().unwrap().destination = Some(String::new());
 }
 
 /// Handle [`Enter`][Kind::Enter]:[`ResourceDestinationString`][Name::ResourceDestinationString].