
Commit 90b5aef (2 parents: 3080f45 + eea185e)

Merge branch 'main' into test/unordered-and-ordered-list

Signed-off-by: Titus <[email protected]>

85 files changed: +164, -182 lines (not every changed file is shown below).

Untitled.txt (-1)

@@ -6,7 +6,6 @@ micromark.js: `atLineEnding` in html (text) should always eat arbitrary whitespa
 ```rs
 // ---------------------
 // Useful helper:
-extern crate std;
 use std::println;
 use alloc::string::String;

benches/bench.rs (+1 -3)

@@ -1,6 +1,4 @@
-#[macro_use]
-extern crate criterion;
-use criterion::{BenchmarkId, Criterion};
+use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion};
 use std::fs;
 
 fn readme(c: &mut Criterion) {
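Like most files in this commit, the benchmark drops the Rust-2015-style `#[macro_use] extern crate criterion;` and imports the macros directly. A minimal sketch of that post-2018 pattern; the benchmark body is hypothetical, not the repository's actual `readme` bench:

```rs
// Sketch only: `criterion_group!`/`criterion_main!` come in through `use`,
// no `#[macro_use] extern crate` needed on the 2018+ editions.
use criterion::{criterion_group, criterion_main, Criterion};

// Hypothetical benchmark body, not the crate's real `readme` bench.
fn example(c: &mut Criterion) {
    c.bench_function("to_html", |b| {
        b.iter(|| markdown::to_html("## Hello, *world*!"));
    });
}

criterion_group!(benches, example);
criterion_main!(benches);
```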

build.rs (-2)

@@ -1,4 +1,3 @@
-extern crate reqwest;
 use regex::Regex;
 use std::fs;
@@ -67,7 +66,6 @@ async fn commonmark() {
 // > 👉 **Important**: this module is generated by `build.rs`.
 // > It is generate from the latest CommonMark website.
 
-extern crate markdown;
 use markdown::{{to_html_with_options, CompileOptions, Options}};
 use pretty_assertions::assert_eq;

examples/lib.rs (-2)

@@ -1,5 +1,3 @@
-extern crate markdown;
-
 fn main() -> Result<(), String> {
 // Turn on debugging.
 // You can show it with `RUST_LOG=debug cargo run --example lib`

fuzz/fuzz_targets/markdown.rs (+1 -5)

@@ -1,14 +1,10 @@
 #![no_main]
 use libfuzzer_sys::fuzz_target;
-extern crate markdown;
 
 fuzz_target!(|data: &[u8]| {
 if let Ok(s) = std::str::from_utf8(data) {
 let _ = markdown::to_html(s);
-let _ = markdown::to_html_with_options(
-s,
-&markdown::Options::gfm()
-);
+let _ = markdown::to_html_with_options(s, &markdown::Options::gfm());
 let _ = markdown::to_mdast(s, &markdown::ParseOptions::default());
 let _ = markdown::to_mdast(s, &markdown::ParseOptions::gfm());
 }
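The fuzz target deliberately discards the returned `Result`s with `let _ =`; outside fuzzing they would normally be propagated. A minimal sketch, assuming the `Result<_, String>` error type implied by the readme example further down:

```rs
// Minimal sketch, not part of the commit: handle the result instead of
// discarding it. The `String` error type matches the readme example below.
fn main() -> Result<(), String> {
    let html = markdown::to_html_with_options("* [x] task", &markdown::Options::gfm())?;
    println!("{}", html);
    Ok(())
}
```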

readme.md (-6)

@@ -90,8 +90,6 @@ cargo add [email protected]
 ## Use
 
 ```rs
-extern crate markdown;
-
 fn main() {
 println!("{}", markdown::to_html("## Hello, *world*!"));
 }
@@ -106,8 +104,6 @@ Yields:
 Extensions (in this case GFM):
 
 ```rs
-extern crate markdown;
-
 fn main() -> Result<(), String> {
 println!(
 "{}",
@@ -136,8 +132,6 @@ Yields:
 Syntax tree ([mdast][]):
 
 ```rs
-extern crate markdown;
-
 fn main() -> Result<(), String> {
 println!(
 "{:?}",

src/configuration.rs (-1)

@@ -1236,7 +1236,6 @@ impl Options {
 
 #[cfg(test)]
 mod tests {
-extern crate std;
 use super::*;
 use crate::util::mdx::Signal;
 use alloc::format;

src/construct/attention.rs (+12 -11)

@@ -92,8 +92,10 @@ use alloc::{vec, vec::Vec};
 struct Sequence {
 /// Marker as a byte (`u8`) used in this sequence.
 marker: u8,
-/// The depth in events where this sequence resides.
-balance: usize,
+/// We track whether sequences are in balanced events, and where those
+/// events start, so that one attention doesn’t start in say, one link, and
+/// end in another.
+stack: Vec<usize>,
 /// The index into events where this sequence’s `Enter` currently resides.
 index: usize,
 /// The (shifted) point where this sequence starts.
@@ -172,7 +174,7 @@ pub fn resolve(tokenizer: &mut Tokenizer) -> Option<Subresult> {
 // An opener matching our closer:
 if sequence_open.open
 && sequence_close.marker == sequence_open.marker
-&& sequence_close.balance == sequence_open.balance
+&& sequence_close.stack == sequence_open.stack
 {
 // If the opening can close or the closing can open,
 // and the close size *is not* a multiple of three,
@@ -219,23 +221,20 @@ pub fn resolve(tokenizer: &mut Tokenizer) -> Option<Subresult> {
 }
 
 tokenizer.map.consume(&mut tokenizer.events);
-
 None
 }
 
 /// Get sequences.
 fn get_sequences(tokenizer: &mut Tokenizer) -> Vec<Sequence> {
 let mut index = 0;
-let mut balance = 0;
+let mut stack = vec![];
 let mut sequences = vec![];
 
 while index < tokenizer.events.len() {
 let enter = &tokenizer.events[index];
 
-if enter.kind == Kind::Enter {
-balance += 1;
-
-if enter.name == Name::AttentionSequence {
+if enter.name == Name::AttentionSequence {
+if enter.kind == Kind::Enter {
 let end = index + 1;
 let exit = &tokenizer.events[end];
@@ -255,7 +254,7 @@ fn get_sequences(tokenizer: &mut Tokenizer) -> Vec<Sequence> {
 
 sequences.push(Sequence {
 index,
-balance,
+stack: stack.clone(),
 start_point: enter.point.clone(),
 end_point: exit.point.clone(),
 size: exit.point.index - enter.point.index,
@@ -272,8 +271,10 @@ fn get_sequences(tokenizer: &mut Tokenizer) -> Vec<Sequence> {
 marker,
 });
 }
+} else if enter.kind == Kind::Enter {
+stack.push(index);
 } else {
-balance -= 1;
+stack.pop();
 }
 
 index += 1;
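The new doc comment explains the fix: each attention sequence remembers the stack of enclosing events that were open when it was seen, and two sequences may only pair when those stacks match, so emphasis cannot open inside one construct (say, a link) and close inside another. A standalone sketch of that pairing rule, with illustrative types rather than the crate's real `Sequence`:

```rs
// Illustrative types only; the crate's real `Sequence` carries more fields.
struct Sequence {
    marker: u8,
    /// Indices of the enclosing `Enter` events at the time the sequence was seen.
    stack: Vec<usize>,
}

// Replaces comparing a flat `balance` depth: the whole stack must be equal.
fn can_pair(open: &Sequence, close: &Sequence) -> bool {
    open.marker == close.marker && open.stack == close.stack
}

fn main() {
    // An `*` that opened at the document root…
    let opener = Sequence { marker: b'*', stack: vec![] };
    // …cannot be closed by an `*` sitting inside some other construct
    // (its stack records the enclosing event at index 3)…
    let closer_inside_link = Sequence { marker: b'*', stack: vec![3] };
    // …but can be closed by one at the same nesting.
    let closer_at_root = Sequence { marker: b'*', stack: vec![] };

    assert!(!can_pair(&opener, &closer_inside_link));
    assert!(can_pair(&opener, &closer_at_root));
}
```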

src/construct/document.rs (+1 -1)

@@ -423,7 +423,7 @@ pub fn flow_end(tokenizer: &mut Tokenizer) -> State {
 if !document_lazy_continuation_current && !child.events.is_empty() {
 let before = skip::opt_back(&child.events, child.events.len() - 1, &[Name::LineEnding]);
 let name = &child.events[before].name;
-if name == &Name::Content {
+if name == &Name::Content || name == &Name::HeadingSetextUnderline {
 document_lazy_continuation_current = true;
 }
 }

src/construct/gfm_autolink_literal.rs (+1 -1)

@@ -334,7 +334,7 @@ pub fn www_prefix_inside(tokenizer: &mut Tokenizer) -> State {
 /// ```
 pub fn www_prefix_after(tokenizer: &mut Tokenizer) -> State {
 // If there is *anything*, we can link.
-if tokenizer.current == None {
+if tokenizer.current.is_none() {
 State::Nok
 } else {
 State::Ok
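This and several later files replace `== None` comparisons with `Option::is_none()`. Besides reading more directly, `is_none()` does not require the inner type to implement `PartialEq`. A standalone illustration, not code from the crate:

```rs
// `Option::is_none` works even when the inner type has no `PartialEq` impl,
// unlike an `== None` comparison.
#[allow(dead_code)]
struct NoEq; // deliberately does not derive PartialEq

fn main() {
    let current: Option<u8> = None;
    assert!(current.is_none()); // replaces `current == None`

    let other: Option<NoEq> = None;
    assert!(other.is_none()); // `other == None` would not compile here
}
```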

src/construct/gfm_table.rs (+1)

@@ -883,6 +883,7 @@ pub fn resolve(tokenizer: &mut Tokenizer) -> Option<Subresult> {
 flush_table_end(tokenizer, last_table_end, last_table_has_body);
 }
 
+tokenizer.map.consume(&mut tokenizer.events);
 None
 }

src/construct/gfm_task_list_item_check.rs (+2 -2)

@@ -61,7 +61,7 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
 .tokenize_state
 .document_at_first_paragraph_of_list_item
 && tokenizer.current == Some(b'[')
-&& tokenizer.previous == None
+&& tokenizer.previous.is_none()
 {
 tokenizer.enter(Name::GfmTaskListItemCheck);
 tokenizer.enter(Name::GfmTaskListItemMarker);
@@ -149,7 +149,7 @@ pub fn after(tokenizer: &mut Tokenizer) -> State {
 /// ```
 pub fn after_space_or_tab(tokenizer: &mut Tokenizer) -> State {
 // End of paragraph, after whitespace, after check, is not okay.
-if tokenizer.current == None {
+if tokenizer.current.is_none() {
 State::Nok
 } else {
 State::Ok

src/construct/heading_atx.rs (+1)

@@ -280,5 +280,6 @@ pub fn resolve(tokenizer: &mut Tokenizer) -> Option<Subresult> {
 index += 1;
 }
 
+tokenizer.map.consume(&mut tokenizer.events);
 None
 }

src/construct/heading_setext.rs (+2 -6)

@@ -92,13 +92,12 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
 && !tokenizer.pierce
 // Require a paragraph before.
 && (!tokenizer.events.is_empty()
-&& tokenizer.events[skip::opt_back(
+&& matches!(tokenizer.events[skip::opt_back(
 &tokenizer.events,
 tokenizer.events.len() - 1,
 &[Name::LineEnding, Name::SpaceOrTab],
 )]
-.name
-== Name::Content)
+.name, Name::Content | Name::HeadingSetextUnderline))
 {
 tokenizer.enter(Name::HeadingSetextUnderline);
@@ -185,8 +184,6 @@ pub fn after(tokenizer: &mut Tokenizer) -> State {
 
 /// Resolve heading (setext).
 pub fn resolve(tokenizer: &mut Tokenizer) -> Option<Subresult> {
-tokenizer.map.consume(&mut tokenizer.events);
-
 let mut enter = skip::to(&tokenizer.events, 0, &[Name::HeadingSetextUnderline]);
 
 while enter < tokenizer.events.len() {
@@ -280,6 +277,5 @@ pub fn resolve(tokenizer: &mut Tokenizer) -> Option<Subresult> {
 }
 
 tokenizer.map.consume(&mut tokenizer.events);
-
 None
 }
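The `matches!` change above folds what used to be a single equality check against `Name::Content` into one test against two patterns. A standalone illustration of the idiom, using a stand-in enum rather than the crate's real `Name`:

```rs
// Stand-in enum, not the crate's real `Name` type.
#[allow(dead_code)]
enum Name {
    Content,
    HeadingSetextUnderline,
    LineEnding,
}

// Equivalent to `name == &Name::Content || name == &Name::HeadingSetextUnderline`,
// but needs no `PartialEq` impl and reads as one pattern test.
fn allows_setext_underline(name: &Name) -> bool {
    matches!(name, Name::Content | Name::HeadingSetextUnderline)
}

fn main() {
    assert!(allows_setext_underline(&Name::Content));
    assert!(!allows_setext_underline(&Name::LineEnding));
}
```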

src/construct/label_end.rs (-1)

@@ -669,7 +669,6 @@ pub fn resolve(tokenizer: &mut Tokenizer) -> Option<Subresult> {
 mark_as_data(tokenizer, &starts);
 
 tokenizer.map.consume(&mut tokenizer.events);
-
 None
 }

src/construct/list_item.rs (+1)

@@ -469,5 +469,6 @@ pub fn resolve(tokenizer: &mut Tokenizer) -> Option<Subresult> {
 index += 1;
 }
 
+tokenizer.map.consume(&mut tokenizer.events);
 None
 }

src/construct/mdx_esm.rs (+1 -1)

@@ -216,7 +216,7 @@ fn parse_esm(tokenizer: &mut Tokenizer) -> State {
 State::Error(format!("{}:{}: {}", point.line, point.column, message))
 }
 MdxSignal::Eof(message) => {
-if tokenizer.current == None {
+if tokenizer.current.is_none() {
 State::Error(format!(
 "{}:{}: {}",
 tokenizer.point.line, tokenizer.point.column, message

src/construct/partial_data.rs (+1 -2)

@@ -74,8 +74,6 @@ pub fn inside(tokenizer: &mut Tokenizer) -> State {
 
 /// Merge adjacent data events.
 pub fn resolve(tokenizer: &mut Tokenizer) -> Option<Subresult> {
-tokenizer.map.consume(&mut tokenizer.events);
-
 let mut index = 0;
 
 // Loop through events and merge adjacent data events.
@@ -107,5 +105,6 @@ pub fn resolve(tokenizer: &mut Tokenizer) -> Option<Subresult> {
 index += 1;
 }
 
+tokenizer.map.consume(&mut tokenizer.events);
 None
 }
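A recurring change in this commit (here and in `heading_atx.rs`, `list_item.rs`, `gfm_table.rs`, `text.rs`, `partial_whitespace.rs`, …) is that resolvers now call `tokenizer.map.consume(&mut tokenizer.events)` once, at the end of the function: the map appears to collect edits while the loop runs, and `consume` applies them to the event list afterwards. A standalone sketch of that queue-then-apply shape, with illustrative types rather than the crate's real `EditMap`:

```rs
// Illustrative queue-then-apply pattern: collect edits while iterating, apply
// them afterwards so indices stay stable during the loop. Not the crate's
// real `EditMap`, just the shape of the idea.
struct EditMap {
    removals: Vec<usize>,
}

impl EditMap {
    fn new() -> Self {
        EditMap { removals: vec![] }
    }
    fn remove(&mut self, index: usize) {
        self.removals.push(index);
    }
    fn consume(&mut self, events: &mut Vec<&'static str>) {
        // Apply from the back so earlier indices stay valid.
        self.removals.sort_unstable();
        for index in self.removals.drain(..).rev() {
            events.remove(index);
        }
    }
}

fn main() {
    let mut events = vec!["enter", "data", "data", "exit"];
    let mut map = EditMap::new();
    for (index, event) in events.iter().enumerate() {
        if *event == "data" && index > 1 {
            map.remove(index); // queue while iterating; no mutation yet
        }
    }
    map.consume(&mut events); // resolvers now do this at the end
    assert_eq!(events, vec!["enter", "data", "exit"]);
}
```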

src/construct/partial_mdx_jsx.rs (+1 -1)

@@ -1119,7 +1119,7 @@ fn crash(tokenizer: &Tokenizer, at: &str, expect: &str) -> State {
 "{}:{}: Unexpected {} {}, expected {}",
 tokenizer.point.line,
 tokenizer.point.column,
-format_char_opt(if tokenizer.current == None {
+format_char_opt(if tokenizer.current.is_none() {
 None
 } else {
 char_after_index(tokenizer.parse_state.bytes, tokenizer.point.index)

src/construct/partial_whitespace.rs (+2 -2)

@@ -67,8 +67,6 @@ use alloc::vec;
 
 /// Resolve whitespace.
 pub fn resolve_whitespace(tokenizer: &mut Tokenizer, hard_break: bool, trim_whole: bool) {
-tokenizer.map.consume(&mut tokenizer.events);
-
 let mut index = 0;
 
 while index < tokenizer.events.len() {
@@ -86,6 +84,8 @@ pub fn resolve_whitespace(tokenizer: &mut Tokenizer, hard_break: bool, trim_whol
 
 index += 1;
 }
+
+tokenizer.map.consume(&mut tokenizer.events);
 }
 
 /// Trim a [`Data`][Name::Data] event.

src/construct/text.rs (+1)

@@ -259,5 +259,6 @@ pub fn resolve(tokenizer: &mut Tokenizer) -> Option<Subresult> {
 resolve_gfm_autolink_literal(tokenizer);
 }
 
+tokenizer.map.consume(&mut tokenizer.events);
 None
 }

src/mdast.rs (+1 -1)

@@ -250,7 +250,7 @@ impl ToString for Node {
 | Node::Image(_)
 | Node::ImageReference(_)
 | Node::ThematicBreak(_)
-| Node::Definition(_) => "".into(),
+| Node::Definition(_) => String::new(),
 }
 }
 }

src/subtokenize.rs (+2 -2)

@@ -96,7 +96,7 @@ pub fn subtokenize(
 debug_assert_eq!(event.kind, Kind::Enter);
 
 // No need to enter linked events again.
-if link.previous == None
+if link.previous.is_none()
 && (filter.is_none() || &link.content == filter.as_ref().unwrap())
 {
 // Index into `events` pointing to a chunk.
@@ -148,7 +148,7 @@ pub fn subtokenize(
 let link_curr = enter.link.as_ref().expect("expected link");
 debug_assert_eq!(enter.kind, Kind::Enter);
 
-if link_curr.previous != None {
+if link_curr.previous.is_some() {
 tokenizer.define_skip(enter.point.clone());
 }

src/to_html.rs (+1 -1)

@@ -685,7 +685,7 @@ fn on_enter_paragraph(context: &mut CompileContext) {
 /// Handle [`Enter`][Kind::Enter]:[`Resource`][Name::Resource].
 fn on_enter_resource(context: &mut CompileContext) {
 context.buffer(); // We can have line endings in the resource, ignore them.
-context.media_stack.last_mut().unwrap().destination = Some("".into());
+context.media_stack.last_mut().unwrap().destination = Some(String::new());
 }
 
 /// Handle [`Enter`][Kind::Enter]:[`ResourceDestinationString`][Name::ResourceDestinationString].
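Here, as in `src/mdast.rs` above, an empty string built with `"".into()` becomes `String::new()`. Both produce an empty, non-allocating `String`; the constructor simply states the intent directly. A standalone check, not code from the crate:

```rs
// Both forms yield an empty `String` with no heap allocation; `String::new()`
// avoids the `Into` conversion and reads as what it is.
fn main() {
    let a: String = "".into();
    let b = String::new();
    assert_eq!(a, b);
    assert_eq!(b.capacity(), 0);
}
```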
