Update syntax of html5gum::Tokenizer
mre committed on Nov 6, 2024
Commit 9414d6e (1 parent: 4d9e4a1)
Showing 1 changed file with 2 additions and 2 deletions.
lychee-lib/src/extract/html/html5gum.rs (2 additions, 2 deletions)
@@ -345,7 +345,7 @@ impl Emitter for &mut LinkExtractor {
 /// Extract unparsed URL strings from an HTML string.
 pub(crate) fn extract_html(buf: &str, include_verbatim: bool) -> Vec<RawUri> {
     let mut extractor = LinkExtractor::new(include_verbatim);
-    let mut tokenizer = Tokenizer::new_with_emitter(buf, &mut extractor).infallible();
+    let mut tokenizer = Tokenizer::new_with_emitter(buf, &mut extractor);
     assert!(tokenizer.next().is_none());
     extractor
         .links
@@ -357,7 +357,7 @@ pub(crate) fn extract_html(buf: &str, include_verbatim: bool) -> Vec<RawUri> {
 /// Extract fragments from id attributes within a HTML string.
 pub(crate) fn extract_html_fragments(buf: &str) -> HashSet<String> {
     let mut extractor = LinkExtractor::new(true);
-    let mut tokenizer = Tokenizer::new_with_emitter(buf, &mut extractor).infallible();
+    let mut tokenizer = Tokenizer::new_with_emitter(buf, &mut extractor);
     assert!(tokenizer.next().is_none());
     extractor.fragments
 }
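For context, a minimal usage sketch of the two extractors touched by this commit. It is based only on the signatures and doc comments visible in the diff above (extract_html(buf: &str, include_verbatim: bool) -> Vec<RawUri> and extract_html_fragments(buf: &str) -> HashSet<String>); the test module name, the sample HTML, and the assertions are illustrative, not part of the commit. The commit itself only drops the .infallible() adapter when constructing the html5gum tokenizer, matching the updated html5gum::Tokenizer syntax; the observable behavior of both functions is otherwise unchanged.

    // Hypothetical test in the same module (html5gum.rs); everything below is
    // illustrative and not part of this commit.
    #[cfg(test)]
    mod usage_sketch {
        use super::*;

        #[test]
        fn extracts_links_and_fragments() {
            let html = r#"<a href="https://example.com">home</a> <h2 id="intro">Intro</h2>"#;

            // include_verbatim = false is assumed (from the parameter name) to skip
            // links inside verbatim elements such as <pre> and <code>.
            let uris = extract_html(html, false);
            assert!(!uris.is_empty());

            // Fragments are collected from id attributes, per the doc comment above.
            let fragments = extract_html_fragments(html);
            assert!(fragments.contains("intro"));
        }
    }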
