Commit d86b273: Fix format
Signed-off-by: Taym <[email protected]>
Taym95 committed Jul 26, 2024
1 parent 00f249e commit d86b273
Showing 4 changed files with 42 additions and 42 deletions.
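Every one of the 42 paired changes below is the same mechanical fix: a stray space in a shared-reference type, `& BufferQueue`, is tightened to rustfmt's canonical `&BufferQueue`. A minimal sketch of the pattern, using a hypothetical stub type and function purely for illustration (the real BufferQueue lives in the shared markup5ever crate):

// Stub standing in for markup5ever's BufferQueue, just so the snippet compiles.
struct BufferQueue;

// Before this commit, signatures were written `input: & BufferQueue`;
// rustfmt writes reference types with no space after `&`.
fn feed(input: &BufferQueue) {
    let _ = input; // body elided: only the signature's formatting is at issue
}

fn main() {
    let queue = BufferQueue;
    feed(&queue); // call sites were already written without the space
}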
22 changes: 11 additions & 11 deletions html5ever/src/tokenizer/char_ref/mod.rs
@@ -115,7 +115,7 @@ impl CharRefTokenizer {
pub(super) fn step<Sink: TokenSink>(
&mut self,
tokenizer: &mut Tokenizer<Sink>,
-input: & BufferQueue,
+input: &BufferQueue,
) -> Status {
if self.result.is_some() {
return Done;
@@ -135,7 +135,7 @@ impl CharRefTokenizer {
fn do_begin<Sink: TokenSink>(
&mut self,
tokenizer: &mut Tokenizer<Sink>,
-input: & BufferQueue,
+input: &BufferQueue,
) -> Status {
match unwrap_or_return!(tokenizer.peek(input), Stuck) {
'a'..='z' | 'A'..='Z' | '0'..='9' => {
@@ -156,7 +156,7 @@ impl CharRefTokenizer {
fn do_octothorpe<Sink: TokenSink>(
&mut self,
tokenizer: &mut Tokenizer<Sink>,
-input: & BufferQueue,
+input: &BufferQueue,
) -> Status {
let c = unwrap_or_return!(tokenizer.peek(input), Stuck);
match c {
@@ -177,7 +177,7 @@ impl CharRefTokenizer {
fn do_numeric<Sink: TokenSink>(
&mut self,
tokenizer: &mut Tokenizer<Sink>,
-input: & BufferQueue,
+input: &BufferQueue,
base: u32,
) -> Status {
let c = unwrap_or_return!(tokenizer.peek(input), Stuck);
@@ -207,7 +207,7 @@ impl CharRefTokenizer {
fn do_numeric_semicolon<Sink: TokenSink>(
&mut self,
tokenizer: &mut Tokenizer<Sink>,
-input: & BufferQueue,
+input: &BufferQueue,
) -> Status {
match unwrap_or_return!(tokenizer.peek(input), Stuck) {
';' => tokenizer.discard_char(input),
@@ -221,7 +221,7 @@ impl CharRefTokenizer {
fn unconsume_numeric<Sink: TokenSink>(
&mut self,
tokenizer: &mut Tokenizer<Sink>,
-input: & BufferQueue,
+input: &BufferQueue,
) -> Status {
let mut unconsume = StrTendril::from_char('#');
if let Some(c) = self.hex_marker {
@@ -270,7 +270,7 @@ impl CharRefTokenizer {
fn do_named<Sink: TokenSink>(
&mut self,
tokenizer: &mut Tokenizer<Sink>,
-input: & BufferQueue,
+input: &BufferQueue,
) -> Status {
// peek + discard skips over newline normalization, therefore making it easier to
// un-consume
@@ -304,14 +304,14 @@ impl CharRefTokenizer {
tokenizer.emit_error(msg);
}

-fn unconsume_name(&mut self, input: & BufferQueue) {
+fn unconsume_name(&mut self, input: &BufferQueue) {
input.push_front(self.name_buf_opt.take().unwrap());
}

fn finish_named<Sink: TokenSink>(
&mut self,
tokenizer: &mut Tokenizer<Sink>,
-input: & BufferQueue,
+input: &BufferQueue,
end_char: Option<char>,
) -> Status {
match self.name_match {
@@ -395,7 +395,7 @@ impl CharRefTokenizer {
fn do_bogus_name<Sink: TokenSink>(
&mut self,
tokenizer: &mut Tokenizer<Sink>,
-input: & BufferQueue,
+input: &BufferQueue,
) -> Status {
// peek + discard skips over newline normalization, therefore making it easier to
// un-consume
@@ -414,7 +414,7 @@ impl CharRefTokenizer {
pub(super) fn end_of_file<Sink: TokenSink>(
&mut self,
tokenizer: &mut Tokenizer<Sink>,
-input: & BufferQueue,
+input: &BufferQueue,
) {
while self.result.is_none() {
match self.state {
18 changes: 9 additions & 9 deletions html5ever/src/tokenizer/mod.rs
@@ -206,7 +206,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
}

/// Feed an input string into the tokenizer.
-pub fn feed(&mut self, input: & BufferQueue) -> TokenizerResult<Sink::Handle> {
+pub fn feed(&mut self, input: &BufferQueue) -> TokenizerResult<Sink::Handle> {
if input.is_empty() {
return TokenizerResult::Done;
}
@@ -248,7 +248,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
//§ preprocessing-the-input-stream
// Get the next input character, which might be the character
// 'c' that we already consumed from the buffers.
-fn get_preprocessed_char(&mut self, mut c: char, input: & BufferQueue) -> Option<char> {
+fn get_preprocessed_char(&mut self, mut c: char, input: &BufferQueue) -> Option<char> {
if self.ignore_lf {
self.ignore_lf = false;
if c == '\n' {
@@ -283,7 +283,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {

//§ tokenization
// Get the next input character, if one is available.
-fn get_char(&mut self, input: & BufferQueue) -> Option<char> {
+fn get_char(&mut self, input: &BufferQueue) -> Option<char> {
if self.reconsume {
self.reconsume = false;
Some(self.current_char)
@@ -294,7 +294,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
}
}

-fn pop_except_from(&mut self, input: & BufferQueue, set: SmallCharSet) -> Option<SetResult> {
+fn pop_except_from(&mut self, input: &BufferQueue, set: SmallCharSet) -> Option<SetResult> {
// Bail to the slow path for various corner cases.
// This means that `FromSet` can contain characters not in the set!
// It shouldn't matter because the fallback `FromSet` case should
@@ -321,7 +321,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
// NB: this doesn't set the current input character.
fn eat(
&mut self,
-input: & BufferQueue,
+input: &BufferQueue,
pat: &str,
eq: fn(&u8, &u8) -> bool,
) -> Option<bool> {
@@ -346,7 +346,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
}

/// Run the state machine for as long as we can.
-fn run(&mut self, input: & BufferQueue) -> TokenizerResult<Sink::Handle> {
+fn run(&mut self, input: &BufferQueue) -> TokenizerResult<Sink::Handle> {
if self.opts.profile {
loop {
let state = self.state;
@@ -569,7 +569,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
}
}

-fn discard_char(&mut self, input: & BufferQueue) {
+fn discard_char(&mut self, input: &BufferQueue) {
// peek() deals in un-processed characters (no newline normalization), while get_char()
// does.
//
@@ -698,7 +698,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
// Return true if we should be immediately re-invoked
// (this just simplifies control flow vs. break / continue).
#[allow(clippy::never_loop)]
-fn step(&mut self, input: & BufferQueue) -> ProcessResult<Sink::Handle> {
+fn step(&mut self, input: &BufferQueue) -> ProcessResult<Sink::Handle> {
if self.char_ref_tokenizer.is_some() {
return self.step_char_ref_tokenizer(input);
}
@@ -1384,7 +1384,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
}
}

-fn step_char_ref_tokenizer(&mut self, input: & BufferQueue) -> ProcessResult<Sink::Handle> {
+fn step_char_ref_tokenizer(&mut self, input: &BufferQueue) -> ProcessResult<Sink::Handle> {
// FIXME HACK: Take and replace the tokenizer so we don't
// double-mut-borrow self. This is why it's boxed.
let mut tok = self.char_ref_tokenizer.take().unwrap();
22 changes: 11 additions & 11 deletions xml5ever/src/tokenizer/char_ref/mod.rs
@@ -116,7 +116,7 @@ impl CharRefTokenizer {
pub fn step<Sink: TokenSink>(
&mut self,
tokenizer: &mut XmlTokenizer<Sink>,
-input: & BufferQueue,
+input: &BufferQueue,
) -> Status {
if self.result.is_some() {
return Done;
@@ -136,7 +136,7 @@ impl CharRefTokenizer {
fn do_begin<Sink: TokenSink>(
&mut self,
tokenizer: &mut XmlTokenizer<Sink>,
-input: & BufferQueue,
+input: &BufferQueue,
) -> Status {
match unwrap_or_return!(tokenizer.peek(input), Stuck) {
'\t' | '\n' | '\x0C' | ' ' | '<' | '&' => self.finish_none(),
@@ -159,7 +159,7 @@ impl CharRefTokenizer {
fn do_octothorpe<Sink: TokenSink>(
&mut self,
tokenizer: &mut XmlTokenizer<Sink>,
-input: & BufferQueue,
+input: &BufferQueue,
) -> Status {
let c = unwrap_or_return!(tokenizer.peek(input), Stuck);
match c {
@@ -181,7 +181,7 @@ impl CharRefTokenizer {
&mut self,
tokenizer: &mut XmlTokenizer<Sink>,
base: u32,
-input: & BufferQueue,
+input: &BufferQueue,
) -> Status {
let c = unwrap_or_return!(tokenizer.peek(input), Stuck);
match c.to_digit(base) {
@@ -210,7 +210,7 @@ impl CharRefTokenizer {
fn do_numeric_semicolon<Sink: TokenSink>(
&mut self,
tokenizer: &mut XmlTokenizer<Sink>,
-input: & BufferQueue,
+input: &BufferQueue,
) -> Status {
match unwrap_or_return!(tokenizer.peek(input), Stuck) {
';' => tokenizer.discard_char(input),
@@ -224,7 +224,7 @@ impl CharRefTokenizer {
fn unconsume_numeric<Sink: TokenSink>(
&mut self,
tokenizer: &mut XmlTokenizer<Sink>,
-input: & BufferQueue,
+input: &BufferQueue,
) -> Status {
let mut unconsume = StrTendril::from_char('#');
if let Some(c) = self.hex_marker {
@@ -273,7 +273,7 @@ impl CharRefTokenizer {
fn do_named<Sink: TokenSink>(
&mut self,
tokenizer: &mut XmlTokenizer<Sink>,
-input: & BufferQueue,
+input: &BufferQueue,
) -> Status {
let c = unwrap_or_return!(tokenizer.get_char(input), Stuck);
self.name_buf_mut().push_char(c);
@@ -307,7 +307,7 @@ impl CharRefTokenizer {
fn unconsume_name<Sink: TokenSink>(
&mut self,
tokenizer: &mut XmlTokenizer<Sink>,
-input: & BufferQueue,
+input: &BufferQueue,
) {
tokenizer.unconsume(input, self.name_buf_opt.take().unwrap());
}
@@ -316,7 +316,7 @@ impl CharRefTokenizer {
&mut self,
tokenizer: &mut XmlTokenizer<Sink>,
end_char: Option<char>,
-input: & BufferQueue,
+input: &BufferQueue,
) -> Status {
match self.name_match {
None => {
@@ -404,7 +404,7 @@ impl CharRefTokenizer {
fn do_bogus_name<Sink: TokenSink>(
&mut self,
tokenizer: &mut XmlTokenizer<Sink>,
-input: & BufferQueue,
+input: &BufferQueue,
) -> Status {
let c = unwrap_or_return!(tokenizer.get_char(input), Stuck);
self.name_buf_mut().push_char(c);
@@ -420,7 +420,7 @@ impl CharRefTokenizer {
pub fn end_of_file<Sink: TokenSink>(
&mut self,
tokenizer: &mut XmlTokenizer<Sink>,
-input: & BufferQueue,
+input: &BufferQueue,
) {
while self.result.is_none() {
match self.state {
22 changes: 11 additions & 11 deletions xml5ever/src/tokenizer/mod.rs
@@ -200,7 +200,7 @@ impl<Sink: TokenSink> XmlTokenizer<Sink> {
}

/// Feed an input string into the tokenizer.
-pub fn feed(&mut self, input: & BufferQueue) {
+pub fn feed(&mut self, input: &BufferQueue) {
if input.is_empty() {
return;
}
@@ -229,7 +229,7 @@ impl<Sink: TokenSink> XmlTokenizer<Sink> {

// Get the next input character, which might be the character
// 'c' that we already consumed from the buffers.
-fn get_preprocessed_char(&mut self, mut c: char, input: & BufferQueue) -> Option<char> {
+fn get_preprocessed_char(&mut self, mut c: char, input: &BufferQueue) -> Option<char> {
if self.ignore_lf {
self.ignore_lf = false;
if c == '\n' {
@@ -274,7 +274,7 @@ impl<Sink: TokenSink> XmlTokenizer<Sink> {
self.emit_error(msg);
}

-fn pop_except_from(&mut self, input: & BufferQueue, set: SmallCharSet) -> Option<SetResult> {
+fn pop_except_from(&mut self, input: &BufferQueue, set: SmallCharSet) -> Option<SetResult> {
// Bail to the slow path for various corner cases.
// This means that `FromSet` can contain characters not in the set!
// It shouldn't matter because the fallback `FromSet` case should
@@ -300,7 +300,7 @@ impl<Sink: TokenSink> XmlTokenizer<Sink> {
//
// NB: this doesn't do input stream preprocessing or set the current input
// character.
-fn eat(&mut self, input: & BufferQueue, pat: &str) -> Option<bool> {
+fn eat(&mut self, input: &BufferQueue, pat: &str) -> Option<bool> {
input.push_front(replace(&mut self.temp_buf, StrTendril::new()));
match input.eat(pat, u8::eq_ignore_ascii_case) {
None if self.at_eof => Some(false),
@@ -315,7 +315,7 @@ impl<Sink: TokenSink> XmlTokenizer<Sink> {
}

/// Run the state machine for as long as we can.
-pub fn run(&mut self, input: & BufferQueue) {
+pub fn run(&mut self, input: &BufferQueue) {
if self.opts.profile {
loop {
let state = self.state;
@@ -344,7 +344,7 @@ impl<Sink: TokenSink> XmlTokenizer<Sink> {

//§ tokenization
// Get the next input character, if one is available.
-fn get_char(&mut self, input: & BufferQueue) -> Option<char> {
+fn get_char(&mut self, input: &BufferQueue) -> Option<char> {
if self.reconsume {
self.reconsume = false;
Some(self.current_char)
@@ -497,20 +497,20 @@ impl<Sink: TokenSink> XmlTokenizer<Sink> {
}
}

-fn peek(&mut self, input: & BufferQueue) -> Option<char> {
+fn peek(&mut self, input: &BufferQueue) -> Option<char> {
if self.reconsume {
Some(self.current_char)
} else {
input.peek()
}
}

-fn discard_char(&mut self, input: & BufferQueue) {
+fn discard_char(&mut self, input: &BufferQueue) {
let c = self.get_char(input);
assert!(c.is_some());
}

-fn unconsume(&mut self, input: & BufferQueue, buf: StrTendril) {
+fn unconsume(&mut self, input: &BufferQueue, buf: StrTendril) {
input.push_front(buf);
}
}
@@ -640,7 +640,7 @@ impl<Sink: TokenSink> XmlTokenizer<Sink> {
// Return true if we should be immediately re-invoked
// (this just simplifies control flow vs. break / continue).
#[allow(clippy::never_loop)]
-fn step(&mut self, input: & BufferQueue) -> bool {
+fn step(&mut self, input: &BufferQueue) -> bool {
if self.char_ref_tokenizer.is_some() {
return self.step_char_ref_tokenizer(input);
}
@@ -1206,7 +1206,7 @@ impl<Sink: TokenSink> XmlTokenizer<Sink> {
}
}

-fn step_char_ref_tokenizer(&mut self, input: & BufferQueue) -> bool {
+fn step_char_ref_tokenizer(&mut self, input: &BufferQueue) -> bool {
let mut tok = self.char_ref_tokenizer.take().unwrap();
let outcome = tok.step(self, input);

