Check formatting and Clippy lints in CI #520

Merged · 4 commits · Mar 22, 2024
22 changes: 22 additions & 0 deletions .github/workflows/main.yml
@@ -70,3 +70,25 @@ jobs:
       - name: Mark the job as unsuccessful
         run: exit 1
         if: ${{ !success() }}
+
+  lint:
+    name: Lint
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Install stable toolchain
+        run: |
+          rustup set profile minimal
+          rustup override set stable
+
+      - name: Install clippy
+        run: |
+          rustup component add clippy
+          rustup component add rustfmt
+
+      - name: Format
+        run: cargo fmt --all -- --check
+
+      - name: Run clippy
+        run: cargo clippy --all-features --all-targets -- -D warnings
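For context on the two commands: `cargo fmt --all -- --check` exits non-zero when any file differs from rustfmt's output, and the trailing `-D warnings` tells rustc to deny every warning clippy emits, so a single lint hit fails the job. A minimal sketch of code this job would reject (a hypothetical function, not from this repository):

```rust
// Rejected under `cargo clippy -- -D warnings`:
fn is_ready(flag: bool) -> bool {
    flag == true // clippy::bool_comparison: comparison to `true` is redundant
}

// Accepted form:
fn is_ready_fixed(flag: bool) -> bool {
    flag
}

fn main() {
    assert_eq!(is_ready(true), is_ready_fixed(true));
}
```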
5 changes: 2 additions & 3 deletions html5ever/benches/html5ever.rs
@@ -27,12 +27,11 @@ fn run_bench(c: &mut Criterion, name: &str) {
     let mut path = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
     path.push("data/bench/");
     path.push(name);
-    let mut file = fs::File::open(&path).ok().expect("can't open file");
+    let mut file = fs::File::open(&path).expect("can't open file");

     // Read the file and treat it as an infinitely repeating sequence of characters.
     let mut file_input = ByteTendril::new();
     file.read_to_tendril(&mut file_input)
-        .ok()
         .expect("can't read file");
     let file_input: StrTendril = file_input.try_reinterpret().unwrap();
     let size = file_input.len();
@@ -55,7 +54,7 @@ fn run_bench(c: &mut Criterion, name: &str) {
     c.bench_function(&test_name, move |b| {
         b.iter(|| {
             let mut tok = Tokenizer::new(Sink, Default::default());
-            let mut buffer = BufferQueue::new();
+            let mut buffer = BufferQueue::default();
             // We are doing clone inside the bench function, this is not ideal, but possibly
             // necessary since our iterator consumes the underlying buffer.
            for buf in input.clone().into_iter() {
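The `.ok().expect(...)` cleanups fix clippy's `ok_expect` lint: converting a `Result` to an `Option` before calling `expect` discards the error value, whereas `Result::expect` includes the error's `Debug` output in the panic message. A minimal standalone sketch (the file path is illustrative):

```rust
use std::fs;

fn main() {
    // Flagged by clippy::ok_expect: the io::Error is dropped before
    // the panic message is constructed.
    let old_style = fs::read_to_string("data/bench/example.html")
        .ok()
        .expect("can't read file");

    // Preferred: panics with the message *and* the underlying io::Error.
    let new_style = fs::read_to_string("data/bench/example.html").expect("can't read file");

    assert_eq!(old_style, new_style);
}
```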
8 changes: 4 additions & 4 deletions html5ever/examples/arena.rs
@@ -28,7 +28,7 @@ fn main() {

 fn html5ever_parse_slice_into_arena<'a>(bytes: &[u8], arena: Arena<'a>) -> Ref<'a> {
     let sink = Sink {
-        arena: arena,
+        arena,
         document: arena.alloc(Node::new(NodeData::Document)),
         quirks_mode: QuirksMode::NoQuirks,
     };
@@ -91,7 +91,7 @@ impl<'arena> Node<'arena> {
             next_sibling: Cell::new(None),
             first_child: Cell::new(None),
             last_child: Cell::new(None),
-            data: data,
+            data,
         }
     }

@@ -209,7 +209,7 @@ impl<'arena> TreeSink for Sink<'arena> {

     fn get_template_contents(&mut self, target: &Ref<'arena>) -> Ref<'arena> {
         if let NodeData::Element {
-            template_contents: Some(ref contents),
+            template_contents: Some(contents),
             ..
         } = target.data
         {
@@ -255,7 +255,7 @@ impl<'arena> TreeSink for Sink<'arena> {

     fn create_pi(&mut self, target: StrTendril, data: StrTendril) -> Ref<'arena> {
         self.new_node(NodeData::ProcessingInstruction {
-            target: target,
+            target,
             contents: data,
         })
     }
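The `arena: arena` and `target: target` edits fix clippy's `redundant_field_names` lint: when the initializing variable shares the field's name, field-init shorthand is preferred. (The `Some(ref contents)` → `Some(contents)` change is related tidying: when matching through a reference, match ergonomics already bind by reference, so the explicit `ref` is redundant.) A minimal sketch of the shorthand:

```rust
struct Sink {
    arena: String,
    quirks: bool,
}

fn build(arena: String) -> Sink {
    Sink {
        // clippy::redundant_field_names would flag `arena: arena,`;
        // the shorthand below means exactly the same thing.
        arena,
        quirks: false,
    }
}

fn main() {
    let sink = build(String::from("bump"));
    assert_eq!(sink.arena, "bump");
    assert!(!sink.quirks);
}
```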
2 changes: 1 addition & 1 deletion html5ever/examples/noop-tokenize.rs
@@ -32,7 +32,7 @@ impl TokenSink for Sink {

 fn main() {
     let mut chunk = ByteTendril::new();
     io::stdin().read_to_tendril(&mut chunk).unwrap();
-    let mut input = BufferQueue::new();
+    let mut input = BufferQueue::default();
     input.push_back(chunk.try_reinterpret().unwrap());

     let mut tok = Tokenizer::new(Sink(Vec::new()), Default::default());
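The `BufferQueue::new()` → `BufferQueue::default()` swap here (and at the other call sites in this PR) presumably tracks markup5ever dropping its hand-written constructor in favor of a derived `Default` impl — the shape clippy's `new_without_default` lint steers toward. A sketch of the idiom with an illustrative type (not markup5ever's actual definition):

```rust
use std::collections::VecDeque;

/// Illustrative stand-in for BufferQueue; not the real definition.
#[derive(Default)]
struct Queue {
    buffers: VecDeque<String>,
}

fn main() {
    // With a derived impl, construction goes through the Default trait
    // instead of an inherent `new()`.
    let mut queue = Queue::default();
    queue.buffers.push_back(String::from("<html>"));
    assert_eq!(queue.buffers.len(), 1);
}
```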
4 changes: 2 additions & 2 deletions html5ever/examples/noop-tree-builder.rs
@@ -44,7 +44,7 @@ impl TreeSink for Sink {
     }

     fn get_template_contents(&mut self, target: &usize) -> usize {
-        if let Some(expanded_name!(html "template")) = self.names.get(&target).map(|n| n.expanded())
+        if let Some(expanded_name!(html "template")) = self.names.get(target).map(|n| n.expanded())
         {
             target + 1
         } else {
@@ -91,7 +91,7 @@ impl TreeSink for Sink {

     fn append_doctype_to_document(&mut self, _: StrTendril, _: StrTendril, _: StrTendril) {}
     fn add_attrs_if_missing(&mut self, target: &usize, _attrs: Vec<Attribute>) {
-        assert!(self.names.contains_key(&target), "not an element");
+        assert!(self.names.contains_key(target), "not an element");
     }
     fn remove_from_parent(&mut self, _target: &usize) {}
     fn reparent_children(&mut self, _node: &usize, _new_parent: &usize) {}
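Dropping the `&` in `self.names.get(&target)` and `contains_key(&target)` fixes clippy's `needless_borrow` lint: `target` is already a `&usize`, so `&target` produces a `&&usize` that the compiler must immediately deref again. A minimal sketch:

```rust
use std::collections::HashMap;

fn is_known(names: &HashMap<usize, String>, target: &usize) -> bool {
    // `names.contains_key(&target)` would pass a `&&usize` and be flagged
    // by clippy::needless_borrow; `target` is already the needed reference.
    names.contains_key(target)
}

fn main() {
    let mut names = HashMap::new();
    names.insert(1, String::from("html"));
    assert!(is_known(&names, &1));
}
```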
2 changes: 1 addition & 1 deletion html5ever/examples/tokenize.rs
@@ -85,7 +85,7 @@ fn main() {
     let mut sink = TokenPrinter { in_char_run: false };
     let mut chunk = ByteTendril::new();
     io::stdin().read_to_tendril(&mut chunk).unwrap();
-    let mut input = BufferQueue::new();
+    let mut input = BufferQueue::default();
     input.push_back(chunk.try_reinterpret().unwrap());

     let mut tok = Tokenizer::new(
64 changes: 29 additions & 35 deletions html5ever/macros/match_token.rs
@@ -141,16 +141,16 @@ struct MatchToken {

 struct MatchTokenArm {
     binding: Option<syn::Ident>,
-    lhs: LHS,
-    rhs: RHS,
+    lhs: Lhs,
+    rhs: Rhs,
 }

-enum LHS {
+enum Lhs {
     Tags(Vec<Tag>),
     Pattern(syn::Pat),
 }

-enum RHS {
+enum Rhs {
     Expression(syn::Expr),
     Else,
 }
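The `LHS`/`RHS` renames satisfy clippy's `upper_case_acronyms` lint, which asks for acronyms in type names to be written in upper camel case. A minimal sketch:

```rust
// Flagged by clippy::upper_case_acronyms:
// enum RHS { Expression, Else }

// Accepted spelling:
enum Rhs {
    Expression,
    Else,
}

fn main() {
    for arm in [Rhs::Expression, Rhs::Else] {
        match arm {
            Rhs::Expression => println!("expression arm"),
            Rhs::Else => println!("else arm"),
        }
    }
}
```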
@@ -188,17 +188,17 @@ impl Parse for Tag {
     }
 }

-impl Parse for LHS {
+impl Parse for Lhs {
     fn parse(input: ParseStream) -> Result<Self> {
         if input.peek(Token![<]) {
             let mut tags = Vec::new();
             while !input.peek(Token![=>]) {
                 tags.push(input.parse()?);
             }
-            Ok(LHS::Tags(tags))
+            Ok(Lhs::Tags(tags))
         } else {
             let p = input.call(syn::Pat::parse_single)?;
-            Ok(LHS::Pattern(p))
+            Ok(Lhs::Pattern(p))
         }
     }
 }
@@ -212,7 +212,7 @@ impl Parse for MatchTokenArm {
         } else {
             None
         };
-        let lhs = input.parse::<LHS>()?;
+        let lhs = input.parse::<Lhs>()?;
         input.parse::<Token![=>]>()?;
         let rhs = if input.peek(syn::token::Brace) {
             let block = input.parse::<syn::Block>().unwrap();
@@ -222,15 +222,15 @@ impl Parse for MatchTokenArm {
                 block,
             };
             input.parse::<Option<Token![,]>>()?;
-            RHS::Expression(syn::Expr::Block(block))
+            Rhs::Expression(syn::Expr::Block(block))
         } else if input.peek(Token![else]) {
             input.parse::<Token![else]>()?;
             input.parse::<Token![,]>()?;
-            RHS::Else
+            Rhs::Else
         } else {
             let expr = input.parse::<syn::Expr>().unwrap();
             input.parse::<Option<Token![,]>>()?;
-            RHS::Expression(expr)
+            Rhs::Expression(expr)
         };

         Ok(MatchTokenArm { binding, lhs, rhs })
@@ -283,12 +283,12 @@ fn expand_match_token_macro(match_token: MatchToken) -> TokenStream {
     };

     match (lhs, rhs) {
-        (LHS::Pattern(_), RHS::Else) => {
+        (Lhs::Pattern(_), Rhs::Else) => {
             panic!("'else' may not appear with an ordinary pattern")
         },

         // ordinary pattern => expression
-        (LHS::Pattern(pat), RHS::Expression(expr)) => {
+        (Lhs::Pattern(pat), Rhs::Expression(expr)) => {
             if !wildcards_patterns.is_empty() {
                 panic!(
                     "ordinary patterns may not appear after wildcard tags {:?} {:?}",
@@ -299,7 +299,7 @@ fn expand_match_token_macro(match_token: MatchToken) -> TokenStream {
         },

         // <tag> <tag> ... => else
-        (LHS::Tags(tags), RHS::Else) => {
+        (Lhs::Tags(tags), Rhs::Else) => {
             for tag in tags {
                 if !seen_tags.insert(tag.clone()) {
                     panic!("duplicate tag");
@@ -313,7 +313,7 @@ fn expand_match_token_macro(match_token: MatchToken) -> TokenStream {

         // <_> => expression
         // <tag> <tag> ... => expression
-        (LHS::Tags(tags), RHS::Expression(expr)) => {
+        (Lhs::Tags(tags), Rhs::Expression(expr)) => {
             // Is this arm a tag wildcard?
             // `None` if we haven't processed the first tag yet.
             let mut wildcard = None;
@@ -388,9 +388,9 @@ fn expand_match_token_macro(match_token: MatchToken) -> TokenStream {

     let (last_pat, last_expr) = match (binding, lhs, rhs) {
         (Some(_), _, _) => panic!("the last arm cannot have an @-binding"),
-        (None, LHS::Tags(_), _) => panic!("the last arm cannot have tag patterns"),
-        (None, _, RHS::Else) => panic!("the last arm cannot use 'else'"),
-        (None, LHS::Pattern(p), RHS::Expression(e)) => (p, e),
+        (None, Lhs::Tags(_), _) => panic!("the last arm cannot have tag patterns"),
+        (None, _, Rhs::Else) => panic!("the last arm cannot use 'else'"),
+        (None, Lhs::Pattern(p), Rhs::Expression(e)) => (p, e),
     };

     quote! {
@@ -418,29 +418,23 @@ fn expand_match_token_macro(match_token: MatchToken) -> TokenStream {

 impl Fold for MatchTokenParser {
     fn fold_stmt(&mut self, stmt: syn::Stmt) -> syn::Stmt {
-        match stmt {
-            syn::Stmt::Item(syn::Item::Macro(syn::ItemMacro { ref mac, .. })) => {
-                if mac.path == parse_quote!(match_token) {
-                    return syn::fold::fold_stmt(
-                        self,
-                        syn::Stmt::Expr(expand_match_token(&mac.tokens), None),
-                    );
-                }
-            },
-            _ => {},
+        if let syn::Stmt::Item(syn::Item::Macro(syn::ItemMacro { ref mac, .. })) = stmt {
+            if mac.path == parse_quote!(match_token) {
+                return syn::fold::fold_stmt(
+                    self,
+                    syn::Stmt::Expr(expand_match_token(&mac.tokens), None),
+                );
+            }
         }

         syn::fold::fold_stmt(self, stmt)
     }

     fn fold_expr(&mut self, expr: syn::Expr) -> syn::Expr {
-        match expr {
-            syn::Expr::Macro(syn::ExprMacro { ref mac, .. }) => {
-                if mac.path == parse_quote!(match_token) {
-                    return syn::fold::fold_expr(self, expand_match_token(&mac.tokens));
-                }
-            },
-            _ => {},
+        if let syn::Expr::Macro(syn::ExprMacro { ref mac, .. }) = expr {
+            if mac.path == parse_quote!(match_token) {
+                return syn::fold::fold_expr(self, expand_match_token(&mac.tokens));
+            }
        }

        syn::fold::fold_expr(self, expr)
4 changes: 2 additions & 2 deletions html5ever/src/driver.rs
@@ -45,7 +45,7 @@ where
     let tok = Tokenizer::new(tb, opts.tokenizer);
     Parser {
         tokenizer: tok,
-        input_buffer: BufferQueue::new(),
+        input_buffer: BufferQueue::default(),
     }
 }

@@ -88,7 +88,7 @@ where
     let tok = Tokenizer::new(tb, tok_opts);
     Parser {
         tokenizer: tok,
-        input_buffer: BufferQueue::new(),
+        input_buffer: BufferQueue::default(),
     }
 }

60 changes: 30 additions & 30 deletions html5ever/src/serialize/mod.rs
@@ -52,7 +52,7 @@ impl Default for SerializeOpts {

 #[derive(Default)]
 struct ElemInfo {
     html_name: Option<LocalName>,
-    ignore_children: bool
+    ignore_children: bool,
 }

 pub struct HtmlSerializer<Wr: Write> {
@@ -162,28 +162,28 @@ impl<Wr: Write> Serializer for HtmlSerializer<Wr> {
         }
         self.writer.write_all(b">")?;

-        let ignore_children = name.ns == ns!(html) &&
-            match name.local {
-                local_name!("area") |
-                local_name!("base") |
-                local_name!("basefont") |
-                local_name!("bgsound") |
-                local_name!("br") |
-                local_name!("col") |
-                local_name!("embed") |
-                local_name!("frame") |
-                local_name!("hr") |
-                local_name!("img") |
-                local_name!("input") |
-                local_name!("keygen") |
-                local_name!("link") |
-                local_name!("meta") |
-                local_name!("param") |
-                local_name!("source") |
-                local_name!("track") |
-                local_name!("wbr") => true,
-                _ => false,
-            };
+        let ignore_children = name.ns == ns!(html)
+            && matches!(
+                name.local,
+                local_name!("area")
+                    | local_name!("base")
+                    | local_name!("basefont")
+                    | local_name!("bgsound")
+                    | local_name!("br")
+                    | local_name!("col")
+                    | local_name!("embed")
+                    | local_name!("frame")
+                    | local_name!("hr")
+                    | local_name!("img")
+                    | local_name!("input")
+                    | local_name!("keygen")
+                    | local_name!("link")
+                    | local_name!("meta")
+                    | local_name!("param")
+                    | local_name!("source")
+                    | local_name!("track")
+                    | local_name!("wbr")
+            );

         self.stack.push(ElemInfo {
             html_name,
@@ -213,13 +213,13 @@ impl<Wr: Write> Serializer for HtmlSerializer<Wr> {

     fn write_text(&mut self, text: &str) -> io::Result<()> {
         let escape = match self.parent().html_name {
-            Some(local_name!("style")) |
-            Some(local_name!("script")) |
-            Some(local_name!("xmp")) |
-            Some(local_name!("iframe")) |
-            Some(local_name!("noembed")) |
-            Some(local_name!("noframes")) |
-            Some(local_name!("plaintext")) => false,
+            Some(local_name!("style"))
+            | Some(local_name!("script"))
+            | Some(local_name!("xmp"))
+            | Some(local_name!("iframe"))
+            | Some(local_name!("noembed"))
+            | Some(local_name!("noframes"))
+            | Some(local_name!("plaintext")) => false,

             Some(local_name!("noscript")) => !self.opts.scripting_enabled,

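Two things changed in this file: the boolean `match` over void-element names became a `matches!` call, per clippy's `match_like_matches_macro` lint, and the `|`/`&&` separators moved to the start of continuation lines, which is the layout current rustfmt enforces. A minimal sketch of the lint:

```rust
fn is_void_element(name: &str) -> bool {
    // Flagged by clippy::match_like_matches_macro:
    // match name {
    //     "br" | "hr" | "img" => true,
    //     _ => false,
    // }

    // Preferred: `matches!` states the same test in one expression.
    matches!(name, "br" | "hr" | "img")
}

fn main() {
    assert!(is_void_element("br"));
    assert!(!is_void_element("div"));
}
```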
8 changes: 4 additions & 4 deletions html5ever/src/tokenizer/char_ref/mod.rs
@@ -224,9 +224,8 @@ impl CharRefTokenizer {
         input: &mut BufferQueue,
     ) -> Status {
         let mut unconsume = StrTendril::from_char('#');
-        match self.hex_marker {
-            Some(c) => unconsume.push_char(c),
-            None => (),
+        if let Some(c) = self.hex_marker {
+            unconsume.push_char(c)
         }

         input.push_front(unconsume);
@@ -361,7 +360,7 @@ impl CharRefTokenizer {
         // then, for historical reasons, flush code points consumed as a character
         // reference and switch to the return state.

-        let unconsume_all = match (self.is_consumed_in_attribute, last_matched, next_after) {
+        let unconsume_all = match (self.is_consumed_in_attribute, last_matched, next_after)
+        {
             (_, ';', _) => false,
             (true, _, Some('=')) => true,
             (true, _, Some(c)) if c.is_ascii_alphanumeric() => true,