Skip to content

Commit 28f2d2f

Browse files
authored
Check formatting and Clippy lints in CI (#520)
* Remove unstable rustfmt options
* Fix formatting with cargo fmt
* Apply clippy suggestions
* Check formatting and clippy in CI
1 parent 2294ed0 commit 28f2d2f

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

43 files changed

+510
-574
lines changed

.github/workflows/main.yml

+22
Original file line numberDiff line numberDiff line change
@@ -70,3 +70,25 @@ jobs:
7070
- name: Mark the job as unsuccessful
7171
run: exit 1
7272
if: ${{ !success() }}
73+
74+
lint:
75+
name: Lint
76+
runs-on: ubuntu-latest
77+
steps:
78+
- uses: actions/checkout@v3
79+
80+
- name: Install stable toolchain
81+
run: |
82+
rustup set profile minimal
83+
rustup override set stable
84+
85+
- name: Install clippy
86+
run: |
87+
rustup component add clippy
88+
rustup component add rustfmt
89+
90+
- name: Format
91+
run: cargo fmt --all -- --check
92+
93+
- name: Run clippy
94+
run: cargo clippy --all-features --all-targets -- -D warnings

html5ever/benches/html5ever.rs

+2-3
Original file line numberDiff line numberDiff line change
@@ -27,12 +27,11 @@ fn run_bench(c: &mut Criterion, name: &str) {
2727
let mut path = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
2828
path.push("data/bench/");
2929
path.push(name);
30-
let mut file = fs::File::open(&path).ok().expect("can't open file");
30+
let mut file = fs::File::open(&path).expect("can't open file");
3131

3232
// Read the file and treat it as an infinitely repeating sequence of characters.
3333
let mut file_input = ByteTendril::new();
3434
file.read_to_tendril(&mut file_input)
35-
.ok()
3635
.expect("can't read file");
3736
let file_input: StrTendril = file_input.try_reinterpret().unwrap();
3837
let size = file_input.len();
@@ -55,7 +54,7 @@ fn run_bench(c: &mut Criterion, name: &str) {
5554
c.bench_function(&test_name, move |b| {
5655
b.iter(|| {
5756
let mut tok = Tokenizer::new(Sink, Default::default());
58-
let mut buffer = BufferQueue::new();
57+
let mut buffer = BufferQueue::default();
5958
// We are doing clone inside the bench function, this is not ideal, but possibly
6059
// necessary since our iterator consumes the underlying buffer.
6160
for buf in input.clone().into_iter() {

html5ever/examples/arena.rs

+4-4
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,7 @@ fn main() {
2828

2929
fn html5ever_parse_slice_into_arena<'a>(bytes: &[u8], arena: Arena<'a>) -> Ref<'a> {
3030
let sink = Sink {
31-
arena: arena,
31+
arena,
3232
document: arena.alloc(Node::new(NodeData::Document)),
3333
quirks_mode: QuirksMode::NoQuirks,
3434
};
@@ -91,7 +91,7 @@ impl<'arena> Node<'arena> {
9191
next_sibling: Cell::new(None),
9292
first_child: Cell::new(None),
9393
last_child: Cell::new(None),
94-
data: data,
94+
data,
9595
}
9696
}
9797

@@ -209,7 +209,7 @@ impl<'arena> TreeSink for Sink<'arena> {
209209

210210
fn get_template_contents(&mut self, target: &Ref<'arena>) -> Ref<'arena> {
211211
if let NodeData::Element {
212-
template_contents: Some(ref contents),
212+
template_contents: Some(contents),
213213
..
214214
} = target.data
215215
{
@@ -255,7 +255,7 @@ impl<'arena> TreeSink for Sink<'arena> {
255255

256256
fn create_pi(&mut self, target: StrTendril, data: StrTendril) -> Ref<'arena> {
257257
self.new_node(NodeData::ProcessingInstruction {
258-
target: target,
258+
target,
259259
contents: data,
260260
})
261261
}

html5ever/examples/noop-tokenize.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,7 @@ impl TokenSink for Sink {
3232
fn main() {
3333
let mut chunk = ByteTendril::new();
3434
io::stdin().read_to_tendril(&mut chunk).unwrap();
35-
let mut input = BufferQueue::new();
35+
let mut input = BufferQueue::default();
3636
input.push_back(chunk.try_reinterpret().unwrap());
3737

3838
let mut tok = Tokenizer::new(Sink(Vec::new()), Default::default());

html5ever/examples/noop-tree-builder.rs

+2-2
Original file line numberDiff line numberDiff line change
@@ -44,7 +44,7 @@ impl TreeSink for Sink {
4444
}
4545

4646
fn get_template_contents(&mut self, target: &usize) -> usize {
47-
if let Some(expanded_name!(html "template")) = self.names.get(&target).map(|n| n.expanded())
47+
if let Some(expanded_name!(html "template")) = self.names.get(target).map(|n| n.expanded())
4848
{
4949
target + 1
5050
} else {
@@ -91,7 +91,7 @@ impl TreeSink for Sink {
9191

9292
fn append_doctype_to_document(&mut self, _: StrTendril, _: StrTendril, _: StrTendril) {}
9393
fn add_attrs_if_missing(&mut self, target: &usize, _attrs: Vec<Attribute>) {
94-
assert!(self.names.contains_key(&target), "not an element");
94+
assert!(self.names.contains_key(target), "not an element");
9595
}
9696
fn remove_from_parent(&mut self, _target: &usize) {}
9797
fn reparent_children(&mut self, _node: &usize, _new_parent: &usize) {}

html5ever/examples/tokenize.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -85,7 +85,7 @@ fn main() {
8585
let mut sink = TokenPrinter { in_char_run: false };
8686
let mut chunk = ByteTendril::new();
8787
io::stdin().read_to_tendril(&mut chunk).unwrap();
88-
let mut input = BufferQueue::new();
88+
let mut input = BufferQueue::default();
8989
input.push_back(chunk.try_reinterpret().unwrap());
9090

9191
let mut tok = Tokenizer::new(

html5ever/macros/match_token.rs

+29-35
Original file line numberDiff line numberDiff line change
@@ -141,16 +141,16 @@ struct MatchToken {
141141

142142
struct MatchTokenArm {
143143
binding: Option<syn::Ident>,
144-
lhs: LHS,
145-
rhs: RHS,
144+
lhs: Lhs,
145+
rhs: Rhs,
146146
}
147147

148-
enum LHS {
148+
enum Lhs {
149149
Tags(Vec<Tag>),
150150
Pattern(syn::Pat),
151151
}
152152

153-
enum RHS {
153+
enum Rhs {
154154
Expression(syn::Expr),
155155
Else,
156156
}
@@ -188,17 +188,17 @@ impl Parse for Tag {
188188
}
189189
}
190190

191-
impl Parse for LHS {
191+
impl Parse for Lhs {
192192
fn parse(input: ParseStream) -> Result<Self> {
193193
if input.peek(Token![<]) {
194194
let mut tags = Vec::new();
195195
while !input.peek(Token![=>]) {
196196
tags.push(input.parse()?);
197197
}
198-
Ok(LHS::Tags(tags))
198+
Ok(Lhs::Tags(tags))
199199
} else {
200200
let p = input.call(syn::Pat::parse_single)?;
201-
Ok(LHS::Pattern(p))
201+
Ok(Lhs::Pattern(p))
202202
}
203203
}
204204
}
@@ -212,7 +212,7 @@ impl Parse for MatchTokenArm {
212212
} else {
213213
None
214214
};
215-
let lhs = input.parse::<LHS>()?;
215+
let lhs = input.parse::<Lhs>()?;
216216
input.parse::<Token![=>]>()?;
217217
let rhs = if input.peek(syn::token::Brace) {
218218
let block = input.parse::<syn::Block>().unwrap();
@@ -222,15 +222,15 @@ impl Parse for MatchTokenArm {
222222
block,
223223
};
224224
input.parse::<Option<Token![,]>>()?;
225-
RHS::Expression(syn::Expr::Block(block))
225+
Rhs::Expression(syn::Expr::Block(block))
226226
} else if input.peek(Token![else]) {
227227
input.parse::<Token![else]>()?;
228228
input.parse::<Token![,]>()?;
229-
RHS::Else
229+
Rhs::Else
230230
} else {
231231
let expr = input.parse::<syn::Expr>().unwrap();
232232
input.parse::<Option<Token![,]>>()?;
233-
RHS::Expression(expr)
233+
Rhs::Expression(expr)
234234
};
235235

236236
Ok(MatchTokenArm { binding, lhs, rhs })
@@ -283,12 +283,12 @@ fn expand_match_token_macro(match_token: MatchToken) -> TokenStream {
283283
};
284284

285285
match (lhs, rhs) {
286-
(LHS::Pattern(_), RHS::Else) => {
286+
(Lhs::Pattern(_), Rhs::Else) => {
287287
panic!("'else' may not appear with an ordinary pattern")
288288
},
289289

290290
// ordinary pattern => expression
291-
(LHS::Pattern(pat), RHS::Expression(expr)) => {
291+
(Lhs::Pattern(pat), Rhs::Expression(expr)) => {
292292
if !wildcards_patterns.is_empty() {
293293
panic!(
294294
"ordinary patterns may not appear after wildcard tags {:?} {:?}",
@@ -299,7 +299,7 @@ fn expand_match_token_macro(match_token: MatchToken) -> TokenStream {
299299
},
300300

301301
// <tag> <tag> ... => else
302-
(LHS::Tags(tags), RHS::Else) => {
302+
(Lhs::Tags(tags), Rhs::Else) => {
303303
for tag in tags {
304304
if !seen_tags.insert(tag.clone()) {
305305
panic!("duplicate tag");
@@ -313,7 +313,7 @@ fn expand_match_token_macro(match_token: MatchToken) -> TokenStream {
313313

314314
// <_> => expression
315315
// <tag> <tag> ... => expression
316-
(LHS::Tags(tags), RHS::Expression(expr)) => {
316+
(Lhs::Tags(tags), Rhs::Expression(expr)) => {
317317
// Is this arm a tag wildcard?
318318
// `None` if we haven't processed the first tag yet.
319319
let mut wildcard = None;
@@ -388,9 +388,9 @@ fn expand_match_token_macro(match_token: MatchToken) -> TokenStream {
388388

389389
let (last_pat, last_expr) = match (binding, lhs, rhs) {
390390
(Some(_), _, _) => panic!("the last arm cannot have an @-binding"),
391-
(None, LHS::Tags(_), _) => panic!("the last arm cannot have tag patterns"),
392-
(None, _, RHS::Else) => panic!("the last arm cannot use 'else'"),
393-
(None, LHS::Pattern(p), RHS::Expression(e)) => (p, e),
391+
(None, Lhs::Tags(_), _) => panic!("the last arm cannot have tag patterns"),
392+
(None, _, Rhs::Else) => panic!("the last arm cannot use 'else'"),
393+
(None, Lhs::Pattern(p), Rhs::Expression(e)) => (p, e),
394394
};
395395

396396
quote! {
@@ -418,29 +418,23 @@ fn expand_match_token_macro(match_token: MatchToken) -> TokenStream {
418418

419419
impl Fold for MatchTokenParser {
420420
fn fold_stmt(&mut self, stmt: syn::Stmt) -> syn::Stmt {
421-
match stmt {
422-
syn::Stmt::Item(syn::Item::Macro(syn::ItemMacro { ref mac, .. })) => {
423-
if mac.path == parse_quote!(match_token) {
424-
return syn::fold::fold_stmt(
425-
self,
426-
syn::Stmt::Expr(expand_match_token(&mac.tokens), None),
427-
);
428-
}
429-
},
430-
_ => {},
421+
if let syn::Stmt::Item(syn::Item::Macro(syn::ItemMacro { ref mac, .. })) = stmt {
422+
if mac.path == parse_quote!(match_token) {
423+
return syn::fold::fold_stmt(
424+
self,
425+
syn::Stmt::Expr(expand_match_token(&mac.tokens), None),
426+
);
427+
}
431428
}
432429

433430
syn::fold::fold_stmt(self, stmt)
434431
}
435432

436433
fn fold_expr(&mut self, expr: syn::Expr) -> syn::Expr {
437-
match expr {
438-
syn::Expr::Macro(syn::ExprMacro { ref mac, .. }) => {
439-
if mac.path == parse_quote!(match_token) {
440-
return syn::fold::fold_expr(self, expand_match_token(&mac.tokens));
441-
}
442-
},
443-
_ => {},
434+
if let syn::Expr::Macro(syn::ExprMacro { ref mac, .. }) = expr {
435+
if mac.path == parse_quote!(match_token) {
436+
return syn::fold::fold_expr(self, expand_match_token(&mac.tokens));
437+
}
444438
}
445439

446440
syn::fold::fold_expr(self, expr)

html5ever/src/driver.rs

+2-2
Original file line numberDiff line numberDiff line change
@@ -45,7 +45,7 @@ where
4545
let tok = Tokenizer::new(tb, opts.tokenizer);
4646
Parser {
4747
tokenizer: tok,
48-
input_buffer: BufferQueue::new(),
48+
input_buffer: BufferQueue::default(),
4949
}
5050
}
5151

@@ -88,7 +88,7 @@ where
8888
let tok = Tokenizer::new(tb, tok_opts);
8989
Parser {
9090
tokenizer: tok,
91-
input_buffer: BufferQueue::new(),
91+
input_buffer: BufferQueue::default(),
9292
}
9393
}
9494

html5ever/src/serialize/mod.rs

+30-30
Original file line numberDiff line numberDiff line change
@@ -52,7 +52,7 @@ impl Default for SerializeOpts {
5252
#[derive(Default)]
5353
struct ElemInfo {
5454
html_name: Option<LocalName>,
55-
ignore_children: bool
55+
ignore_children: bool,
5656
}
5757

5858
pub struct HtmlSerializer<Wr: Write> {
@@ -162,28 +162,28 @@ impl<Wr: Write> Serializer for HtmlSerializer<Wr> {
162162
}
163163
self.writer.write_all(b">")?;
164164

165-
let ignore_children = name.ns == ns!(html) &&
166-
match name.local {
167-
local_name!("area") |
168-
local_name!("base") |
169-
local_name!("basefont") |
170-
local_name!("bgsound") |
171-
local_name!("br") |
172-
local_name!("col") |
173-
local_name!("embed") |
174-
local_name!("frame") |
175-
local_name!("hr") |
176-
local_name!("img") |
177-
local_name!("input") |
178-
local_name!("keygen") |
179-
local_name!("link") |
180-
local_name!("meta") |
181-
local_name!("param") |
182-
local_name!("source") |
183-
local_name!("track") |
184-
local_name!("wbr") => true,
185-
_ => false,
186-
};
165+
let ignore_children = name.ns == ns!(html)
166+
&& matches!(
167+
name.local,
168+
local_name!("area")
169+
| local_name!("base")
170+
| local_name!("basefont")
171+
| local_name!("bgsound")
172+
| local_name!("br")
173+
| local_name!("col")
174+
| local_name!("embed")
175+
| local_name!("frame")
176+
| local_name!("hr")
177+
| local_name!("img")
178+
| local_name!("input")
179+
| local_name!("keygen")
180+
| local_name!("link")
181+
| local_name!("meta")
182+
| local_name!("param")
183+
| local_name!("source")
184+
| local_name!("track")
185+
| local_name!("wbr")
186+
);
187187

188188
self.stack.push(ElemInfo {
189189
html_name,
@@ -213,13 +213,13 @@ impl<Wr: Write> Serializer for HtmlSerializer<Wr> {
213213

214214
fn write_text(&mut self, text: &str) -> io::Result<()> {
215215
let escape = match self.parent().html_name {
216-
Some(local_name!("style")) |
217-
Some(local_name!("script")) |
218-
Some(local_name!("xmp")) |
219-
Some(local_name!("iframe")) |
220-
Some(local_name!("noembed")) |
221-
Some(local_name!("noframes")) |
222-
Some(local_name!("plaintext")) => false,
216+
Some(local_name!("style"))
217+
| Some(local_name!("script"))
218+
| Some(local_name!("xmp"))
219+
| Some(local_name!("iframe"))
220+
| Some(local_name!("noembed"))
221+
| Some(local_name!("noframes"))
222+
| Some(local_name!("plaintext")) => false,
223223

224224
Some(local_name!("noscript")) => !self.opts.scripting_enabled,
225225

html5ever/src/tokenizer/char_ref/mod.rs

+4-4
Original file line numberDiff line numberDiff line change
@@ -224,9 +224,8 @@ impl CharRefTokenizer {
224224
input: &mut BufferQueue,
225225
) -> Status {
226226
let mut unconsume = StrTendril::from_char('#');
227-
match self.hex_marker {
228-
Some(c) => unconsume.push_char(c),
229-
None => (),
227+
if let Some(c) = self.hex_marker {
228+
unconsume.push_char(c)
230229
}
231230

232231
input.push_front(unconsume);
@@ -361,7 +360,8 @@ impl CharRefTokenizer {
361360
// then, for historical reasons, flush code points consumed as a character
362361
// reference and switch to the return state.
363362

364-
let unconsume_all = match (self.is_consumed_in_attribute, last_matched, next_after) {
363+
let unconsume_all = match (self.is_consumed_in_attribute, last_matched, next_after)
364+
{
365365
(_, ';', _) => false,
366366
(true, _, Some('=')) => true,
367367
(true, _, Some(c)) if c.is_ascii_alphanumeric() => true,

0 commit comments

Comments (0)