Fix clippy lints. #396

Merged · 1 commit · Nov 7, 2024
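The diff is mechanical and falls into two patterns. Most changes rewrite impl headers that declare a lifetime parameter used only in the implementing type to use the anonymous lifetime `'_` instead, which is the rewrite clippy's `needless_lifetimes` lint suggests, as far as I can tell. The remaining changes, in src/tokenizer.rs, replace closure predicates in `find`, `rfind`, and `split` with char-array patterns. A hypothetical before/after sketch of the first pattern (the `Wrapper` type is not from this repository):

```rust
// Hypothetical sketch (not from this repository) of the lifetime-elision
// rewrite applied throughout this diff.

struct Wrapper<'a>(&'a str);

// Before: `'a` is declared on the impl but never referred to again by name,
// so declaring it only adds noise:
//
//     impl<'a> std::fmt::Display for Wrapper<'a> { ... }

// After: the anonymous lifetime `'_` stands in for "some lifetime we never
// need to name", matching the impls rewritten in this PR.
impl std::fmt::Display for Wrapper<'_> {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        f.write_str(self.0)
    }
}

fn main() {
    println!("{}", Wrapper("borrowed"));
}
```

Nothing about borrow checking changes; only the declaration noise goes away.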
28 changes: 14 additions & 14 deletions src/cow_rc_str.rs
@@ -51,7 +51,7 @@ impl<'a> From<&'a str> for CowRcStr<'a> {
}
}

-impl<'a> From<String> for CowRcStr<'a> {
+impl From<String> for CowRcStr<'_> {
#[inline]
fn from(s: String) -> Self {
CowRcStr::from_rc(Rc::new(s))
@@ -84,7 +84,7 @@ impl<'a> CowRcStr<'a> {
}
}

-impl<'a> Clone for CowRcStr<'a> {
+impl Clone for CowRcStr<'_> {
#[inline]
fn clone(&self) -> Self {
match self.unpack() {
@@ -99,7 +99,7 @@ impl<'a> Clone for CowRcStr<'a> {
}
}

-impl<'a> Drop for CowRcStr<'a> {
+impl Drop for CowRcStr<'_> {
#[inline]
fn drop(&mut self) {
if let Err(ptr) = self.unpack() {
@@ -108,7 +108,7 @@ impl<'a> Drop for CowRcStr<'a> {
}
}

-impl<'a> ops::Deref for CowRcStr<'a> {
+impl ops::Deref for CowRcStr<'_> {
type Target = str;

#[inline]
@@ -119,65 +119,65 @@ impl<'a> ops::Deref for CowRcStr<'a> {

// Boilerplate / trivial impls below.

-impl<'a> AsRef<str> for CowRcStr<'a> {
+impl AsRef<str> for CowRcStr<'_> {
#[inline]
fn as_ref(&self) -> &str {
self
}
}

-impl<'a> Borrow<str> for CowRcStr<'a> {
+impl Borrow<str> for CowRcStr<'_> {
#[inline]
fn borrow(&self) -> &str {
self
}
}

-impl<'a> Default for CowRcStr<'a> {
+impl Default for CowRcStr<'_> {
#[inline]
fn default() -> Self {
Self::from("")
}
}

-impl<'a> hash::Hash for CowRcStr<'a> {
+impl hash::Hash for CowRcStr<'_> {
#[inline]
fn hash<H: hash::Hasher>(&self, hasher: &mut H) {
str::hash(self, hasher)
}
}

-impl<'a, T: AsRef<str>> PartialEq<T> for CowRcStr<'a> {
+impl<T: AsRef<str>> PartialEq<T> for CowRcStr<'_> {
#[inline]
fn eq(&self, other: &T) -> bool {
str::eq(self, other.as_ref())
}
}

-impl<'a, T: AsRef<str>> PartialOrd<T> for CowRcStr<'a> {
+impl<T: AsRef<str>> PartialOrd<T> for CowRcStr<'_> {
#[inline]
fn partial_cmp(&self, other: &T) -> Option<cmp::Ordering> {
str::partial_cmp(self, other.as_ref())
}
}

-impl<'a> Eq for CowRcStr<'a> {}
+impl Eq for CowRcStr<'_> {}

-impl<'a> Ord for CowRcStr<'a> {
+impl Ord for CowRcStr<'_> {
#[inline]
fn cmp(&self, other: &Self) -> cmp::Ordering {
str::cmp(self, other)
}
}

-impl<'a> fmt::Display for CowRcStr<'a> {
+impl fmt::Display for CowRcStr<'_> {
#[inline]
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
str::fmt(self, formatter)
}
}

-impl<'a> fmt::Debug for CowRcStr<'a> {
+impl fmt::Debug for CowRcStr<'_> {
#[inline]
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
str::fmt(self, formatter)
8 changes: 4 additions & 4 deletions src/parser.rs
@@ -76,7 +76,7 @@ pub enum BasicParseErrorKind<'i> {
QualifiedRuleInvalid,
}

-impl<'i> fmt::Display for BasicParseErrorKind<'i> {
+impl fmt::Display for BasicParseErrorKind<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
BasicParseErrorKind::UnexpectedToken(token) => {
@@ -176,7 +176,7 @@ impl<'i, T> ParseErrorKind<'i, T> {
}
}

-impl<'i, E: fmt::Display> fmt::Display for ParseErrorKind<'i, E> {
+impl<E: fmt::Display> fmt::Display for ParseErrorKind<'_, E> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
ParseErrorKind::Basic(ref basic) => basic.fmt(f),
@@ -218,13 +218,13 @@ impl<'i, T> ParseError<'i, T> {
}
}

-impl<'i, E: fmt::Display> fmt::Display for ParseError<'i, E> {
+impl<E: fmt::Display> fmt::Display for ParseError<'_, E> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.kind.fmt(f)
}
}

-impl<'i, E: fmt::Display + fmt::Debug> std::error::Error for ParseError<'i, E> {}
+impl<E: fmt::Display + fmt::Debug> std::error::Error for ParseError<'_, E> {}

/// The owned input for a parser.
pub struct ParserInput<'i> {
4 changes: 2 additions & 2 deletions src/rules_and_declarations.rs
@@ -241,7 +241,7 @@ impl<'i, 't, 'a, P, I, E> RuleBodyParser<'i, 't, 'a, P, I, E> {
}

/// https://drafts.csswg.org/css-syntax/#consume-a-blocks-contents
-impl<'i, 't, 'a, I, P, E: 'i> Iterator for RuleBodyParser<'i, 't, 'a, P, I, E>
+impl<'i, I, P, E: 'i> Iterator for RuleBodyParser<'i, '_, '_, P, I, E>
where
P: RuleBodyItemParser<'i, I, E>,
{
@@ -349,7 +349,7 @@ where
}

/// `RuleListParser` is an iterator that yields `Ok(_)` for a rule or an `Err(..)` for an invalid one.
-impl<'i, 't, 'a, R, P, E: 'i> Iterator for StyleSheetParser<'i, 't, 'a, P>
+impl<'i, R, P, E: 'i> Iterator for StyleSheetParser<'i, '_, '_, P>
where
P: QualifiedRuleParser<'i, QualifiedRule = R, Error = E>
+ AtRuleParser<'i, AtRule = R, Error = E>,
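In the two impls above, `'i` stays declared because it is still referenced in the bounds (for example `P: RuleBodyItemParser<'i, I, E>` and `E: 'i`), while the `'t` and `'a` parameters of the parser types are never named again and become `'_`. A hypothetical sketch of that partial elision, with made-up types:

```rust
use std::marker::PhantomData;

// Made-up stand-in for a parser that borrows its input for 'i and holds
// other short-lived state under a second lifetime.
struct RuleIter<'i, 't, E> {
    input: &'i str,
    _state: &'t (),
    _err: PhantomData<E>,
}

// 'i must stay named because the bound `E: 'i` and the yielded item use it;
// the state lifetime is never mentioned again, so it can be anonymous.
impl<'i, E: 'i> Iterator for RuleIter<'i, '_, E> {
    type Item = &'i str;

    fn next(&mut self) -> Option<&'i str> {
        if self.input.is_empty() {
            return None;
        }
        // Yield the remaining input once, then stop.
        let out = self.input;
        self.input = "";
        Some(out)
    }
}
```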
6 changes: 3 additions & 3 deletions src/serializer.rs
@@ -55,7 +55,7 @@ where
Ok(())
}

-impl<'a> ToCss for Token<'a> {
+impl ToCss for Token<'_> {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result
where
W: fmt::Write,
@@ -311,7 +311,7 @@
}
}

-impl<'a, W> fmt::Write for CssStringWriter<'a, W>
+impl<W> fmt::Write for CssStringWriter<'_, W>
where
W: fmt::Write,
{
@@ -539,7 +539,7 @@ impl TokenSerializationType {
}
}

-impl<'a> Token<'a> {
+impl Token<'_> {
/// Categorize a token into a type that determines when `/**/` needs to be inserted
/// between two tokens when serialized next to each other without whitespace in between.
///
4 changes: 2 additions & 2 deletions src/tests.rs
@@ -773,7 +773,7 @@ where
}
}

-impl<'a> ToJson for CowRcStr<'a> {
+impl ToJson for CowRcStr<'_> {
fn to_json(&self) -> Value {
let s: &str = self;
s.to_json()
@@ -946,7 +946,7 @@ impl<'i> QualifiedRuleParser<'i> for JsonParser {
}
}

-impl<'i> RuleBodyItemParser<'i, Value, ()> for JsonParser {
+impl RuleBodyItemParser<'_, Value, ()> for JsonParser {
fn parse_qualified(&self) -> bool {
true
}
14 changes: 5 additions & 9 deletions src/tokenizer.rs
@@ -190,7 +190,7 @@ pub enum Token<'a> {
CloseCurlyBracket,
}

-impl<'a> Token<'a> {
+impl Token<'_> {
/// Return whether this token represents a parse error.
///
/// `BadUrl` and `BadString` are tokenizer-level parse errors.
@@ -324,11 +324,11 @@ impl<'a> Tokenizer<'a> {
let current = self.position();
let start = self
.slice(SourcePosition(0)..current)
-.rfind(|c| matches!(c, '\r' | '\n' | '\x0C'))
+.rfind(['\r', '\n', '\x0C'])
.map_or(0, |start| start + 1);
let end = self
.slice(current..SourcePosition(self.input.len()))
-.find(|c| matches!(c, '\r' | '\n' | '\x0C'))
+.find(['\r', '\n', '\x0C'])
.map_or(self.input.len(), |end| current.0 + end);
self.slice(SourcePosition(start)..SourcePosition(end))
}
@@ -720,9 +720,7 @@ fn check_for_source_map<'a>(tokenizer: &mut Tokenizer<'a>, contents: &'a str) {
// If there is a source map directive, extract the URL.
if contents.starts_with(directive) || contents.starts_with(directive_old) {
let contents = &contents[directive.len()..];
-tokenizer.source_map_url = contents
-.split(|c| c == ' ' || c == '\t' || c == '\x0C' || c == '\r' || c == '\n')
-.next()
+tokenizer.source_map_url = contents.split([' ', '\t', '\x0C', '\r', '\n']).next();
}

let directive = "# sourceURL=";
@@ -731,9 +729,7 @@ fn check_for_source_map<'a>(tokenizer: &mut Tokenizer<'a>, contents: &'a str) {
// If there is a source map directive, extract the URL.
if contents.starts_with(directive) || contents.starts_with(directive_old) {
let contents = &contents[directive.len()..];
-tokenizer.source_url = contents
-.split(|c| c == ' ' || c == '\t' || c == '\x0C' || c == '\r' || c == '\n')
-.next()
+tokenizer.source_url = contents.split([' ', '\t', '\x0C', '\r', '\n']).next()
}
}

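The tokenizer.rs changes above swap closure predicates for char-array patterns in `rfind`, `find`, and `split`. Both forms go through the same `str` pattern machinery (arrays of `char` are accepted as patterns on reasonably recent Rust toolchains), so the behaviour should be unchanged. A small standalone check, with made-up file contents:

```rust
// Minimal sketch comparing the closure pattern the tokenizer used before
// with the char-array pattern it uses now. The input string is made up.
fn main() {
    let line = "url.css\t# sourceMappingURL=map.css\n";

    // Closure form (old): match any newline-like character.
    let a = line.find(|c| matches!(c, '\r' | '\n' | '\x0C'));

    // Array form (new): same characters, same result.
    let b = line.find(['\r', '\n', '\x0C']);
    assert_eq!(a, b);

    // The same substitution applies to `split`; the first field is everything
    // up to the first whitespace-like separator.
    let first = line.split([' ', '\t', '\x0C', '\r', '\n']).next();
    assert_eq!(first, Some("url.css"));
}
```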