Update logos to 0.14.
tmpfs committed Feb 9, 2024
1 parent ab3442a commit 771845f
Showing 4 changed files with 72 additions and 49 deletions.
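Background notes (editorial, not part of the commit): the logos 0.13/0.14 releases changed the derive API in a few breaking ways that explain the diff below. The #[error] catch-all variant was removed; a custom error type is declared with #[logos(error = ...)] instead, and unmatched input is reported as an Err item, so Lexer::next() now yields Option<Result<Token, Error>> rather than Option<Token>. Overlapping patterns also take explicit priority values, and the export_derive feature re-exports the Logos derive macro so a separate logos-derive dependency is not needed. A minimal sketch of the new declaration style, assuming a catch-all Text token like the one in this crate:

    // Illustrative sketch only (logos 0.14 style); LexError here mirrors the
    // error type this commit adds in src/error.rs.
    use logos::Logos;

    #[derive(Debug, Clone, Default, PartialEq)]
    pub enum LexError {
        #[default]
        Other,
    }

    #[derive(Logos, Debug, PartialEq)]
    #[logos(error = LexError)]            // replaces the old #[error] variant
    pub enum Token {
        #[regex("\\r?\\n", priority = 3)] // explicit priority where patterns overlap
        NewLine,

        #[regex(".", priority = 0)]       // lowest-priority catch-all text token
        Text,
    }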
Cargo.toml: 9 changes (7 additions, 2 deletions)
@@ -9,7 +9,7 @@ license = "MIT OR Apache-2.0"

[dependencies]
thiserror = "1"
-logos = "0.12"
+logos = { version = "0.14", features = ["export_derive"] }
uriparse = "0.6.4"
time = { version = "0.3.19", features = ["parsing", "formatting"] }
unicode-segmentation="1"
@@ -22,7 +22,12 @@ base64 = "0.21.0"

[features]
default = ["zeroize"]
-serde = ["dep:serde", "time/serde-human-readable", "language-tags?/serde", "uriparse/serde"]
+serde = [
+"dep:serde",
+"time/serde-human-readable",
+"language-tags?/serde",
+"uriparse/serde",
+]
zeroize = ["dep:zeroize"]
mime = ["dep:mime"]
language-tags = ["dep:language-tags"]
src/error.rs: 14 changes (14 additions, 0 deletions)
@@ -1,4 +1,14 @@
use thiserror::Error;

+/// Error lexing a vcard string.
+#[derive(Debug, Error, PartialEq, Clone, Default)]
+#[doc(hidden)]
+pub enum LexError {
+/// Generic lex error.
+#[default]
+#[error("vcard lex error")]
+Other,
+}
+
/// Errors generated by the vCard library.
#[derive(Debug, Error)]
@@ -170,4 +180,8 @@ pub enum Error {
/// Error generated decoding from base64.
#[error(transparent)]
Base64(#[from] base64::DecodeError),

+/// Error generated during lexing.
+#[error(transparent)]
+LexError(#[from] LexError),
}
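Editorial note: because the new variant is marked #[error(transparent)] with #[from], thiserror generates a From<LexError> impl for Error, so lexer failures can be bubbled up with the ? operator. A hypothetical illustration (the function name is made up):

    // Hypothetical sketch of the generated conversion in use.
    fn propagate(step: std::result::Result<(), LexError>) -> std::result::Result<(), Error> {
        step?; // LexError converts into Error::LexError via the derived From impl
        Ok(())
    }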
src/iter.rs: 2 changes (1 addition, 1 deletion)
@@ -25,7 +25,7 @@ impl<'s> VcardIterator<'s> {
let mut lex = self.parser.lexer();
lex.bump(offset);
while let Some(first) = lex.next() {
-if first == Token::NewLine {
+if first == Ok(Token::NewLine) {
continue;
} else {
return self.parser.parse_one(&mut lex, Some(first));
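The src/parser.rs changes below follow from the same API shift: every item the lexer produces is now a Result, so equality checks become comparisons against Ok(Token::...) and patterns match Ok(...) or Err(...). Roughly, the consumption pattern looks like this sketch (editorial, assuming the crate's Token and LexError types):

    // Illustrative only: lexer items are Result<Token, LexError>, not bare tokens.
    fn skip_leading_newlines(
        lex: &mut logos::Lexer<'_, Token>,
    ) -> std::result::Result<Option<Token>, LexError> {
        while let Some(token) = lex.next() {
            match token {
                Ok(Token::NewLine) => continue,      // was: token == Token::NewLine
                Ok(other) => return Ok(Some(other)), // first meaningful token
                Err(err) => return Err(err),         // surface the lex error
            }
        }
        Ok(None)
    }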
src/parser.rs: 96 changes (50 additions, 46 deletions)
@@ -11,11 +11,15 @@ use language_tags::LanguageTag;
use mime::Mime;

use crate::{
+error::LexError,
escape_control, helper::*, name::*, parameter::*, property::*,
unescape_value, Error, Result, Vcard,
};

+type LexResult<T> = std::result::Result<T, LexError>;
+
#[derive(Logos, Debug, PartialEq)]
+#[logos(error = LexError)]
pub(crate) enum Token {
#[regex("(?i:BEGIN:VCARD)")]
Begin,
@@ -67,19 +71,19 @@ pub(crate) enum Token {
#[regex("(?i:\\\\n)")]
EscapedNewLine,

-#[regex("\\r?\\n")]
+#[regex("\\r?\\n", priority = 3)]
NewLine,

#[regex("[[:blank:]]", priority = 2)]
WhiteSpace,

-#[regex("[[:cntrl:]]")]
+#[regex("[[:cntrl:]]", priority = 1)]
Control,

#[regex("(?i:END:VCARD)")]
End,

-#[error]
+#[regex(".", priority = 0)]
Text,
}

@@ -103,7 +107,7 @@ impl<'s> VcardParser<'s> {
while let Some(first) = lex.next() {
// Allow leading newlines and newlines between
// vCard definitions
-if first == Token::NewLine {
+if first == Ok(Token::NewLine) {
continue;
}

@@ -128,7 +132,7 @@ impl<'s> VcardParser<'s> {
pub(crate) fn parse_one(
&self,
lex: &mut Lexer<'_, Token>,
-first: Option<Token>,
+first: Option<LexResult<Token>>,
) -> Result<(Vcard, Range<usize>)> {
self.assert_token(first.as_ref(), &[Token::Begin])?;
self.assert_token(lex.next().as_ref(), &[Token::NewLine])?;
@@ -151,10 +155,10 @@ impl<'s> VcardParser<'s> {
) -> Result<()> {
while let Some(first) = lex.next() {
//println!("{:#?} {}", first, &self.source[lex.span()]);
-if first == Token::End {
+if first == Ok(Token::End) {
break;
}
-if let Token::Version = first {
+if let Ok(Token::Version) = first {
return Err(Error::VersionMisplaced);
}

@@ -181,7 +185,7 @@ impl<'s> VcardParser<'s> {
fn parse_property(
&self,
lex: &mut Lexer<'_, Token>,
-token: Token,
+token: LexResult<Token>,
card: &mut Vcard,
) -> Result<()> {
let mut group: Option<String> = None;
@@ -197,7 +201,7 @@ impl<'s> VcardParser<'s> {
let delimiter = lex.next();

if let Some(delimiter) = delimiter {
-if delimiter == Token::ParameterDelimiter {
+if delimiter == Ok(Token::ParameterDelimiter) {
let parameters = self.parse_parameters(lex, name)?;
self.parse_property_by_name(
lex,
@@ -207,7 +211,7 @@ impl<'s> VcardParser<'s> {
Some(parameters),
group,
)?;
-} else if delimiter == Token::PropertyDelimiter {
+} else if delimiter == Ok(Token::PropertyDelimiter) {
self.parse_property_by_name(
lex, token, card, name, None, group,
)?;
@@ -245,13 +249,13 @@ impl<'s> VcardParser<'s> {
) -> Result<Parameters> {
let property_upper_name = name.to_uppercase();
let mut params: Parameters = Default::default();
-let mut next: Option<Token> = lex.next();
+let mut next: Option<LexResult<Token>> = lex.next();

while let Some(token) = next.take() {
-if token == Token::ParameterKey
-|| token == Token::ExtensionName
-|| token == Token::TimeZone
-|| token == Token::Geo
+if token == Ok(Token::ParameterKey)
+|| token == Ok(Token::ExtensionName)
+|| token == Ok(Token::TimeZone)
+|| token == Ok(Token::Geo)
{
let source = lex.source();
let span = lex.span();
@@ -266,7 +270,7 @@ impl<'s> VcardParser<'s> {
let (value, next_token, quoted) =
self.parse_parameter_value(lex)?;

-if token == Token::ExtensionName {
+if token == Ok(Token::ExtensionName) {
self.add_extension_parameter(
parameter_name,
value,
@@ -408,9 +412,9 @@ impl<'s> VcardParser<'s> {
}
}

-if next_token == Token::PropertyDelimiter {
+if next_token == Ok(Token::PropertyDelimiter) {
break;
-} else if next_token == Token::ParameterKey {
+} else if next_token == Ok(Token::ParameterKey) {
next = Some(next_token);
} else {
next = lex.next();
@@ -426,39 +430,39 @@ impl<'s> VcardParser<'s> {
fn parse_parameter_value<'a>(
&self,
lex: &'a mut Lexer<'_, Token>,
-) -> Result<(String, Token, bool)> {
+) -> Result<(String, LexResult<Token>, bool)> {
let mut first_range: Option<Range<usize>> = None;
let mut quoted = false;
let mut is_folded_or_escaped = false;

while let Some(mut token) = lex.next() {
let span = lex.span();

-if token == Token::Control {
+if token == Ok(Token::Control) {
return Err(Error::ControlCharacter(escape_control(
lex.slice(),
)));
}

-if token == Token::FoldedLine
-|| token == Token::EscapedNewLine
-|| token == Token::EscapedComma
-|| token == Token::EscapedBackSlash
+if token == Ok(Token::FoldedLine)
+|| token == Ok(Token::EscapedNewLine)
+|| token == Ok(Token::EscapedComma)
+|| token == Ok(Token::EscapedBackSlash)
{
is_folded_or_escaped = true;
}

let completed = if first_range.is_some() && quoted {
-token == Token::DoubleQuote
+token == Ok(Token::DoubleQuote)
} else {
-token == Token::PropertyDelimiter
-|| token == Token::ParameterDelimiter
+token == Ok(Token::PropertyDelimiter)
+|| token == Ok(Token::ParameterDelimiter)
//|| token == Token::ParameterKey
};

if first_range.is_none() {
first_range = Some(span.clone());
-if token == Token::DoubleQuote {
+if token == Ok(Token::DoubleQuote) {
quoted = true;
}
}
@@ -476,13 +480,13 @@ impl<'s> VcardParser<'s> {

// Must consume the next token
if quoted {
-token = if let Some(token) = lex.next() {
+token = if let Some(Ok(token)) = lex.next() {
if token != Token::PropertyDelimiter
&& token != Token::ParameterDelimiter
{
return Err(Error::DelimiterExpected);
}
-token
+Ok(token)
} else {
return Err(Error::TokenExpected);
};
@@ -504,7 +508,7 @@ impl<'s> VcardParser<'s> {
fn parse_property_by_name(
&self,
lex: &mut Lexer<'_, Token>,
-token: Token,
+token: LexResult<Token>,
card: &mut Vcard,
name: &str,
parameters: Option<Parameters>,
@@ -514,7 +518,7 @@ impl<'s> VcardParser<'s> {

let upper_name = name.to_uppercase();

-if token == Token::ExtensionName || upper_name.starts_with("X-") {
+if token == Ok(Token::ExtensionName) || upper_name.starts_with("X-") {
self.parse_extension_property_by_name(
card, name, value, parameters, group,
)?;
@@ -1006,22 +1010,22 @@ impl<'s> VcardParser<'s> {
first_range = Some(span.clone());
}

-if token == Token::Control {
+if token == Ok(Token::Control) {
return Err(Error::ControlCharacter(escape_control(
lex.slice(),
)));
}

-if token == Token::FoldedLine
-|| token == Token::EscapedSemiColon
-|| token == Token::EscapedComma
-|| token == Token::EscapedNewLine
-|| token == Token::EscapedBackSlash
+if token == Ok(Token::FoldedLine)
+|| token == Ok(Token::EscapedSemiColon)
+|| token == Ok(Token::EscapedComma)
+|| token == Ok(Token::EscapedNewLine)
+|| token == Ok(Token::EscapedBackSlash)
{
needs_transform = true;
}

-if token == Token::NewLine {
+if token == Ok(Token::NewLine) {
last_range = Some(span);
break;
}
@@ -1033,18 +1037,18 @@ impl<'s> VcardParser<'s> {
if needs_transform {
let mut value = String::new();
for (token, span) in tokens {
-if token == Token::FoldedLine {
+if token == Ok(Token::FoldedLine) {
continue;
-} else if token == Token::EscapedComma {
+} else if token == Ok(Token::EscapedComma) {
value.push(',');
continue;
-} else if token == Token::EscapedSemiColon {
+} else if token == Ok(Token::EscapedSemiColon) {
value.push(';');
continue;
-} else if token == Token::EscapedNewLine {
+} else if token == Ok(Token::EscapedNewLine) {
value.push('\n');
continue;
-} else if token == Token::EscapedBackSlash {
+} else if token == Ok(Token::EscapedBackSlash) {
value.push('\\');
continue;
}
@@ -1110,10 +1114,10 @@ impl<'s> VcardParser<'s> {
/// Assert we have an expected token.
fn assert_token(
&self,
-value: Option<&Token>,
+value: Option<&LexResult<Token>>,
expected: &[Token],
) -> Result<()> {
-if let Some(value) = value {
+if let Some(Ok(value)) = value {
if expected.contains(value) {
Ok(())
} else {