use itertools::{Itertools, PeekingNext};
use std::fmt;
use std::iter::Peekable;
use std::num::ParseIntError;
use std::str::FromStr;
use libreda_stream_parser::{Lexer, ParserError};
/// Errors that can occur while parsing LEF/DEF input.
#[derive(Clone, Debug)]
pub enum LefDefParseError {
/// Error propagated from the underlying stream parser.
ParserError(ParserError<char>),
/// An illegal character was encountered in the input.
InvalidCharacter,
/// Input ended while more tokens were expected.
UnexpectedEndOfFile,
/// Got one token but expected another: `(actual, expected)`.
UnexpectedToken(String, String),
/// A token that the parser does not recognize.
UnknownToken(String),
/// A literal value that could not be interpreted.
InvalidLiteral(String),
/// The pair of bus-bit delimiter characters is not allowed.
IllegalBusBitChars(char, char),
/// Feature is recognized but not implemented; the payload names it.
NotImplemented(&'static str),
/// Reference to a property that was never defined.
UndefinedProperty(String),
/// Failure to parse an integer literal.
ParseIntError(ParseIntError),
/// Catch-all error with a static description.
Other(&'static str),
}
impl From<ParserError<char>> for LefDefParseError {
fn from(e: ParserError<char>) -> Self {
Self::ParserError(e)
}
}
impl fmt::Display for LefDefParseError {
    /// Render a human-readable, one-line description of the parse error.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            LefDefParseError::ParserError(e) => write!(f, "{}", e),
            LefDefParseError::InvalidCharacter => write!(f, "Invalid character."),
            LefDefParseError::UnexpectedEndOfFile => write!(f, "Unexpected end of file."),
            LefDefParseError::UnexpectedToken(actual, expected) => {
                write!(f, "Unexpected token. '{}' instead of '{}'", actual, expected)
            }
            LefDefParseError::UnknownToken(t) => write!(f, "Unknown token: '{}'.", t),
            LefDefParseError::InvalidLiteral(n) => write!(f, "Invalid literal: '{}'.", n),
            LefDefParseError::IllegalBusBitChars(a, b) => {
                write!(f, "Illegal bus bit chars: '{} {}'.", a, b)
            }
            LefDefParseError::NotImplemented(n) => write!(f, "Not implemented: '{}'.", n),
            LefDefParseError::UndefinedProperty(p) => write!(f, "Undefined property: '{}'.", p),
            // Fix: terminate with '.' for consistency with every other message.
            LefDefParseError::ParseIntError(e) => write!(f, "Illegal integer: '{}'.", e),
            LefDefParseError::Other(msg) => write!(f, "'{}'.", msg),
        }
    }
}
impl From<ParseIntError> for LefDefParseError {
fn from(e: ParseIntError) -> Self {
Self::ParseIntError(e)
}
}
/// Tokenizer for LEF/DEF files.
///
/// Splits a character stream into whitespace-separated tokens,
/// skipping `#` line comments and treating a quoted string
/// (single or double quotes) as one token without the quotes.
pub struct LefDefLexer {}
impl Lexer for LefDefLexer {
type Char = char;
/// Consume characters from `iter` until one full token has been read,
/// emitting the token's characters through `output`.
///
/// Returns `Ok(())` both after emitting a token and when the input is
/// exhausted (in which case nothing may have been emitted).
fn consume_next_token(
&mut self,
iter: &mut (impl Iterator<Item = char> + PeekingNext),
mut output: impl FnMut(char),
) -> Result<(), ParserError<char>> {
loop {
// Skip leading whitespace before the next token.
let _n = iter.peeking_take_while(|c| c.is_whitespace()).count();
if let Some(c) = iter.peeking_next(|_| true) {
debug_assert!(!c.is_whitespace());
match c {
'#' => {
// Line comment: drop everything up to the line break,
// then loop again to look for the next token.
iter.peeking_take_while(|&c| c != '\n' && c != '\r').count();
}
'"' | '\'' => {
// Quoted token: emit characters up to the matching
// unescaped quote; the quotes themselves are not emitted.
let quote_char = c;
let mut prev = None;
while let Some(c) = iter.next() {
// A quote preceded by '\' does not terminate the token.
if prev != Some('\\') && c == quote_char {
break;
}
// NOTE(review): escape backslashes are emitted verbatim
// (`\"` yields `\"`, not `"`) — confirm this is intended.
output(c);
prev = Some(c);
}
// NOTE(review): an unterminated quote ends the token
// silently at end-of-input instead of raising an error.
return Ok(());
}
_ => {
// Unquoted token: emit characters until unescaped whitespace.
let mut prev = Some(c);
output(c);
while let Some(c) = iter.next() {
if prev != Some('\\') && c.is_whitespace() {
break;
}
output(c);
prev = Some(c);
}
return Ok(());
}
}
} else {
// Input exhausted: nothing more to tokenize.
return Ok(());
}
}
}
}
#[test]
fn test_read_token() {
    // Checks that `#` line comments are skipped, quoted strings form a
    // single token (without quotes), and whitespace separates plain tokens.
    let data = r#"
# Comment 1
# Comment 2
token1
# Comment 3
token2 token3
"quoted token"
token4
"#;
    // `inspect` echoes consumed characters, which helps debug failures.
    let iter = data.chars().inspect(|c| print!("{}", c)).peekable();
    let mut tk = libreda_stream_parser::tokenize(iter, LefDefLexer {});
    tk.advance().unwrap();
    tk.expect_str("token1").unwrap();
    tk.expect_str("token2").unwrap();
    tk.expect_str("token3").unwrap();
    tk.expect_str("quoted token").unwrap();
    tk.expect_str("token4").unwrap();
    // All input consumed: no current token remains.
    assert!(tk.current_token_ref().is_none());
}