Skip to content

Commit 81a63e6

Browse files
committed
testing out with changes to logos 0.11-rc2.
1 parent 9dfc367 commit 81a63e6

File tree

9 files changed

+57
-139
lines changed

9 files changed

+57
-139
lines changed

Cargo.toml

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -9,8 +9,9 @@ edition = "2018"
99
[dependencies]
1010
lalrpop = "0.18"
1111
lalrpop-util = "0.18"
12-
logos = "0.10.0"
12+
logos = "0.11.0-rc2"
1313
regex = "1"
14+
logos-derive = "0.11.0-rc2"
1415
codespan-reporting = "0.9"
1516
structopt = "0.3.12"
1617

src/codespan.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -51,7 +51,7 @@ pub fn codespan<'a>(
5151
.with_message("Extra token"),
5252
User { error } => Diagnostic::error()
5353
.with_message("Invalid token")
54-
.with_labels(vec![Label::primary(file_id, error.0.clone())])
54+
.with_labels(vec![Label::primary(file_id, *error..*error)])
5555
.with_message("Invalid token"),
5656
};
5757

src/error.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
1-
use crate::token_wrap::*;
1+
use crate::lex::Token;
22

3-
pub type Error<'a> = lalrpop_util::ParseError<usize, Token<'a>, LexicalError>;
3+
pub type Error<'a> = lalrpop_util::ParseError<usize, Token<'a>, usize>;
44

55
#[derive(Debug)]
66
pub enum MainError {

src/lex.rs

Lines changed: 8 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,9 @@
1+
pub use logos::Lexer;
12
use logos::Logos;
23

3-
// Notably absent from the above, present in the below are
4-
// Whitespace, EOF, LexError
5-
#[derive(Logos, Debug)]
6-
pub enum Token {
7-
#[end]
8-
EOF,
9-
4+
#[derive(Logos, Debug, Clone, PartialEq)]
5+
#[logos(trivia = r"(\p{Whitespace}+|#.*\n)")]
6+
pub enum Token<'a> {
107
#[token = "."]
118
Dot,
129

@@ -87,23 +84,17 @@ pub enum Token {
8784
// \x{1d62}-\x{1d6a}
8885
//
8986
// FancyNameAscii ↔ FancyNameUnicode
90-
#[regex = r"[\\][a-zA-Z][_a-zA-Z0-9]*"]
91-
FancyNameAscii,
92-
#[regex = r"[a-zA-Z\p{Greek}\x{1d49c}-\x{1d59f}\x{2100}-\x{214f}][_a-zA-Z0-9\x{207f}-\x{2089}\x{2090}-\x{209c}\x{1d62}-\x{1d6a}]*"]
93-
Name,
87+
#[regex(r"[\\][a-zA-Z][_a-zA-Z0-9]*", |lex| lex.slice())]
88+
FancyNameAscii(&'a str),
89+
#[regex(r"[a-zA-Z\p{Greek}\x{1d49c}-\x{1d59f}\x{2100}-\x{214f}][_a-zA-Z0-9\x{207f}-\x{2089}\x{2090}-\x{209c}\x{1d62}-\x{1d6a}]*", |lex| lex.slice())]
90+
Name(&'a str),
9491

9592
#[token = ":"]
9693
Colon,
9794

9895
#[token = ";"]
9996
Semi,
10097

101-
#[regex = r"#.*\n"]
102-
Comment,
103-
104-
#[regex = r"\p{Whitespace}+"]
105-
Whitespace,
106-
10798
#[error]
10899
LexError,
109100
}

src/main.rs

Lines changed: 11 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,6 @@ mod ast;
22
mod codespan;
33
mod error;
44
mod lex;
5-
mod token_wrap;
65

76
#[cfg(test)]
87
mod test;
@@ -12,9 +11,10 @@ mod test_util;
1211
use codespan_reporting::term::termcolor::StandardStream;
1312
use codespan_reporting::term::{self, ColorArg};
1413
use error::*;
14+
use logos::Logos;
1515
use std::io::Read;
1616
use structopt::StructOpt;
17-
use token_wrap::*;
17+
1818
#[derive(Debug, StructOpt)]
1919
#[structopt(name = "prop")]
2020
pub struct Opts {
@@ -59,8 +59,15 @@ fn main() -> Result<(), MainError> {
5959

6060
// Not really how i'd like this to be.
6161
buf.read_to_string(&mut s)?;
62-
let lexer = Tokens::from_string(&s);
63-
let parse_result = parser::propParser::new().parse(lexer);
62+
63+
let lex = lex::Token::lexer(&s).spanned();
64+
let parse_result = parser::propParser::new().parse(lex.map(|(t, r)| {
65+
if t == lex::Token::LexError {
66+
Err(r.start)
67+
} else {
68+
Ok((r.start, t, r.end))
69+
}
70+
}));
6471

6572
match parse_result {
6673
Err(e) => {

src/prop.lalrpop

Lines changed: 9 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,11 @@
1-
use crate::token_wrap;
1+
use crate::lex::Token;
22
use crate::ast::{Prop, Expr, Binding, Typ};
33
use std::rc::Rc;
4-
use token_wrap::*;
54
grammar<'a>;
65

76
extern {
87
type Location = usize;
9-
type Error = LexicalError;
8+
type Error = usize;
109

1110
enum Token<'a> {
1211
"⊥" => Token::Bot,
@@ -23,10 +22,16 @@ extern {
2322
")" => Token::RParen,
2423
":" => Token::Colon,
2524
";" => Token::Semi,
26-
name => Token::Name(<&'a str>),
25+
fancy_name_unicode => Token::Name(<&'a str>),
26+
fancy_name_ascii => Token::FancyNameAscii(<&'a str>),
2727
}
2828
}
2929

30+
name: &'a str = {
31+
fancy_name_unicode,
32+
fancy_name_ascii,
33+
}
34+
3035
pub prop = Semi<Binding>;
3136

3237
Binding: Rc<Binding> = {

src/test.rs

Lines changed: 13 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
use crate::error::*;
2-
use crate::token_wrap::*;
2+
use crate::lex;
33
use crate::{parser, test_util};
4+
use logos::Logos;
45

56
use unindent::unindent;
67

@@ -108,16 +109,24 @@ fn bad_ascii() -> Result<(), &'static str> {
108109

109110
let mut num_fail = 0;
110111
for s in invalid_source.iter() {
111-
let lexer = Tokens::from_string(&s);
112-
match parser::propParser::new().parse(lexer) {
112+
let lex = lex::Token::lexer(&s).spanned();
113+
let parse_result = parser::propParser::new().parse(lex.map(|(t, r)| {
114+
if t == lex::Token::LexError {
115+
Err(r.start)
116+
} else {
117+
Ok((r.start, t, r.end))
118+
}
119+
}));
120+
121+
match parse_result {
113122
Ok(_) => {
114123
// bad
115124
println!("parsed but shouldn't: {}", s);
116125
num_fail += 1;
117126
}
118127
Err(e) => {
119128
// good
120-
println!("expected error: {}", e);
129+
println!("expected error: {:?}", e);
121130
()
122131
}
123132
}

src/test_util.rs

Lines changed: 11 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1,19 +1,25 @@
11
use crate::codespan;
22
use crate::error::*;
3+
use crate::lex;
34
use crate::parser;
4-
use crate::token_wrap::*;
55
use codespan_reporting::term;
66
use codespan_reporting::term::termcolor::{ColorChoice, StandardStream};
7+
use logos::Logos;
78

89
pub fn do_test<'a>(sources: &[&'a str]) -> Result<(), Vec<(&'a str, Error<'a>)>> {
910
let (_pass, fail): (Vec<_>, Vec<_>) = sources
1011
.iter()
1112
.enumerate()
1213
.map(|(index, s)| {
13-
(
14-
index,
15-
parser::propParser::new().parse(Tokens::from_string(s)),
16-
)
14+
(index, {
15+
parser::propParser::new().parse(lex::Token::lexer(&s).spanned().map(|(t, r)| {
16+
if t == lex::Token::LexError {
17+
Err(r.start)
18+
} else {
19+
Ok((r.start, t, r.end))
20+
}
21+
}))
22+
})
1723
})
1824
.partition(|(_, r)| r.is_ok());
1925
if fail.is_empty() {
@@ -39,8 +45,6 @@ pub fn expect_success<'a>(result: Result<(), Vec<(&'a str, Error<'a>)>>) -> Resu
3945
let config = codespan_reporting::term::Config::default();
4046
let (files, diagnostic) = codespan::codespan("foo", source, error);
4147

42-
eprintln!("capture stderr?");
43-
println!("capture stdout?");
4448
term::emit(&mut writer.lock(), &config, &files, &diagnostic)?;
4549
}
4650
Err(MainError::SomethingWentAwryAndStuffWasPrinted)

src/token_wrap.rs

Lines changed: 0 additions & 99 deletions
This file was deleted.

0 commit comments

Comments (0)