
Add parser

main
raffitz committed 3 years ago
commit eda539ebc4
Signed by: raffitz
GPG Key ID: BB3596BD0A31252D
1 changed file: src/main.rs (189)

src/main.rs

@@ -1,5 +1,6 @@
 use clap::{crate_authors, crate_description, crate_name, crate_version, App, Arg};
-use logos::{Lexer, Logos};
+use logos::Logos;
+use pomelo::pomelo;
 use std::fs;
 use std::io::{Error, ErrorKind, Read};
@@ -17,101 +18,88 @@ fn io_error(err: Error, path: &str) -> String {
     }
 }
 
-fn read_var(lex: &mut Lexer<Token>) -> Option<char> {
-    lex.slice().chars().next()
-}
-
-#[derive(Debug, PartialEq)]
-enum FunctionType{
-    Sin,
-    Cos,
-    Tan,
-    Asin,
-    Acos,
-    Atan,
-    Sign,
-    Abs,
-    Sqrt,
-    Exp,
-    Log(f64),
-}
-
-#[derive(Logos, Debug, PartialEq)]
-enum Token {
-    #[regex("s|x|y|z|r|ρ|θ|φ", read_var)]
-    Var(char),
-
-    #[token("e", |_| std::f64::consts::E)]
-    #[token("pi", |_| std::f64::consts::PI)]
-    #[regex("pi/2\\s", |_| std::f64::consts::FRAC_PI_2)]
-    #[regex("pi/3\\s", |_| std::f64::consts::FRAC_PI_3)]
-    #[regex("pi/4\\s", |_| std::f64::consts::FRAC_PI_4)]
-    #[regex("pi/6\\s", |_| std::f64::consts::FRAC_PI_6)]
-    #[regex("pi/8\\s", |_| std::f64::consts::FRAC_PI_8)]
-    #[token("2pi", |_| std::f64::consts::TAU)]
-    #[token("π", |_| std::f64::consts::PI)]
-    #[regex("π/2\\s", |_| std::f64::consts::FRAC_PI_2)]
-    #[regex("π/3\\s", |_| std::f64::consts::FRAC_PI_3)]
-    #[regex("π/4\\s", |_| std::f64::consts::FRAC_PI_4)]
-    #[regex("π/6\\s", |_| std::f64::consts::FRAC_PI_6)]
-    #[regex("π/8\\s", |_| std::f64::consts::FRAC_PI_8)]
-    #[token("", |_| std::f64::consts::TAU)]
-    #[token("tau", |_| std::f64::consts::TAU)]
-    #[token("τ", |_| std::f64::consts::TAU)]
-    #[regex("√2\\s", |_| std::f64::consts::SQRT_2)]
-    #[regex(r"[+-]?(?:\d*\.)?\d+", |lex| lex.slice().parse())]
-    Float(f64),
-
-    #[regex("\\+|-|/|\\*|\\^", read_var)]
-    Operator(char),
-
-    #[regex("=|<|>|≤|≥", read_var)]
-    #[regex("<=", |_| '≤')]
-    #[regex(">=", |_| '≥')]
-    Qualifier(char),
-
-    #[regex("⋀|⋁|⊻|⊼|⊽", read_var)]
-    #[regex("⋂|∧|and|AND|&&", |_| '⋀')]
-    #[regex("∪|∨|v|or|OR|\\|\\|", |_| '⋁')]
-    #[regex("⩒|⩛|⊕|⩡|xor|XOR", |_| '⊻')]
-    #[regex("⩃|nand|NAND", |_| '⊼')]
-    #[regex("⩂|nor|NOR", |_| '⊽')]
-    Junction(char),
-
-    #[token("sin", |_| FunctionType::Sin)]
-    #[token("cos", |_| FunctionType::Cos)]
-    #[token("tan", |_| FunctionType::Tan)]
-    #[token("asin", |_| FunctionType::Asin)]
-    #[token("acos", |_| FunctionType::Acos)]
-    #[token("atan", |_| FunctionType::Atan)]
-    #[token("sign", |_| FunctionType::Sign)]
-    #[token("abs", |_| FunctionType::Abs)]
-    #[token("sqrt", |_| FunctionType::Sqrt)]
-    #[token("", |_| FunctionType::Sqrt)]
-    #[token("exp", |_| FunctionType::Exp)]
-    #[token("ln", |_| FunctionType::Log(1.0))]
-    #[token("log", |_| FunctionType::Log(std::f64::consts::LN_10))]
-    Function(FunctionType),
-
-    #[token("(")]
-    LParen,
-
-    #[token(")")]
-    RParen,
-
-    #[token("\n")]
-    LineEnd,
-
-    #[regex("#.*\\n", logos::skip)]
-    #[regex("//.*\\n", logos::skip)]
-    #[regex(r"[ \t\f]+", logos::skip)]
-    Whitespace,
-
-    #[error]
-    Error,
+pomelo! {
+    %include {
+        use logos::{Lexer, Logos};
+
+        #[derive(Debug, PartialEq)]
+        pub enum FunctionType{
+            Sin,
+            Cos,
+            Tan,
+            Asin,
+            Acos,
+            Atan,
+            Sign,
+            Abs,
+            Sqrt,
+            Exp,
+            Log(f64),
+        }
+
+        fn read_var(lex: &mut Lexer<Token>) -> Option<char> {
+            lex.slice().chars().next()
+        }
+    }
+
+    %token #[derive(Logos, Debug, PartialEq)]
+    pub enum Token {};
+
+    %type #[regex("s|x|y|z|r|ρ|θ|φ", read_var)] Var char;
+
+    %type #[token("e", |_| std::f64::consts::E)] #[token("pi", |_| std::f64::consts::PI)] #[regex("pi/2\\s", |_| std::f64::consts::FRAC_PI_2)] #[regex("pi/3\\s", |_| std::f64::consts::FRAC_PI_3)] #[regex("pi/4\\s", |_| std::f64::consts::FRAC_PI_4)] #[regex("pi/6\\s", |_| std::f64::consts::FRAC_PI_6)] #[regex("pi/8\\s", |_| std::f64::consts::FRAC_PI_8)] #[token("2pi", |_| std::f64::consts::TAU)] #[token("π", |_| std::f64::consts::PI)] #[regex("π/2\\s", |_| std::f64::consts::FRAC_PI_2)] #[regex("π/3\\s", |_| std::f64::consts::FRAC_PI_3)] #[regex("π/4\\s", |_| std::f64::consts::FRAC_PI_4)] #[regex("π/6\\s", |_| std::f64::consts::FRAC_PI_6)] #[regex("π/8\\s", |_| std::f64::consts::FRAC_PI_8)] #[token("", |_| std::f64::consts::TAU)] #[token("tau", |_| std::f64::consts::TAU)] #[token("τ", |_| std::f64::consts::TAU)] #[regex("√2\\s", |_| std::f64::consts::SQRT_2)] #[regex(r"[+-]?(?:\d*\.)?\d+", |lex| lex.slice().parse())] Float f64;
+
+    %type #[token("+")] Sum;
+    %type #[token("-")] Subtraction;
+    %type #[token("/")] Quotient;
+    %type #[token("*")] Product;
+    %type #[token("^")] Power;
+
+    %type #[regex("=|<|>|≤|≥", read_var)] #[regex("<=", |_| '≤')] #[regex(">=", |_| '≥')] Qualifier char;
+
+    %type #[regex("⋀|⋁|⊻|⊼|⊽", read_var)] #[regex("⋂|∧|and|AND|&&", |_| '⋀')] #[regex("∪|∨|v|or|OR|\\|\\|", |_| '⋁')] #[regex("⩒|⩛|⊕|⩡|xor|XOR", |_| '⊻')] #[regex("⩃|nand|NAND", |_| '⊼')] #[regex("⩂|nor|NOR", |_| '⊽')] Junction char;
+
+    %type #[token("sin", |_| FunctionType::Sin)] #[token("cos", |_| FunctionType::Cos)] #[token("tan", |_| FunctionType::Tan)] #[token("asin", |_| FunctionType::Asin)] #[token("acos", |_| FunctionType::Acos)] #[token("atan", |_| FunctionType::Atan)] #[token("sign", |_| FunctionType::Sign)] #[token("abs", |_| FunctionType::Abs)] #[token("sqrt", |_| FunctionType::Sqrt)] #[token("", |_| FunctionType::Sqrt)] #[token("exp", |_| FunctionType::Exp)] #[token("ln", |_| FunctionType::Log(1.0))] #[token("log", |_| FunctionType::Log(std::f64::consts::LN_10))] Function FunctionType;
+
+    %type #[token("(")] LParen;
+    %type #[token(")")] RParen;
+
+    %type #[regex(r"\n+")] LineEnd;
+
+    %type #[regex("\\\\n", logos::skip)] #[regex("#.*\\n", logos::skip)] #[regex("//.*\\n", logos::skip)] #[regex(r"[ \t\f]+", logos::skip)] #[error] Error;
+
+    %left Junction;
+    %nonassoc Qualifier;
+    %left Sum Subtraction;
+    %left Product Quotient;
+    %right Power;
+    %right Function;
+    %left LineEnd;
+
+    input ::= limit LineEnd limit LineEnd limit LineEnd metajuncture;
+    input ::= limit LineEnd limit LineEnd limit LineEnd metajuncture LineEnd;
+    input ::= LineEnd limit LineEnd limit LineEnd limit LineEnd metajuncture LineEnd;
+    input ::= LineEnd limit LineEnd limit LineEnd limit LineEnd metajuncture;
+
+    limit ::= expr Qualifier Var Qualifier expr;
+
+    quality ::= expr Qualifier expr;
+
+    juncture ::= quality;
+    juncture ::= juncture Junction juncture;
+
+    metajuncture ::= juncture;
+    metajuncture ::= metajuncture LineEnd metajuncture;
+
+    expr ::= expr Sum expr;
+    expr ::= expr Subtraction expr;
+    expr ::= expr Product expr;
+    expr ::= expr Quotient expr;
+    expr ::= expr Power expr;
+    expr ::= Function expr;
+    expr ::= LParen expr RParen;
+    expr ::= Var;
+    expr ::= Float;
 }
 
-fn main() {
+fn main() -> Result<(), ()> {
     let matches = App::new(crate_name!())
         .version(crate_version!())
         .author(crate_authors!())
@@ -151,20 +139,27 @@ fn main() {
         )
         .get_matches();
 
-    let scale = matches.value_of("scale").map(|s| s.parse::<i32>().unwrap());
+    //let scale = matches.value_of("scale").map(|s| s.parse::<i32>().unwrap());
 
     let mut object_description = fs::File::open(matches.value_of("FILE").unwrap()).unwrap();
     let mut data = String::new();
 
-    if let Ok(size) = object_description.read_to_string(&mut data) {
-        let lex = Token::lexer(&data);
+    if let Ok(_) = object_description.read_to_string(&mut data) {
+        let lex = parser::Token::lexer(&data);
+        let mut p = parser::Parser::new();
         for token in lex {
-            print!("{:?} ", token);
+            p.parse(token)?;
+            //print!("{:?} ", token);
         }
-        println!("\nRead {} bytes, scale is {}", size, scale.unwrap_or(1));
+        let tree = p.end_of_input()?;
+        println!("{:?}",tree);
+        //println!("\nRead {} bytes, scale is {}", size, scale.unwrap_or(1));
     }
+
+    Ok(())
+
     //println!("Scale was read and is <{}>", scale.unwrap_or(1));
 }
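
For orientation, here is a minimal, hypothetical sketch of how the new grammar and the generated parser module appear to fit together: three `limit` lines (one bound per variable) followed by a `metajuncture` of inequalities, lexed with the logos-derived `Token` and pushed token by token into the pomelo-generated `Parser`, mirroring what the new `main` does. The sample description text and the helper name `parse_unit_sphere` are illustrative assumptions, not part of this commit; the function is assumed to live in the same src/main.rs, next to the `pomelo!` block.

    // Hypothetical example, not part of this commit: assumes it sits in
    // src/main.rs alongside the pomelo! block above.
    fn parse_unit_sphere() -> Result<(), ()> {
        use logos::Logos; // already imported at the top of the file

        // Guessed input shape: three `limit` lines bounding x, y and z,
        // then a `metajuncture` describing the solid (here a unit sphere).
        let data = "-1 ≤ x ≤ 1\n-1 ≤ y ≤ 1\n-1 ≤ z ≤ 1\nx*x + y*y + z*z ≤ 1\n";

        // Lex with the logos-derived Token and feed each token to the
        // pomelo-generated parser, exactly as the new main() does.
        let lex = parser::Token::lexer(data);
        let mut p = parser::Parser::new();
        for token in lex {
            p.parse(token)?;
        }
        let tree = p.end_of_input()?;
        println!("{:?}", tree);
        Ok(())
    }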
