Browse Source

Add parser

main
raffitz 3 years ago
parent
commit
eda539ebc4
Signed by: raffitz
GPG Key ID: BB3596BD0A31252D
  1. 163
      src/main.rs

163
src/main.rs

@@ -1,5 +1,6 @@
use clap::{crate_authors, crate_description, crate_name, crate_version, App, Arg}; use clap::{crate_authors, crate_description, crate_name, crate_version, App, Arg};
use logos::{Lexer, Logos}; use logos::Logos;
use pomelo::pomelo;
use std::fs; use std::fs;
use std::io::{Error, ErrorKind, Read}; use std::io::{Error, ErrorKind, Read};
@@ -17,12 +18,12 @@ fn io_error(err: Error, path: &str) -> String {
} }
} }
fn read_var(lex: &mut Lexer<Token>) -> Option<char> { pomelo! {
lex.slice().chars().next() %include {
} use logos::{Lexer, Logos};
#[derive(Debug, PartialEq)] #[derive(Debug, PartialEq)]
enum FunctionType{ pub enum FunctionType{
Sin, Sin,
Cos, Cos,
Tan, Tan,
@@ -36,82 +37,69 @@ enum FunctionType{
Log(f64), Log(f64),
} }
#[derive(Logos, Debug, PartialEq)] fn read_var(lex: &mut Lexer<Token>) -> Option<char> {
enum Token { lex.slice().chars().next()
#[regex("s|x|y|z|r|ρ|θ|φ", read_var)] }
Var(char), }
#[token("e", |_| std::f64::consts::E)]
#[token("pi", |_| std::f64::consts::PI)] %token #[derive(Logos, Debug, PartialEq)]
#[regex("pi/2\\s", |_| std::f64::consts::FRAC_PI_2)] pub enum Token {};
#[regex("pi/3\\s", |_| std::f64::consts::FRAC_PI_3)]
#[regex("pi/4\\s", |_| std::f64::consts::FRAC_PI_4)] %type #[regex("s|x|y|z|r|ρ|θ|φ", read_var)] Var char;
#[regex("pi/6\\s", |_| std::f64::consts::FRAC_PI_6)]
#[regex("pi/8\\s", |_| std::f64::consts::FRAC_PI_8)] %type #[token("e", |_| std::f64::consts::E)] #[token("pi", |_| std::f64::consts::PI)] #[regex("pi/2\\s", |_| std::f64::consts::FRAC_PI_2)] #[regex("pi/3\\s", |_| std::f64::consts::FRAC_PI_3)] #[regex("pi/4\\s", |_| std::f64::consts::FRAC_PI_4)] #[regex("pi/6\\s", |_| std::f64::consts::FRAC_PI_6)] #[regex("pi/8\\s", |_| std::f64::consts::FRAC_PI_8)] #[token("2pi", |_| std::f64::consts::TAU)] #[token("π", |_| std::f64::consts::PI)] #[regex("π/2\\s", |_| std::f64::consts::FRAC_PI_2)] #[regex("π/3\\s", |_| std::f64::consts::FRAC_PI_3)] #[regex("π/4\\s", |_| std::f64::consts::FRAC_PI_4)] #[regex("π/6\\s", |_| std::f64::consts::FRAC_PI_6)] #[regex("π/8\\s", |_| std::f64::consts::FRAC_PI_8)] #[token("2π", |_| std::f64::consts::TAU)] #[token("tau", |_| std::f64::consts::TAU)] #[token("τ", |_| std::f64::consts::TAU)] #[regex("√2\\s", |_| std::f64::consts::SQRT_2)] #[regex(r"[+-]?(?:\d*\.)?\d+", |lex| lex.slice().parse())] Float f64;
#[token("2pi", |_| std::f64::consts::TAU)]
#[token("π", |_| std::f64::consts::PI)] %type #[token("+")] Sum;
#[regex("π/2\\s", |_| std::f64::consts::FRAC_PI_2)] %type #[token("-")] Subtraction;
#[regex("π/3\\s", |_| std::f64::consts::FRAC_PI_3)] %type #[token("/")] Quotient;
#[regex("π/4\\s", |_| std::f64::consts::FRAC_PI_4)] %type #[token("*")] Product;
#[regex("π/6\\s", |_| std::f64::consts::FRAC_PI_6)] %type #[token("^")] Power;
#[regex("π/8\\s", |_| std::f64::consts::FRAC_PI_8)]
#[token("2π", |_| std::f64::consts::TAU)] %type #[regex("=|<|>|≤|≥", read_var)] #[regex("<=", |_| '≤')] #[regex(">=", |_| '≥')] Qualifier char;
#[token("tau", |_| std::f64::consts::TAU)]
#[token("τ", |_| std::f64::consts::TAU)] %type #[regex("⋀|⋁|⊻|⊼|⊽", read_var)] #[regex("⋂|∧|and|AND|&&", |_| '⋀')] #[regex("∪|∨|v|or|OR|\\|\\|", |_| '⋁')] #[regex("⩒|⩛|⊕|⩡|xor|XOR", |_| '⊻')] #[regex("⩃|nand|NAND", |_| '⊼')] #[regex("⩂|nor|NOR", |_| '⊽')] Junction char;
#[regex("√2\\s", |_| std::f64::consts::SQRT_2)]
#[regex(r"[+-]?(?:\d*\.)?\d+", |lex| lex.slice().parse())] %type #[token("sin", |_| FunctionType::Sin)] #[token("cos", |_| FunctionType::Cos)] #[token("tan", |_| FunctionType::Tan)] #[token("asin", |_| FunctionType::Asin)] #[token("acos", |_| FunctionType::Acos)] #[token("atan", |_| FunctionType::Atan)] #[token("sign", |_| FunctionType::Sign)] #[token("abs", |_| FunctionType::Abs)] #[token("sqrt", |_| FunctionType::Sqrt)] #[token("√", |_| FunctionType::Sqrt)] #[token("exp", |_| FunctionType::Exp)] #[token("ln", |_| FunctionType::Log(1.0))] #[token("log", |_| FunctionType::Log(std::f64::consts::LN_10))] Function FunctionType;
Float(f64),
%type #[token("(")] LParen;
#[regex("\\+|-|/|\\*|\\^", read_var)] %type #[token(")")] RParen;
Operator(char),
%type #[regex(r"\n+")] LineEnd;
#[regex("=|<|>|≤|≥", read_var)]
#[regex("<=", |_| '≤')] %type #[regex("\\\\n", logos::skip)] #[regex("#.*\\n", logos::skip)] #[regex("//.*\\n", logos::skip)] #[regex(r"[ \t\f]+", logos::skip)] #[error] Error;
#[regex(">=", |_| '≥')]
Qualifier(char), %left Junction;
%nonassoc Qualifier;
#[regex("⋀|⋁|⊻|⊼|⊽", read_var)] %left Sum Subtraction;
#[regex("⋂|∧|and|AND|&&", |_| '⋀')] %left Product Quotient;
#[regex("∪|∨|v|or|OR|\\|\\|", |_| '⋁')] %right Power;
#[regex("⩒|⩛|⊕|⩡|xor|XOR", |_| '⊻')] %right Function;
#[regex("⩃|nand|NAND", |_| '⊼')] %left LineEnd;
#[regex("⩂|nor|NOR", |_| '⊽')]
Junction(char), input ::= limit LineEnd limit LineEnd limit LineEnd metajuncture;
input ::= limit LineEnd limit LineEnd limit LineEnd metajuncture LineEnd;
#[token("sin", |_| FunctionType::Sin)] input ::= LineEnd limit LineEnd limit LineEnd limit LineEnd metajuncture LineEnd;
#[token("cos", |_| FunctionType::Cos)] input ::= LineEnd limit LineEnd limit LineEnd limit LineEnd metajuncture;
#[token("tan", |_| FunctionType::Tan)] limit ::= expr Qualifier Var Qualifier expr;
#[token("asin", |_| FunctionType::Asin)] quality ::= expr Qualifier expr;
#[token("acos", |_| FunctionType::Acos)] juncture ::= quality;
#[token("atan", |_| FunctionType::Atan)] juncture ::= juncture Junction juncture;
#[token("sign", |_| FunctionType::Sign)] metajuncture ::= juncture;
#[token("abs", |_| FunctionType::Abs)] metajuncture ::= metajuncture LineEnd metajuncture;
#[token("sqrt", |_| FunctionType::Sqrt)]
#[token("√", |_| FunctionType::Sqrt)] expr ::= expr Sum expr;
#[token("exp", |_| FunctionType::Exp)] expr ::= expr Subtraction expr;
#[token("ln", |_| FunctionType::Log(1.0))] expr ::= expr Product expr;
#[token("log", |_| FunctionType::Log(std::f64::consts::LN_10))] expr ::= expr Quotient expr;
Function(FunctionType), expr ::= expr Power expr;
expr ::= Function expr;
#[token("(")] expr ::= LParen expr RParen;
LParen, expr ::= Var;
expr ::= Float;
#[token(")")]
RParen,
#[token("\n")]
LineEnd,
#[regex("#.*\\n", logos::skip)]
#[regex("//.*\\n", logos::skip)]
#[regex(r"[ \t\f]+", logos::skip)]
Whitespace,
#[error]
Error,
} }
fn main() { fn main() -> Result<(), ()> {
let matches = App::new(crate_name!()) let matches = App::new(crate_name!())
.version(crate_version!()) .version(crate_version!())
.author(crate_authors!()) .author(crate_authors!())
@@ -151,20 +139,27 @@ fn main() {
) )
.get_matches(); .get_matches();
let scale = matches.value_of("scale").map(|s| s.parse::<i32>().unwrap()); //let scale = matches.value_of("scale").map(|s| s.parse::<i32>().unwrap());
let mut object_description = fs::File::open(matches.value_of("FILE").unwrap()).unwrap(); let mut object_description = fs::File::open(matches.value_of("FILE").unwrap()).unwrap();
let mut data = String::new(); let mut data = String::new();
if let Ok(size) = object_description.read_to_string(&mut data) { if let Ok(_) = object_description.read_to_string(&mut data) {
let lex = Token::lexer(&data); let lex = parser::Token::lexer(&data);
let mut p = parser::Parser::new();
for token in lex { for token in lex {
print!("{:?} ", token); p.parse(token)?;
//print!("{:?} ", token);
} }
println!("\nRead {} bytes, scale is {}", size, scale.unwrap_or(1)); let tree = p.end_of_input()?;
println!("{:?}",tree);
//println!("\nRead {} bytes, scale is {}", size, scale.unwrap_or(1));
} }
Ok(())
//println!("Scale was read and is <{}>", scale.unwrap_or(1)); //println!("Scale was read and is <{}>", scale.unwrap_or(1));
} }

Loading…
Cancel
Save