Merge pull request #50 from hiratara/fix-free-vars
[BUG] Fix convert_classic_tokens
ljedrz authored Aug 17, 2023
2 parents a369ffd + 190c29f commit dfcf4e7
Showing 3 changed files with 73 additions and 4 deletions.
29 changes: 25 additions & 4 deletions src/parser.rs
@@ -7,6 +7,7 @@ use self::Token::*;
 pub use crate::term::Notation::*;
 use crate::term::Term::*;
 use crate::term::{abs, app, Notation, Term};
+use std::collections::VecDeque;
 use std::error::Error;
 use std::fmt;

@@ -138,12 +139,12 @@ pub fn tokenize_cla(input: &str) -> Result<Vec<CToken>, ParseError> {
 
 #[doc(hidden)]
 pub fn convert_classic_tokens(tokens: &[CToken]) -> Vec<Token> {
-    _convert_classic_tokens(tokens, &mut Vec::with_capacity(tokens.len()), &mut 0)
+    _convert_classic_tokens(tokens, &mut VecDeque::with_capacity(tokens.len()), &mut 0)
 }
 
 fn _convert_classic_tokens<'t>(
     tokens: &'t [CToken],
-    stack: &mut Vec<&'t str>,
+    stack: &mut VecDeque<&'t str>,
     pos: &mut usize,
 ) -> Vec<Token> {
     let mut output = Vec::with_capacity(tokens.len() - *pos);
@@ -153,7 +154,7 @@ fn _convert_classic_tokens<'t>(
         match *token {
             CLambda(ref name) => {
                 output.push(Lambda);
-                stack.push(name);
+                stack.push_back(name);
                 inner_stack_count += 1;
             }
             CLparen => {
@@ -170,7 +171,10 @@
                 if let Some(index) = stack.iter().rev().position(|t| t == name) {
                     output.push(Number(index + 1))
                 } else {
-                    output.push(Number(stack.len() + 1))
+                    // a new free variable
+                    stack.push_front(name);
+                    // index of the last element + 1
+                    output.push(Number(stack.len()))
                 }
             }
         }
@@ -358,6 +362,23 @@ mod tests {
         );
     }
 
+    #[test]
+    fn tokenization_success_classic_with_free_variables() {
+        let blc_dbr = "12";
+        let blc_cla = parse(blc_dbr, DeBruijn).unwrap().to_string();
+
+        let tokens_cla = tokenize_cla(&blc_cla);
+        let tokens_dbr = tokenize_dbr(blc_dbr);
+
+        assert!(tokens_cla.is_ok());
+        assert!(tokens_dbr.is_ok());
+
+        assert_eq!(
+            convert_classic_tokens(&tokens_cla.unwrap()),
+            tokens_dbr.unwrap()
+        );
+    }
+
     #[test]
     fn alternative_lambda_parsing() {
         assert_eq!(
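Why the change: with the old Vec-based stack, a free variable was emitted as Number(stack.len() + 1) but never recorded, so two distinct free variables at the same binder depth received the same index (e.g. both a and b in "a b" became 1), and later occurrences of the same name could disagree. The patch records each new free variable at the front of a VecDeque, below all bound names, so every occurrence resolves to one stable index. Below is a minimal standalone sketch of that numbering scheme (my reading of the patched logic, not the crate's public API):

use std::collections::VecDeque;

// Assign a de Bruijn index to `name`: bound names live at the back of
// the deque (innermost binder last), free names accumulate at the front.
fn debruijn_index<'a>(stack: &mut VecDeque<&'a str>, name: &'a str) -> usize {
    // Search from the innermost binder outwards.
    if let Some(i) = stack.iter().rev().position(|&n| n == name) {
        i + 1
    } else {
        // A new free variable: record it below every binder so that
        // later occurrences resolve to the same index.
        stack.push_front(name);
        stack.len()
    }
}

fn main() {
    let mut stack = VecDeque::new();
    // "a b" with no enclosing binders: a -> 1, b -> 2, and a again -> 1.
    assert_eq!(debruijn_index(&mut stack, "a"), 1);
    assert_eq!(debruijn_index(&mut stack, "b"), 2);
    assert_eq!(debruijn_index(&mut stack, "a"), 1);
}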
33 changes: 33 additions & 0 deletions tests/parser.rs
@@ -0,0 +1,33 @@
+use lambda_calculus::{
+    parse,
+    parser::ParseError,
+    term::Notation::{Classic, DeBruijn},
+};
+
+#[test]
+fn parse_debruijn_and_classic() -> Result<(), ParseError> {
+    for (dbr, cla) in [
+        ("12", "a b"),
+        ("λλ21", "λs. λz. s z"),
+        (
+            "λ2134(λ3215(λ4321)3215)2134",
+            "λx. w x y z (λy. w x y z (λz. w x y z) w x y z) w x y z",
+        ),
+        (
+            // See: http://alexandria.tue.nl/repository/freearticles/597619.pdf
+            "λ2(λ421(5(λ4127)λ8))67",
+            // the free variable list is ..ywzfba
+            "λx. a (λt. b x t (f (λu. a u t z) λs. w)) w y",
+        ),
+        (
+            // apply `plus zero one` to `s` and `z`
+            "(λλλλ42(321))(λλ1)(λλ21)12",
+            "(λm.λn.λs.λz. m s (n s z)) (λs.λz. z) (λs.λz. s z) s z",
+        ),
+    ] {
+        let term_dbr = parse(dbr, DeBruijn)?;
+        let term_cla = parse(cla, Classic)?;
+        assert_eq!(term_dbr, term_cla);
+    }
+    Ok(())
+}
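The free-variable numbering can be checked on a smaller case than those above: in λx. a x b, the bound x has index 1, while the free a and b are recorded below the binder and number outwards as 2 and 3, giving λ213. A hypothetical extra check in the same style (not part of the PR):

use lambda_calculus::*;

fn main() -> Result<(), parser::ParseError> {
    // The bound x is index 1; the free a and b sit below the binder.
    assert_eq!(parse("λx. a x b", Classic)?, parse("λ213", DeBruijn)?);
    Ok(())
}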
15 changes: 15 additions & 0 deletions tests/reduction.rs
@@ -1,6 +1,7 @@
 extern crate lambda_calculus as lambda;
 
 use lambda::combinators::{I, O};
+use lambda::parser::ParseError;
 use lambda::*;
 use std::thread;

@@ -48,6 +49,20 @@ fn reduction_cbv() {
     assert_eq!(expr, I());
 }
 
+#[test]
+fn reduction_zero_plus_one() -> Result<(), ParseError> {
+    let mut expr = parse(
+        "(λm.λn.λs.λz. m s (n s z)) (λs.λz. z) (λs.λz. s z) s z",
+        Classic,
+    )?;
+    expr.reduce(CBV, 2);
+    assert_eq!(expr, parse("(λλ(λλ1)2((λλ21)21))12", DeBruijn)?);
+    expr.reduce(CBV, 6);
+    assert_eq!(expr, parse("12", DeBruijn)?);
+    assert_eq!(expr.to_string(), "a b");
+    Ok(())
+}
+
 #[test]
 #[ignore]
 fn reduction_huge() {
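For reference, the arithmetic behind this test: plus is λm.λn.λs.λz. m s (n s z), so plus zero one applied to the free variables s and z passes through (λλ(λλ1)2((λλ21)21))12 and ends at the de Bruijn term 12, which prints its two free variables as a b. A sketch that reaches the normal form in a single call instead of two counted CBV batches, assuming (per the crate's reduce signature) that a limit of 0 means no step limit:

use lambda_calculus::*;

fn main() -> Result<(), parser::ParseError> {
    let mut expr = parse(
        "(λm.λn.λs.λz. m s (n s z)) (λs.λz. z) (λs.λz. s z) s z",
        Classic,
    )?;
    // Reduce under call-by-value until no redex remains.
    expr.reduce(CBV, 0);
    assert_eq!(expr.to_string(), "a b");
    Ok(())
}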
