author    | Chloe Brown <chloe.brown.00@outlook.com> | 2023-03-25 16:52:15 +0000
committer | Chloe Brown <chloe.brown.00@outlook.com> | 2023-03-25 16:52:15 +0000
commit    | 43463df9899d00f0f86c3b655b84a5d5ce75402d (patch)
tree      | 4e77c2039149922d82f5df9e1434a72f8c7f88e3
parent    | 39d9c956a98a0aecb4e2913d3df0cc8eb0e78f69 (diff)
Extract parsing and evaluation to new modules.
-rw-r--r-- | obs.ipkg              |   2
-rw-r--r-- | src/CC/Term/Eval.idr  |  42
-rw-r--r-- | src/CC/Term/Parse.idr | 146
-rw-r--r-- | src/Main.idr          | 178
4 files changed, 195 insertions, 173 deletions
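
A minimal usage sketch of the newly extracted parser API, assuming only the signatures visible in this diff (parseString : String -> Either (List1 (ParsingError CCToken)) RawTerm, exported from CC.Term.Parse). The helper name readRaw and its one-line error message are illustrative assumptions, not code from this repository; Main itself reports source positions via putBoundErrLn.

    module Example

    import CC.Term.Parse   -- provides parseString (see src/CC/Term/Parse.idr below)
    import CC.Term.Raw     -- provides RawTerm

    -- Parse a source string, discarding error details for brevity.
    readRaw : String -> IO (Maybe RawTerm)
    readRaw src =
      case parseString src of
        Right raw => pure (Just raw)
        Left _    => do
          putStrLn "failed to parse input"
          pure Nothing
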
@@ -12,6 +12,8 @@ main = Main modules = CC.Name , CC.Term + , CC.Term.Eval + , CC.Term.Parse , CC.Term.Pretty , CC.Term.Raw , CC.Thinning diff --git a/src/CC/Term/Eval.idr b/src/CC/Term/Eval.idr new file mode 100644 index 0000000..311bc34 --- /dev/null +++ b/src/CC/Term/Eval.idr @@ -0,0 +1,42 @@ +module CC.Term.Eval + +import CC.Name +import CC.Term +import CC.Thinning + +-- Evaluation ------------------------------------------------------------------ + +export +app : Closure ctx -> Lazy (Value ctx) -> Value ctx +export +eval : Env local global -> Term local -> Value global + +app (Close n env t) v = assert_total $ eval (env :< (n, v)) t + +eval env (Var k n prf) = index env k prf +eval env (Abs n t) = VAbs (Close n env t) +eval env (App t u) = + case eval env t of + Ntrl n => Ntrl $ VApp n (eval env u) + VAbs close => app close (eval env u) +eval env (Let n t u) = eval (env :< (n, eval env t)) u + +-- Quoting --------------------------------------------------------------------- + +export +quote : {ctx : _} -> Value ctx -> Term ctx +quoteNtrl : {ctx : _} -> Neutral ctx -> Term ctx + +quote (Ntrl n) = quoteNtrl n +quote (VAbs close@(Close n _ _)) = + Abs n $ + assert_total $ + quote $ + app (wknClose close (drop id)) (Ntrl $ VVar 0 n $ Here $ reflexive {rel = (~~)}) + +quoteNtrl (VVar k n prf) = Var k n prf +quoteNtrl (VApp n v) = App (quoteNtrl n) (quote v) + +export +normalise : {global : _} -> Env local global -> Term local -> Term global +normalise env t = quote $ eval env t diff --git a/src/CC/Term/Parse.idr b/src/CC/Term/Parse.idr new file mode 100644 index 0000000..cef4f0e --- /dev/null +++ b/src/CC/Term/Parse.idr @@ -0,0 +1,146 @@ +module CC.Term.Parse + +import CC.Name +import CC.Term +import CC.Term.Raw + +import Data.List + +import Text.Lexer +import Text.Parser + +-- Tokens ---------------------------------------------------------------------- + +data CCTokenKind + = TIgnore + | TIdent + -- Keywords + | TLet + -- Symbols + | TLambda + | TPOpen + | TPClose + | TComma + | TFatArrow + | TDef + | TSemi + +%name CCTokenKind k + +Eq CCTokenKind where + TIgnore == TIgnore = True + TIdent == TIdent = True + TLet == TLet = True + TLambda == TLambda = True + TPOpen == TPOpen = True + TPClose == TPClose = True + TComma == TComma = True + TFatArrow == TFatArrow = True + TDef == TDef = True + TSemi == TSemi = True + _ == _ = False + +TokenKind CCTokenKind where + TokType TIdent = String + TokType _ = () + + tokValue TIgnore s = () + tokValue TIdent s = s + tokValue TLet s = () + tokValue TLambda s = () + tokValue TPOpen s = () + tokValue TPClose s = () + tokValue TComma s = () + tokValue TFatArrow s = () + tokValue TDef s = () + tokValue TSemi s = () + +export +CCToken : Type +CCToken = Token CCTokenKind + +-- Lexer ----------------------------------------------------------------------- + +ignored : List Lexer +ignored = [spaces, lineComment (exact "--"), blockComment (exact "{-") (exact "-}")] + +identifier : Lexer +identifier = alpha <+> many alphaNum + +keywords : List (String, CCTokenKind) +keywords = [("let", TLet)] + +tokenMap : TokenMap CCToken +tokenMap = + toTokenMap (map (, TIgnore) ignored) ++ + [(identifier, \s => + case lookup s keywords of + Just kind => Tok kind s + Nothing => Tok TIdent s) + ] ++ toTokenMap [ + (exact "\\", TLambda), + (exact "\x03BB", TLambda), + (exact "(", TPOpen), + (exact ")", TPClose), + (exact ",", TComma), + (exact "=>", TFatArrow), + (exact "=", TDef), + (exact ";", TSemi) + ] + +-- Parser 
---------------------------------------------------------------------- + +%tcinline +parens : {b : Bool} -> Grammar state CCToken b ty -> Grammar state CCToken True ty +parens p = match TPOpen >> commit >> p >>= (\t => match TPClose *> commit *> pure t) + +parseTerm : Grammar state CCToken True RawTerm +parseLambda : Grammar state CCToken True RawTerm +parseLet : Grammar state CCToken True RawTerm +parseSpine : Grammar state CCToken True RawTerm +parseAtom : Grammar state CCToken True RawTerm + +parseTerm = parseLambda <|> parseLet <|> parseSpine + +parseLambda = do + match TLambda + commit + names <- sepBy1 (match TComma >> commit) (MkName <$> (bounds $ match TIdent)) + commit + match TFatArrow + commit + t <- parseTerm + pure $ foldr1By RAbs (flip RAbs t) names + +parseLet = do + match TLet + commit + n <- bounds $ match TIdent + commit + match TDef + commit + t <- parseTerm + match TSemi + commit + u <- parseTerm + pure $ RLet (MkName n) t u + +parseSpine = foldl1 RApp <$> some parseAtom + +parseAtom = + (RVar <$> MkName <$> (bounds $ match TIdent <* commit)) <|> + (parens parseTerm) + +parseSrc : Grammar state CCToken True RawTerm +parseSrc = parseTerm <* eof + +export +parseString : String -> Either (List1 (ParsingError CCToken)) RawTerm +parseString str = do + let (tokens, _, _, "") = lex tokenMap str + | (_, line, col, rest) => + let bounds = MkBounds { startCol = col, startLine = line, endCol = col, endLine = line } in + Left (Error "failed to lex input" (Just bounds) ::: []) + (val, []) <- parse parseSrc $ filter (\tok => tok.val.kind /= TIgnore) tokens + | (_, fst :: rest) => Left (Error "unexpected token" (Just fst.bounds) ::: []) + pure val diff --git a/src/Main.idr b/src/Main.idr index a5ce09c..2f05ccf 100644 --- a/src/Main.idr +++ b/src/Main.idr @@ -2,9 +2,10 @@ module Main import CC.Name import CC.Term +import CC.Term.Eval +import CC.Term.Parse import CC.Term.Pretty import CC.Term.Raw -import CC.Thinning import Data.String @@ -12,45 +13,11 @@ import System import System.File.ReadWrite import System.File.Virtual -import Text.Lexer import Text.Parser import Text.PrettyPrint.Prettyprinter import Text.PrettyPrint.Prettyprinter.Render.Terminal --------------------------------------------------------------------------------- - -app : Closure ctx -> Lazy (Value ctx) -> Value ctx -eval : Env local global -> Term local -> Value global - -app (Close n env t) v = assert_total $ eval (env :< (n, v)) t - -eval env (Var k n prf) = index env k prf -eval env (Abs n t) = VAbs (Close n env t) -eval env (App t u) = - case eval env t of - Ntrl n => Ntrl $ VApp n (eval env u) - VAbs close => app close (eval env u) -eval env (Let n t u) = eval (env :< (n, eval env t)) u - --------------------------------------------------------------------------------- - -quote : {ctx : _} -> Value ctx -> Term ctx -quoteNtrl : {ctx : _} -> Neutral ctx -> Term ctx - -quote (Ntrl n) = quoteNtrl n -quote (VAbs close@(Close n _ _)) = - Abs n $ - assert_total $ - quote $ - app (wknClose close (drop id)) (Ntrl $ VVar 0 n $ Here $ reflexive {rel = (~~)}) - -quoteNtrl (VVar k n prf) = Var k n prf -quoteNtrl (VApp n v) = App (quoteNtrl n) (quote v) - -normalise : {global : _} -> Env local global -> Term local -> Term global -normalise env t = quote $ eval env t - --------------------------------------------------------------------------------- +--- Error Reporting ------------------------------------------------------------ putErrLn : HasIO io => String -> io () putErrLn str = do @@ -67,132 +34,7 @@ putBoundErrLn (Just 
bounds) str = let endCol = padLeft 2 '0' $ show bounds.endCol in putErrLn "\{startLine}:\{startCol}--\{endLine}:\{endCol}: \{str}" -data CCTokenKind - = TIgnore - | TIdent - -- Keywords - | TLet - -- Symbols - | TLambda - | TPOpen - | TPClose - | TComma - | TFatArrow - | TDef - | TSemi - -%name CCTokenKind k - -Eq CCTokenKind where - TIgnore == TIgnore = True - TIdent == TIdent = True - TLet == TLet = True - TLambda == TLambda = True - TPOpen == TPOpen = True - TPClose == TPClose = True - TComma == TComma = True - TFatArrow == TFatArrow = True - TDef == TDef = True - TSemi == TSemi = True - _ == _ = False - -TokenKind CCTokenKind where - TokType TIdent = String - TokType _ = () - - tokValue TIgnore s = () - tokValue TIdent s = s - tokValue TLet s = () - tokValue TLambda s = () - tokValue TPOpen s = () - tokValue TPClose s = () - tokValue TComma s = () - tokValue TFatArrow s = () - tokValue TDef s = () - tokValue TSemi s = () - -CCToken = Token CCTokenKind - -ignored : List Lexer -ignored = [spaces, lineComment (exact "--"), blockComment (exact "{-") (exact "-}")] - -identifier : Lexer -identifier = alpha <+> many alphaNum - -keywords : List (String, CCTokenKind) -keywords = [("let", TLet)] - -tokenMap : TokenMap CCToken -tokenMap = - toTokenMap (map (, TIgnore) ignored) ++ - [(identifier, \s => - case lookup s keywords of - Just kind => Tok kind s - Nothing => Tok TIdent s) - ] ++ toTokenMap [ - (exact "\\", TLambda), - (exact "\x03BB", TLambda), - (exact "(", TPOpen), - (exact ")", TPClose), - (exact ",", TComma), - (exact "=>", TFatArrow), - (exact "=", TDef), - (exact ";", TSemi) - ] - -%tcinline -parens : {b : Bool} -> Grammar state CCToken b ty -> Grammar state CCToken True ty -parens p = match TPOpen >> commit >> p >>= (\t => match TPClose *> commit *> pure t) - -parseTerm : Grammar state CCToken True RawTerm -parseLambda : Grammar state CCToken True RawTerm -parseLet : Grammar state CCToken True RawTerm -parseSpine : Grammar state CCToken True RawTerm -parseAtom : Grammar state CCToken True RawTerm - -parseTerm = parseLambda <|> parseLet <|> parseSpine - -parseLambda = do - match TLambda - commit - names <- sepBy1 (match TComma >> commit) (MkName <$> (bounds $ match TIdent)) - commit - match TFatArrow - commit - t <- parseTerm - pure $ foldr1By RAbs (flip RAbs t) names - -parseLet = do - match TLet - commit - n <- bounds $ match TIdent - commit - match TDef - commit - t <- parseTerm - match TSemi - commit - u <- parseTerm - pure $ RLet (MkName n) t u - -parseSpine = foldl1 RApp <$> some parseAtom - -parseAtom = - (RVar <$> MkName <$> (bounds $ match TIdent <* commit)) <|> - (parens parseTerm) - -parseSrc : Grammar state CCToken True RawTerm -parseSrc = parseTerm <* eof - -parseString : String -> Either (List1 (ParsingError CCToken)) RawTerm -parseString str = do - let (tokens, _, _, "") = lex tokenMap str - | (_, line, col, rest) => - let bounds = MkBounds { startCol = col, startLine = line, endCol = col, endLine = line } in - Left (Error "failed to lex input" (Just bounds) ::: []) - (val, []) <- parse parseSrc $ filter (\tok => tok.val.kind /= TIgnore) tokens - | (_, fst :: rest) => Left (Error "unexpected token" (Just fst.bounds) ::: []) - pure val +-- Parsing --------------------------------------------------------------------- parse : String -> IO RawTerm parse str = @@ -210,17 +52,7 @@ parseStdin = do | Left err => putErrLn "failed to read input: \{show err}" >> exitFailure parse str --------------------------------------------------------------------------------- - --- -- 
("\\" works for lambda as well) --- ex = main' "nf" $ unlines [ --- "let λ λ 1 (1 (1 (1 (1 0))));", -- five = λ s z. s (s (s (s (s z)))) --- "let λ λ λ λ 3 1 (2 1 0);", -- add = λ a b s z. a s (b s z) --- "let λ λ λ λ 3 (2 1) 0;", -- mul = λ a b s z. a (b s) z --- "let 1 2 2;", -- ten = add five five --- "let 1 0 0;", -- hundred = mul ten ten --- "let 2 1 0;", -- thousand = mul ten hundred --- "0" -- thousand +-- Main Function --------------------------------------------------------------- helpMsg : String helpMsg = unlines [ |