Cleanup comments, codegen for primitive type constructors.

2024-09-14 11:39:03 -07:00
parent 4e8f15c3fb
commit e066a304cd
13 changed files with 50 additions and 225 deletions

View File

@@ -5,7 +5,12 @@ Newt is a dependently typed programming language that compiles to javascript. It
 my first attempt to write a dependent typed language. It is inspired by Idris,
 Elaboration Zoo, pi-forall, and various tutorials.
-The goal is to have inductive types, pattern matching, compile to javascript, and be self hosted. At the very least though, I'd like to be able to build a little browser "playground" to compile and run code. I'm still learning how this stuff is done, so it's a bit of a mess. But I'm going to
-work with the garage door up.
+The goal is to have inductive types, pattern matching, compile to javascript, and be
+self hosted. Ideally I could build a little browser "playground" to compile, view
+output, and run code.
 ## Building

View File

@@ -4,6 +4,7 @@
 I may be done with `U` - I keep typing `Type`.
 - [ ] type constructors are no longer generated? And seem to have 0 arity.
+- [ ] raw let is not yet implemented (although define used by case tree building)
 - [x] there is some zero argument application in generated code
 - [x] get equality.newt to work
 - [x] broken again because I added J, probably need to constrain scrutinee to value

View File

@@ -1,54 +0,0 @@
-- Can we define the Y combinator in pi-forall?
-- Yes! See below.
-- Note: pi-forall allows recursive definitions,
-- so this is not necessary at all.
module Fix where

-- To type check the Y combinator, we need to have a type
-- D such that D ~~ D -> D
data D (A : Type) : Type where
  F of (_ : D A -> D A)
  V of (_ : A)

unV : [A:Type] -> D A -> A
unV = \[A] v.
  case v of
    V y -> y
    F f -> TRUSTME

unF : [A:Type] -> D A -> D A -> D A
unF = \[A] v x .
  case v of
    F f -> f x
    V y -> TRUSTME

-- Here's the Y-combinator. To make it type
-- check, we need to add the appropriate conversions
-- into and out of the D type.
fix : [A:Type] -> (A -> A) -> A
fix = \ [A] g.
  let omega =
        ( \x. V (g (unV [A] (unF [A] x x)))
          : D A -> D A) in
  unV [A] (omega (F omega))

-- Example use case
data Nat : Type where
  Zero
  Succ of ( _ : Nat)

fix_add : Nat -> Nat -> Nat
fix_add = fix [Nat -> Nat -> Nat]
  \radd. \x. \y.
    case x of
      Zero -> y
      Succ n -> Succ (radd n y)

test : fix_add 5 2 = 7
test = Refl

View File

@@ -1,74 +0,0 @@
module Lennart where
-- stack exec -- pi-forall Lennart.pi
-- with unbind / subst
-- 7.81s user 0.52s system 97% cpu 8.568 total
-- with substBind
-- 3.81s user 0.28s system 94% cpu 4.321 total
import Fix
bool : Type
bool = [C : Type] -> C -> C -> C
false : bool
false = \[C]. \f.\t.f
true : bool
true = \[C]. \f.\t.t
nat : Type
nat = [C : Type] -> C -> (nat -> C) -> C
zero : nat
zero = \[C].\z.\s.z
succ : nat -> nat
succ = \n.\[C].\z.\s. s n
one : nat
one = succ zero
two : nat
two = succ one
three : nat
three = succ two
isZero : nat -> bool
isZero = \n.n [bool] true (\m.false)
const : [A:Type] -> A -> A -> A
const = \[A].\x.\y.x
prod : Type -> Type -> Type
prod = \A B. [C:Type] -> (A -> B -> C) -> C
pair : [A :Type] -> [B: Type] -> A -> B -> prod A B
pair = \[A][B] a b. \[C] p. p a b
fst : [A:Type] -> [B:Type] -> prod A B -> A
fst = \[A][B] ab. ab [A] (\a.\b.a)
snd : [A:Type] -> [B:Type] -> prod A B -> B
snd = \[A][B] ab.ab [B] (\a.\b.b)
add : nat -> nat -> nat
add = fix [nat -> nat -> nat]
  \radd . \x.\y. x [nat] y (\ n. succ (radd n y))

mul : nat -> nat -> nat
mul = fix [nat -> nat -> nat]
  \rmul. \x.\y. x [nat] zero (\ n. add y (rmul n y))

fac : nat -> nat
fac = fix [nat -> nat]
  \rfac. \x. x [nat] one (\ n. mul x (rfac n))

eqnat : nat -> nat -> bool
eqnat = fix [nat -> nat -> bool]
  \reqnat. \x. \y.
    x [bool]
      (y [bool] true (\b.false))
      (\x1.y [bool] false (\y1. reqnat x1 y1))

sumto : nat -> nat
sumto = fix [nat -> nat]
  \rsumto. \x. x [nat] zero (\n. add x (rsumto n))
n5 : nat
n5 = add two three
n6 : nat
n6 = add three three
n17 : nat
n17 = add n6 (add n6 n5)
n37 : nat
n37 = succ (mul n6 n6)
n703 : nat
n703 = sumto n37
n720 : nat
n720 = fac n6
t : (eqnat n720 (add n703 n17)) = true
t = Refl

View File

@@ -37,7 +37,6 @@ forceType (VMeta fc ix sp) = case !(lookupMeta ix) of
   (Solved k t) => vappSpine t sp >>= forceType
 forceType x = pure x
 public export
 record UnifyResult where
   constructor MkResult
@@ -287,7 +286,8 @@ findSplit (_ :: xs) = findSplit xs
 -- we could pass into build case and use it for (x /? y)
--- TODO, we may need to filter these for the type
+-- TODO, we may need to filter these against the type to rule out
+-- impossible cases
 getConstructors : Context -> Val -> M (List (String, Nat, Tm))
 getConstructors ctx (VRef fc nm _ _) = do
   names <- lookupTCon nm
@@ -339,28 +339,20 @@ buildCase ctx prob scnm scty (dcName, arity, ty) = do
   vty <- eval [] CBN ty
   (ctx', ty', vars, sc) <- extendPi ctx (vty) [<] [<]
-  -- what is the goal?
-  -- we have something here that informs what happens in the casealt, possibly tweaking
-  -- context or goal
-  -- e.g. we get to end of Just {a} x and goal is Maybe Val, we want `let a = Val` in context.
-  -- likewise if the constructor says `Foo String` and goal is `Foo x` we know x is String in this branch.
-  -- we need unify to hand constraints back...
-  -- we may need to walk through the case alt constraint and discharge or drop things?
-  -- should I somehow make those holes? It would be nice to not make the user dot it.
-  -- And we don't _need_ a solution, just if it's unified against
-  -- I think I'm going down a different road than Idris..
-  -- do this or see how other people manage?
-  -- this puts the failure on the LHS
-  -- unifying these should say say VVar 1 is Nat.
-  -- ERROR at (23, 0): unify failed (%var 1 [< ]) =?= (%ref Nat [< ]) [no Fn]
+  -- TODO I think we need to figure out what is dotted, maybe
+  -- the environment manipulation below is sufficient bookkeeping
+  -- or maybe it's a bad approach.
+  -- At some point, I'll take a break and review more papers and code,
+  -- now that I know some of the questions I'm trying to answer.
+  -- We get unification constraints from matching the data constructors
+  -- codomain with the scrutinee type
   debug "unify dcon dom with scrut"
   res <- unify ctx' (length ctx'.env) ty' scty
-  --res <- unify ctx' (length ctx.env) ty' scty
+  -- Additionally, we constrain the scrutinee's variable to be
+  -- dcon applied to args
   let Just x = findIndex ((==scnm) . fst) ctx'.types
     | Nothing => error ctx.fc "\{scnm} not is scope?"
   let lvl = ((length ctx'.env) `minus` (cast x)) `minus` 1
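The new comments describe the two outputs of a split: unification constraints from matching the constructor's codomain against the scrutinee's type, and a constraint equating the scrutinee with the constructor applied to its argument variables. A user-level illustration of the first part, written as plain Idris 2 rather than newt (hypothetical, not from the repo):

```idris
import Data.Vect

-- Matching (::) forces the unifier to solve the index: the codomain of (::)
-- is Vect (S len) a, which unifies with the scrutinee type Vect (S k) a,
-- while Nil's codomain Vect Z a does not, so that branch is impossible.
safeHead : Vect (S k) a -> a
safeHead (x :: xs) = x
```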
@@ -372,12 +364,12 @@ buildCase ctx prob scnm scty (dcName, arity, ty) = do
debug "before env: \{show ctx'.env}" debug "before env: \{show ctx'.env}"
debug "SC CONSTRAINT: \{show scon}" debug "SC CONSTRAINT: \{show scon}"
-- So we go and stuff stuff into the environment, which I guess gets it into the RHS, -- push the constraints into the environment by turning bind into define
-- but doesn't touch goal... -- Is this kosher? It might be a problem if we've already got metas over
-- this stuff, because it intends to ignore defines.
ctx' <- updateContext ctx' (scon :: res.constraints) ctx' <- updateContext ctx' (scon :: res.constraints)
debug "context types: \{show ctx'.types}" debug "context types: \{show ctx'.types}"
debug "context env: \{show ctx'.env}" debug "context env: \{show ctx'.env}"
-- This doesn't really update existing val... including types in the context.
-- What if all of the pattern vars are defined to meta -- What if all of the pattern vars are defined to meta
@@ -387,13 +379,12 @@ buildCase ctx prob scnm scty (dcName, arity, ty) = do
   let clauses = mapMaybe (rewriteClause vars) prob.clauses
   debug "and now:"
   for_ clauses $ (\x => debug " \{show x}")
-  -- So ideally we'd know which position we're splitting and the surrounding context
-  -- That might be a lot to carry forward (maybe a continuation?) but we could carry
-  -- backwards as a List Missing that we augment as we go up.
-  -- We could even stick a little "throw error" tree in here for the case.
-  -- even going backward, we don't really know where pat$n falls into the expression.
-  -- It would need to keep track of its position. Then fill in the slots (wild vs PCons), or
-  -- wrapping with PCons as we move back up. E.g. _ (Cons _ (Cons _ _)) _ _ could be missing
+  -- TODO it would be nice to know which position we're splitting and the splits that
+  -- we've already done, so we have a good picture of what is missing for error reporting.
+  -- This could be carried forward as a continuation or data, or we could decorate the
+  -- error on the way back.
   when (length clauses == 0) $ error ctx.fc "Missing case for \{dcName} splitting \{scnm}"
   tm <- buildTree ctx' (MkProb clauses prob.ty)
   pure $ CaseCons dcName (map getName vars) tm
@@ -448,12 +439,6 @@ lookupName ctx name = go 0 ctx.types
     -- FIXME - we should stuff a Binder of some sort into "types"
     go ix ((nm, ty) :: xs) = if nm == name then Just (Bnd emptyFC ix, ty) else go (S ix) xs
--- FIXME need to check done here...
--- If all of the constraints are assignments, fixup context and type check
--- else bail:
--- error fc "Stuck, no splits \{show constraints}"
 checkDone : Context -> List (String, Pattern) -> Raw -> Val -> M Tm
 checkDone ctx [] body ty = do
   debug "DONE-> check body \{show body} at \{show ty}"
@@ -507,6 +492,10 @@ buildTree ctx prob@(MkProb ((MkClause fc constraints [] expr) :: cs) ty) = do
     pure $ Case fc sctm alts
+showDef : Context -> List String -> Nat -> Val -> M String
+showDef ctx names n v@(VVar _ n' [<]) = if n == n' then pure "" else pure "= \{pprint names !(quote ctx.lvl v)}"
+showDef ctx names n v = pure "= \{pprint names !(quote ctx.lvl v)}"
 check ctx tm ty = case (tm, !(forceType ty)) of
   (RCase fc rsc alts, ty) => do
     -- We've got a beta redex or need to do something...
@@ -527,17 +516,15 @@ check ctx tm ty = case (tm, !(forceType ty)) of
       alts <- traverse (buildCase ctx' (MkProb clauses ty) scnm scty) cons
       pure $ Let fc scnm sc $ Case fc (Bnd fc 0) alts
-    -- buildTree ctx (MkProb{})
-    -- ?hole
     -- Document a hole, pretend it's implemented
     (RHole fc, ty) => do
       ty' <- quote ctx.lvl ty
       let names = (toList $ map fst ctx.types)
+      -- I want to know which ones are defines. I should skip the `=` bit if they match
       env <- for (zip ctx.env (toList ctx.types)) $ \(v, n, ty) => pure " \{n} : \{pprint names !(quote ctx.lvl ty)} = \{pprint names !(quote ctx.lvl v)}"
       let msg = unlines (toList $ reverse env) ++ " -----------\n" ++ " goal \{pprint names ty'}"
-      liftIO $ putStrLn "INFO at \{show fc}: "
-      liftIO $ putStrLn msg
+      putStrLn "INFO at \{show fc}: "
+      putStrLn msg
       -- let context = unlines foo
       -- need to print 'warning' with position
       -- fixme - just put a name on it like idris and stuff it into top.

View File

@@ -206,9 +206,9 @@ entryToDoc (MkEntry name ty (Fn tm)) = do
   let body = stmtToDoc $ termToJS [] ct JPlain
   pure (text "const" <+> text name <+> text "=" <+/> body)
 entryToDoc (MkEntry name type Axiom) = pure ""
-entryToDoc (MkEntry name type (TCon strs)) = pure ""
+entryToDoc (MkEntry name type (TCon strs)) = pure $ dcon name (piArity type)
 entryToDoc (MkEntry name type (DCon arity str)) = pure $ dcon name arity
-entryToDoc (MkEntry name _ PrimTCon) = pure $ text "/* PrimTCon \{name} */"
+entryToDoc (MkEntry name type PrimTCon) = pure $ dcon name (piArity type)
 entryToDoc (MkEntry name _ (PrimFn src)) = pure $ text "const" <+> text name <+> "=" <+> text src
 export
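This is the codegen half of the commit: type constructors (`TCon`) and primitive type constructors (`PrimTCon`) are now emitted like data constructors, with their arity read off the Pi telescope of their type instead of being hard-coded to 0 (or a `/* PrimTCon */` comment). The diff does not show `piArity`; here is a minimal, self-contained sketch of what such a helper looks like, using a made-up miniature `Tm` rather than the repo's:

```idris
-- Hypothetical sketch, not the repo's definitions: just enough of a core
-- term type to show how arity falls out of a Pi telescope.
data Tm : Type where
  Pi : (nm : String) -> (dom : Tm) -> (cod : Tm) -> Tm
  U  : Tm

-- count the leading Pi binders of a type
piArity : Tm -> Nat
piArity (Pi _ _ cod) = S (piArity cod)
piArity _ = Z

-- e.g. a type constructor  Either : Type -> Type -> Type  gets arity 2
eitherArity : Nat
eitherArity = piArity (Pi "a" U (Pi "b" U U))
```

Together with the matching `arityForName` change below, this should address the TODO items about type constructors having arity 0 and the stray zero-argument applications in the generated code.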

View File

@@ -51,10 +51,10 @@ arityForName fc nm = case lookup nm !get of
   -- let the magic hole through for now (will generate bad JS)
   Nothing => if nm == "?" then pure 0 else error fc "Name \{show nm} not in scope"
   (Just (MkEntry name type Axiom)) => pure 0
-  (Just (MkEntry name type (TCon strs))) => pure 0 -- FIXME
+  (Just (MkEntry name type (TCon strs))) => pure $ piArity type
   (Just (MkEntry name type (DCon k str))) => pure k
   (Just (MkEntry name type (Fn t))) => pure $ getArity t
-  (Just (MkEntry name type (PrimTCon))) => pure 0
+  (Just (MkEntry name type (PrimTCon))) => pure $ piArity type
   -- Assuming a primitive can't return a function
   (Just (MkEntry name type (PrimFn t))) => pure $ piArity type

View File

@@ -2,8 +2,6 @@ module Lib.Parser
 import Lib.Types
 import Debug.Trace
--- The FC stuff is awkward later on. We might want bounds on productions
--- But we might want to consider something more generic and closer to lean?
 -- app: foo {a} a b
 -- lam: λ {A} {b : A} (c : Blah) d e f => something
@@ -17,19 +15,6 @@ import Lib.Syntax
 import Data.List
 import Data.Maybe
--- There is the whole core vs surface thing here.
--- might be best to do core first/ Technically don't
--- _need_ a parser, but would be useful for testing.
--- look to pi-forall / ezoo, but I think we start with a
--- TTImpl level grammar, then add a more sugared layer above
--- so holes and all that
--- After the parser runs, see below, take a break and finish pi-forall
--- exercises. There is some fill in the parser stuff that may show
--- the future.
 ident = token Ident <|> token MixFix
 uident = token UIdent

View File

@@ -1,10 +1,10 @@
 module Lib.Parser.Impl
--- For some reason I'm doing Idris' commit / mustWork dance here, even though it
--- seems to be painful
--- Probably would like to do "did consume anything" on the input, but we might need
--- something other than a token list
+-- This follows Idris, not sure why I did that because commit / mustWork is messy
+-- and painful to work with. I _think_ a commit on consumption of anything, like parsec
+-- would work better.
+-- Perhaps we can set the commit flag on consumption and get that with minor changes.
 -- TODO see what Kovacs' flatparse does for error handling / <|>
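For context on that comment, a small self-contained sketch of the parsec-style behaviour it is contemplating: a choice operator that backtracks only when the left branch failed without consuming input. This is a hypothetical illustration with a stripped-down parser type, not Newt's `Parser`:

```idris
-- Hypothetical sketch, not the code in this module.
data Res a = OK a (List Char)
           | Fail String (List Char) Bool   -- Bool: did we consume input before failing?

record P a where
  constructor MkP
  run : List Char -> Res a

-- "Commit on consumption": only try the alternative if the first parser
-- failed without eating any input; otherwise report its error directly.
orElse : P a -> Lazy (P a) -> P a
orElse pa pb = MkP $ \inp =>
  case pa.run inp of
    Fail err rest False => run pb inp
    res => res
```

The current design instead threads an explicit commit flag (the `Bool` in the `Parser` type below), which is what the commit/mustWork dance manipulates.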
@@ -75,12 +75,12 @@ Functor Result where
 -- FC is a line and column for indents. The idea being that we check
 -- either the col < tokCol or line == tokLine, enabling sameLevel.
--- This is a Reader in FC
--- we need State for operators (or postpone that to elaboration)
+-- We need State for operators (or postpone that to elaboration)
 -- Since I've already built out the pratt stuff, I guess I'll
 -- leave it in the parser for now
+-- This is a Reader in FC, a State in Operators, Commit flag, TokenList
 export
 data Parser a = P (TokenList -> Bool -> List (String, Int, Fixity) -> (lc : FC) -> Result a)
@@ -140,7 +140,7 @@ Applicative Parser where
     (OK x toks com ops) => OK (f x) toks com ops
     (Fail fatal err toks com ops) => Fail fatal err toks com ops
--- REVIEW it would be nice if the second argument was lazy...
+-- Second argument lazy so we don't have circular refs when defining parsers.
 export
 (<|>) : Parser a -> Lazy (Parser a) -> Parser a
 (P pa) <|> (P pb) = P $ \toks,com,ops,col =>
@@ -158,7 +158,6 @@ Monad Parser where
     (Fail fatal err xs x ops) => Fail fatal err xs x ops
--- do we need this?
 pred : (BTok -> Bool) -> String -> Parser String
 pred f msg = P $ \toks,com,ops,col =>
   case toks of
@@ -176,12 +175,6 @@ mutual
 export many : Parser a -> Parser (List a)
 many p = some p <|> pure []
--- sixty let has some weird CPS stuff, but essentially:
--- case token_ of
---   Lexer.LLet -> PLet <$> blockOfMany let_ <* token Lexer.In <*> term
--- withIndentationBlock - sets the col
 export
 getPos : Parser FC
 getPos = P $ \toks,com, ops, (l,c) => case toks of
@@ -218,7 +211,7 @@ export
 manySame : Parser a -> Parser (List a)
 manySame pa = many $ sameLevel pa
-||| requires a token to be indented?
+||| check indent on next token and run parser
 export
 indented : Parser a -> Parser a
 indented (P p) = P $ \toks,com,ops,(l,c) => case toks of
@@ -228,6 +221,7 @@ indented (P p) = P $ \toks,com,ops,(l,c) => case toks of
       in if tc > c || tl == l then p toks com ops (l,c)
          else Fail False (error toks "unexpected outdent") toks com ops
+||| expect token of given kind
 export
 token' : Kind -> Parser String
 token' k = pred (\t => t.val.kind == k) "Expected a \{show k} token"
@@ -236,6 +230,7 @@ export
 keyword' : String -> Parser ()
 keyword' kw = ignore $ pred (\t => t.val.text == kw) "Expected \{kw}"
+||| expect indented token of given kind
 export
 token : Kind -> Parser String
 token = indented . token'
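The new docstrings make the division of labour clearer: `token'`/`keyword'` only look at the token itself, while `token = indented . token'` also enforces the indentation rule (`tc > c || tl == l`) against the ambient `FC`. A hypothetical usage sketch built only from names visible in this diff (it assumes the `Ident` token kind from the lexer and whatever combinator establishes the block's `FC`):

```idris
-- Hypothetical usage, not code from the repo: parse a `where` block as a
-- column-aligned run of identifiers.
identBlock : Parser (List String)
identBlock = do
  indented (keyword' "where")   -- the keyword must be indented past the enclosing FC
  manySame (token Ident)        -- each identifier starts at the same column;
                                -- `token` itself allows continuation on deeper columns
```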

View File

@@ -65,10 +65,6 @@ processDecl (Def fc nm clauses) = do
   let (MkEntry name ty Axiom) := entry
     | _ => throwError $ E fc "\{nm} already defined"
-  -- and we pass to the case tree stuff now
-  -- maybe fix up the clauses to match?
-  -- Also we need to distinguish DCon/var
   putStrLn "check \{nm} ... at \{pprint [] ty}"
   vty <- eval empty CBN ty
   putStrLn "vty is \{show vty}"
@@ -82,6 +78,7 @@ processDecl (Def fc nm clauses) = do
     (Solved k x) => pure ()
     (Unsolved (l,c) k xs) => do
       -- should just print, but it's too subtle in the sea of messages
+      -- we'd also need the ability to mark the whole top context as failure if we continue
       -- putStrLn "ERROR at (\{show l}, \{show c}): Unsolved meta \{show k}"
       throwError $ E (l,c) "Unsolved meta \{show k}"
   debug "Add def \{nm} \{pprint [] tm'} : \{pprint [] ty}"
@@ -97,19 +94,13 @@ processDecl (DCheck fc tm ty) = do
putStrLn "got \{pprint [] res}" putStrLn "got \{pprint [] res}"
norm <- nf [] res norm <- nf [] res
putStrLn "norm \{pprint [] norm}" putStrLn "norm \{pprint [] norm}"
-- top <- get
-- ctx <- mkCtx top.metas
-- I need a type to check against
-- norm <- nf [] x
putStrLn "NF " putStrLn "NF "
processDecl (DImport fc str) = throwError $ E fc "import not implemented" processDecl (DImport fc str) = throwError $ E fc "import not implemented"
processDecl (Data fc nm ty cons) = do processDecl (Data fc nm ty cons) = do
-- It seems like the FC for the errors are not here?
ctx <- get ctx <- get
tyty <- check (mkCtx ctx.metas fc) ty (VU fc) tyty <- check (mkCtx ctx.metas fc) ty (VU fc)
-- FIXME we need this in scope, but need to update
modify $ setDef nm tyty Axiom modify $ setDef nm tyty Axiom
ctx <- get ctx <- get
cnames <- for cons $ \x => case x of cnames <- for cons $ \x => case x of

View File

@@ -174,9 +174,6 @@ eval env mode (Lit fc lit) = pure $ VLit fc lit
 -- We need a neutral and some code to run the case tree
 eval env mode tm@(Case fc sc alts) = pure $ VCase fc !(eval env mode sc) alts
---   case !(evalAlt env mode !(eval env mode sc) alts) of
---     Just foo => ?goodAlt
---     Nothing => ?stuckAlt
 export
 quote : (lvl : Nat) -> Val -> M Tm

View File

@@ -1,8 +1,4 @@
 module Lib.Types
--- I'm not sure this is related, or just a note to self (Presheaves on Porpoise)
--- maybe watch https://www.youtube.com/watch?v=3gef0_NFz8Q
--- or drop the indices for now.
 -- For FC, Error
 import public Lib.Parser.Impl

View File

@@ -30,7 +30,3 @@ splitTele = go []
     go : List Binder -> Tm -> (Tm, List Binder)
     go ts (Pi fc nm icit t u) = go (MkBind fc nm icit t :: ts) u
     go ts tm = (tm, reverse ts)
--- splitTele (Pi x str icit t u) = ?splitTele_rhs_6
--- splitTele tm =