From: Adam Megacz Date: Sun, 15 May 2011 06:36:47 +0000 (-0700) Subject: Merge branch 'coq-extraction-baked-in' of /afs/megacz.com/pub/software/coq-hetmet X-Git-Url: http://git.megacz.com/?p=coq-hetmet.git;a=commitdiff_plain;h=852d8660c841ea0812ff9315ae53eaf137ca0327;hp=176674d506ca72f9ddafb19da4691f8cf8b0bbb9 Merge branch 'coq-extraction-baked-in' of /afs/megacz.com/pub/software/coq-hetmet --- diff --git a/.gitignore b/.gitignore index aa0d01f..f7f85d1 100644 --- a/.gitignore +++ b/.gitignore @@ -3,3 +3,4 @@ examples/tutorial.tex examples/tutorial.pdf build/ build/** +examples/.build diff --git a/Makefile b/Makefile index c77a3dd..37512bf 100644 --- a/Makefile +++ b/Makefile @@ -1,6 +1,7 @@ coqc := coqc -noglob -opt coqfiles := $(shell find src -name \*.v | grep -v \\\#) allfiles := $(coqfiles) $(shell find src -name \*.hs | grep -v \\\#) +coq_version := 8.3pl2-tracer default: all @@ -9,6 +10,7 @@ all: $(allfiles) cd build; $(MAKE) -f Makefile.coq OPT="-opt -dont-load-proofs" All.vo build/CoqPass.hs: $(allfiles) + $(coqc) -v | grep 'version $(coq_version)' || (echo;echo "You need Coq version $(coq_version) to proceed";echo; false) make build/Makefile.coq cd build; $(MAKE) -f Makefile.coq OPT="-opt -dont-load-proofs" ExtractionMain.vo cd build; $(MAKE) -f Makefile.coq Extraction.vo @@ -29,10 +31,23 @@ src/categories/src: clean: rm -rf build +examples/test.pdf: + ../../../inplace/bin/ghc-stage2 GArrowTikZ.hs + ./GArrowTikZ > test.tex + pdflatex test.tex + open test.pdf + +examples/doc/index.html: + mkdir -p examples/doc + haddock --html Unify.hs + open Unify.html + + merged: mkdir -p .temp cd src; for A in *.v; do cat $$A | grep -v '^Require Import' > ../.temp/`echo $$A | sed s_\\\\.v_._`; done - cd src/categories/src; for A in *.v; do cat $$A | grep -v '^Require Import' > ../../../.temp/`echo $$A | sed s_\\\\.v_._`; done + cd src/categories/src; for A in *.v; do cat $$A | \ + grep -v '^Require Import' > ../../../.temp/`echo $$A | sed s_\\\\.v_._`; done cp src/Banner.v .temp/GArrows.v cd .temp; grep '^Require Import ' ../src/All.v | sed 's_Require Import _echo;echo;echo;echo;echo;cat _' | bash >> GArrows.v cd .temp; time $(coqc) -dont-load-proofs -verbose GArrows.v diff --git a/examples/Demo.hs b/examples/Demo.hs new file mode 100644 index 0000000..f5069fa --- /dev/null +++ b/examples/Demo.hs @@ -0,0 +1,17 @@ +{-# OPTIONS_GHC -XRankNTypes -XScopedTypeVariables -XFlexibleContexts -XModalTypes -XKindSignatures -fcoqpass -XMultiParamTypeClasses -dcore-lint #-} +import GHC.HetMet.GArrow +import GHC.HetMet.CodeTypes +import GHC.HetMet.Private +import GArrowTikZ + +{- +foo :: (forall g a . <[ () -> a + PGArrow g (GArrowUnit g) a -> + (forall b . PGArrow g (GArrowTensor g b b) b) -> +-} +--foo con mer = <[ ~~mer ~~con ~~con ]> +foo f = <[ ~~f ]> + +--tester2 f = <[ \x -> ~~f x x ]> + +main = tikz' $ \a b -> pga_flatten (foo (pga_unflatten a)) diff --git a/examples/GArrowTikZ.hs b/examples/GArrowTikZ.hs index 1f62969..b3a468f 100644 --- a/examples/GArrowTikZ.hs +++ b/examples/GArrowTikZ.hs @@ -1,36 +1,446 @@ -{-# OPTIONS_GHC -XModalTypes -XMultiParamTypeClasses -ddump-types -XNoMonoPatBinds #-} -module GArrowTikZ +{-# OPTIONS_GHC -XModalTypes -XMultiParamTypeClasses -XNoMonoPatBinds -XKindSignatures -XGADTs -XFlexibleContexts -XFlexibleInstances -XTypeOperators -XUndecidableInstances -XTypeFamilies #-} +module GArrowTikZ (tikz, tikz', GArrowTikZ(..)) where -import Prelude hiding ( id, (.) 
) +import Prelude hiding ( id, (.), lookup ) +import Control.Category +import GHC.HetMet.GArrow +import Data.List hiding (lookup, insert) +import Data.Map hiding (map, (!)) +import Unify +import GHC.HetMet.Private + +{- +TO DO: + - have "resolve" turn a (Diagram UnifVal) into (Diagram Int) + - custom rendering + - bias right now is for all edges to be uppermost; try for bias + towards smallest nodes? + - curvy boxes (like XOR gates) +-} + + +-- a unification value is basically a LISP-ish expression +data UVal = + UVVar UVar + | UVVal [UVal] + +instance Unifiable UVal where + unify' (UVVal vl1) (UVVal vl2) + | length vl1 /= length vl2 = error "length mismatch during unification" + | otherwise = foldr mergeU emptyUnifier (map (\(x,y) -> unify x y) $ zip vl1 vl2) + unify' _ _ = error "impossible" + inject = UVVar + project (UVVar v) = Just v + project _ = Nothing + occurrences (UVVar v) = [v] + occurrences (UVVal vl) = concatMap occurrences vl + +-- | Resolves a unification variable; the answer to this query will change if subsequent unifications are performed +getU' :: Unifier UVal -> UVal -> UVal +getU' u (UVVal vl) = UVVal $ map (getU' u) vl +getU' u x@(UVVar v) = case Unify.getU u v of + Nothing -> x + Just x' -> getU' u x' + + + -- --- Render a fully-polymorphic GArrow term as a boxes-and-wires diagram using TikZ +-- | Render a fully-polymorphic GArrow term as a boxes-and-wires diagram using TikZ -- -{- -instance GArrow GArrowTikZ (,) where - ga_id = - ga_comp f g = - ga_second f = - ga_cancell f = - ga_cancelr f = - ga_uncancell f = - ga_uncancelr f = - ga_assoc f = - ga_unassoc f = - -instance GArrowDrop GArrowTikZ (,) where - ga_drop = - -instance GArrowCopy GArrowTikZ (,) where - ga_copy = - -instance GArrowSwap GArrowTikZ (,) where - ga_swap = - -instance GArrowLoop GArrowTikZ (,) where - ga_loop = - -instance GArrowLiteral GArrowTikZ (,) where - ga_literal = --} +type Constraints = [(UVal, Int, UVal)] + +-- the unification monad +data UyM t a = UyM (([UVar],Unifier UVal,Constraints) -> ([UVar],Unifier UVal,Constraints,a)) +instance Monad (UyM t) + where + return x = UyM $ \(i,u,k) -> (i,u,k,x) + (UyM f) >>= g = UyM $ \(i,u,k) -> let (i',u',k',x) = f (i,u,k) in let UyM g' = g x in g' (i',u',k') + + +getU = UyM $ \(i,u,k) -> (i,u,k,u) +getM v = UyM $ \(i,u,k) -> (i,u,k,getU' u v) +occursU v x = UyM $ \(i,u,k) -> (i,u,k,occurs u v x) +unifyM :: Eq t => UVal -> UVal -> UyM t () +unifyM v1 v2 = UyM $ \(i,u,k) -> (i,mergeU u (unify v1 v2),k,()) +freshU :: UyM t UVar +freshU = UyM $ \(i:is,u,k) -> (is,u,k,i) +constrain :: UVal -> Int -> UVal -> UyM t () +constrain v1 d v2 = UyM $ \(i,u,k) -> (i,u,((v1,d,v2):k),()) +getK :: UyM t [(UVal, Int, UVal)] +getK = UyM $ \(i,u,k) -> (i,u,k,k) +runU :: UyM t a -> ([UVar],Unifier UVal,Constraints,a) +runU (UyM f) = (f (uvarSupply,emptyUnifier,[])) + +data GArrowTikZ :: * -> * -> * + where + TikZ_const :: Int -> GArrowTikZ () Int + TikZ_id :: GArrowTikZ x x + TikZ_comp :: GArrowTikZ y z -> GArrowTikZ x y -> GArrowTikZ x z + TikZ_first :: GArrowTikZ x y -> GArrowTikZ (x**z) (y**z) + TikZ_second :: GArrowTikZ x y -> GArrowTikZ (z**x) (z**y) + TikZ_cancell :: GArrowTikZ (()**x) x + TikZ_cancelr :: GArrowTikZ (x**()) x + TikZ_uncancell :: GArrowTikZ x (()**x) + TikZ_uncancelr :: GArrowTikZ x (x**()) + TikZ_assoc :: GArrowTikZ ((x**y)**z) (x**(y**z)) + TikZ_unassoc :: GArrowTikZ (x**(y**z)) ((x**y)**z) + TikZ_drop :: GArrowTikZ x () + TikZ_copy :: GArrowTikZ x (x**x) + TikZ_swap :: GArrowTikZ (x**y) (y**x) + TikZ_merge :: GArrowTikZ (x**y) z + TikZ_loopl 
:: GArrowTikZ (x**z) (y**z) -> GArrowTikZ x y + TikZ_loopr :: GArrowTikZ (z**x) (z**y) -> GArrowTikZ x y + +-- +-- Technically this instance violates the laws (and RULEs) for +-- Control.Category; the compiler might choose to optimize (f >>> id) +-- into f, and this optimization would produce a change in behavior +-- below. In practice this means that the user must be prepared for +-- the rendered TikZ diagram to be merely *equivalent to* his/her +-- term, rather than structurally exactly equal to it. +-- +instance Category GArrowTikZ where + id = TikZ_id + (.) = TikZ_comp + +instance GArrow GArrowTikZ (**) () where + ga_first = TikZ_first + ga_second = TikZ_second + ga_cancell = TikZ_cancell + ga_cancelr = TikZ_cancelr + ga_uncancell = TikZ_uncancell + ga_uncancelr = TikZ_uncancelr + ga_assoc = TikZ_assoc + ga_unassoc = TikZ_unassoc + +instance GArrowDrop GArrowTikZ (**) () where + ga_drop = TikZ_drop + +instance GArrowCopy GArrowTikZ (**) () where + ga_copy = TikZ_copy + +instance GArrowSwap GArrowTikZ (**) () where + ga_swap = TikZ_swap + +instance GArrowLoop GArrowTikZ (**) () where + ga_loopl = TikZ_loopl + ga_loopr = TikZ_loopr + +type instance GArrowTensor GArrowTikZ = (,) +type instance GArrowUnit GArrowTikZ = () +type instance GArrowExponent GArrowTikZ = (->) + +instance GArrowSTKC GArrowTikZ + + +name :: GArrowTikZ a b -> String +name TikZ_id = "id" +name (TikZ_const i) = "const " ++ show i +name (TikZ_comp _ _) = "comp" +name (TikZ_first _ ) = "first" +name (TikZ_second _ ) = "second" +name TikZ_cancell = "cancell" +name TikZ_cancelr = "cancelr" +name TikZ_uncancell = "uncancell" +name TikZ_uncancelr = "uncancelr" +name TikZ_drop = "drop" +name TikZ_copy = "copy" +name TikZ_swap = "swap" +name (TikZ_loopl _ ) = "loopl" +name (TikZ_loopr _ ) = "loopr" +name TikZ_assoc = "assoc" +name TikZ_unassoc = "unassoc" + +fresh1 :: UyM () Ports +fresh1 = do { x <- freshU + ; return $ UVVar x + } + +fresh2 :: UyM () (Ports,Ports) +fresh2 = do { x <- freshU + ; y <- freshU + ; constrain (UVVar x) 1 (UVVar y) + ; return $ (UVVar x,UVVar y) + } + +fresh3 :: UyM () (Ports,Ports,Ports) +fresh3 = do { x <- freshU + ; y <- freshU + ; z <- freshU + ; constrain (UVVar x) 1 (UVVar y) + ; constrain (UVVar y) 1 (UVVar z) + ; return $ (UVVar x,UVVar y,UVVar z) + } + +fresh4 :: UyM () (Ports,Ports,Ports,Ports) +fresh4 = do { x1 <- freshU + ; x2 <- freshU + ; x3 <- freshU + ; x4 <- freshU + ; constrain (UVVar x1) 1 (UVVar x2) + ; constrain (UVVar x2) 1 (UVVar x3) + ; constrain (UVVar x3) 1 (UVVar x4) + ; return $ (UVVar x1,UVVar x2,UVVar x3,UVVar x4) + } + +fresh5 :: UyM () (Ports,Ports,Ports,Ports,Ports) +fresh5 = do { x1 <- freshU + ; x2 <- freshU + ; x3 <- freshU + ; x4 <- freshU + ; x5 <- freshU + ; constrain (UVVar x1) 1 (UVVar x2) + ; constrain (UVVar x2) 1 (UVVar x3) + ; constrain (UVVar x3) 1 (UVVar x4) + ; constrain (UVVar x4) 1 (UVVar x5) + ; return $ (UVVar x1,UVVar x2,UVVar x3,UVVar x4,UVVar x5) + } + + + + +--example = ga_first ga_drop >>> ga_cancell >>> ga_first id >>> ga_swap >>> ga_first id >>> TikZ_merge +--example :: forall x y z. forall g. (GArrow g (,) (), GArrowCopy g (,) (), GArrowSwap g (,) ()) => g x ((x,x),x) +--example = ga_copy >>> ga_second ga_copy >>> ga_second (ga_first id) >>> ga_unassoc >>> ga_first ga_swap +--example = ga_copy >>> ga_second ga_copy >>> ga_second (ga_second id) >>> ga_unassoc >>> ga_first id >>> ga_first ga_swap +--example :: forall x. forall g. 
(GArrow g (,) (), GArrowCopy g (,) (), GArrowSwap g (,) ()) => g x x +--example = id >>> id + +data Diagram p = DiagramComp (Diagram p) (Diagram p) + | DiagramPrim String p p p p (String -> Int -> Int -> Int -> p -> p -> Int -> String) + | DiagramBypassTop p (Diagram p) + | DiagramBypassBot (Diagram p) p + -- | DiagramLoopTop Diagram + -- | DiagramLoopBot Diagram + +type Ports = UVal + +getOut :: Diagram Ports -> Ports +getOut (DiagramComp f g) = getOut g +getOut (DiagramPrim s ptop pin pout pbot _) = pout +getOut (DiagramBypassTop p f) = UVVal [p, (getOut f)] +getOut (DiagramBypassBot f p) = UVVal [(getOut f), p] + +getIn :: Diagram Ports -> Ports +getIn (DiagramComp f g) = getIn f +getIn (DiagramPrim s ptop pin pout pbot _) = pin +getIn (DiagramBypassTop p f) = UVVal [p, (getIn f)] +getIn (DiagramBypassBot f p) = UVVal [(getIn f), p] + +-- constrain that Ports is at least Int units above the topmost portion of Diagram +constrainTop :: Ports -> Int -> Diagram Ports -> UyM () () +constrainTop v i (DiagramComp d1 d2) = do { constrainTop v i d1 ; constrainTop v i d2 ; return () } +constrainTop v i (DiagramBypassTop p d) = constrain v 2 p +constrainTop v i (DiagramBypassBot d p) = constrainTop v (i+1) d +constrainTop v i (DiagramPrim s ptop pin pout pbot _) = constrain v i ptop + +-- constrain that Ports is at least Int units below the bottommost portion of Diagram +constrainBot :: Diagram Ports -> Int -> Ports -> UyM () () +constrainBot (DiagramComp d1 d2) i v = do { constrainBot d1 i v ; constrainBot d2 i v ; return () } +constrainBot (DiagramBypassTop p d) i v = constrainBot d (i+1) v +constrainBot (DiagramBypassBot d p) i v = constrain p 2 v +constrainBot (DiagramPrim s ptop pin pout pbot _) i v = constrain pbot i v + +ga2diag :: GArrowTikZ a b -> UyM () (Diagram Ports) +ga2diag (TikZ_id ) = do { (top,x,bot) <- fresh3 ; return $ DiagramPrim "id" top x x bot defren } +ga2diag (TikZ_comp g f) = do { f' <- ga2diag f + ; g' <- ga2diag g + ; unifyM (getOut f') (getIn g') + ; return $ DiagramComp f' g' } +ga2diag (TikZ_first f) = do { x <- fresh1; f' <- ga2diag f ; constrainBot f' 1 x ; return $ DiagramBypassBot f' x } +ga2diag (TikZ_second f) = do { x <- fresh1; f' <- ga2diag f ; constrainTop x 1 f' ; return $ DiagramBypassTop x f' } +ga2diag TikZ_cancell = do { (top,x,y ,bot) <- fresh4 ; return $ DiagramPrim "cancell" top (UVVal [x,y]) y bot defren } +ga2diag TikZ_cancelr = do { (top,x,y ,bot) <- fresh4 ; return $ DiagramPrim "cancelr" top (UVVal [x,y]) x bot defren } +ga2diag TikZ_uncancell = do { (top,x,y ,bot) <- fresh4 ; return $ DiagramPrim "uncancell" top y (UVVal [x,y]) bot defren } +ga2diag TikZ_uncancelr = do { (top,x,y ,bot) <- fresh4 ; return $ DiagramPrim "uncancelr" top x (UVVal [x,y]) bot defren } +ga2diag TikZ_drop = do { (top,x ,bot) <- fresh3 ; return $ DiagramPrim "drop" top x x bot defren } +ga2diag (TikZ_const i) = do { (top,x ,bot) <- fresh3 ; return $ DiagramPrim ("const " ++ show i) top x x bot defren } +ga2diag TikZ_copy = do { (top,x,y,z,bot) <- fresh5 + ; return $ DiagramPrim "copy" top y (UVVal [x,z]) bot defren } +ga2diag TikZ_merge = do { (top,x,y,z,bot) <- fresh5 + ; return $ DiagramPrim "merge" top (UVVal [x,z]) y bot defren } +ga2diag TikZ_swap = do { (top,x,y ,bot) <- fresh4 + ; return $ DiagramPrim "swap" top (UVVal [x,y]) (UVVal [x,y]) bot defren } +ga2diag TikZ_assoc = do { (top,x,y,z,bot) <- fresh5 + ; return $ DiagramPrim "assoc" top (UVVal [UVVal [x,y],z])(UVVal [x,UVVal [y,z]]) bot defren } +ga2diag TikZ_unassoc = do { (top,x,y,z,bot) <- fresh5 + ; 
return $ DiagramPrim "unassoc" top (UVVal [x,UVVal [y,z]])(UVVal [UVVal [x,y],z]) bot defren } +ga2diag (TikZ_loopl f) = error "not implemented" +ga2diag (TikZ_loopr f) = error "not implemented" + +defren :: String -> Int -> Int -> Int -> Ports -> Ports -> Int -> String +defren s x w top p1 p2 bot + = drawBox x top (x+w) bot "black" s +-- ++ wires (x-1) p1 x "green" +-- ++ wires (x+w) p2 (x+w+1) "red" + +xscale = 1 +yscale = 1 + +textc x y text color = + "\\node[anchor=center,color="++color++"] at ("++show (x*xscale)++"cm,"++show (y*yscale)++"cm) "++ + "{{\\tt{"++text++"}}};\n" + +drawBox x1 y1 x2 y2 color text = + "\\node[anchor=north west] at ("++show (x1*xscale)++"cm,"++show (y1*yscale)++"cm) "++ + "{{\\tt{"++text++"}}};\n" + ++ + "\\path[draw,color="++color++"]"++ + " ("++show (x1*xscale)++","++show (y1*yscale)++") rectangle ("++ + show (x2*xscale)++","++show (y2*yscale)++");\n" + +drawLine x1 y1 x2 y2 color style = + "\\path[draw,color="++color++","++style++"] "++ + "("++show (x1*xscale)++","++show (y1*yscale)++") -- " ++ + "("++show (x2*xscale)++","++show (y2*yscale)++");\n" + +width :: Diagram Ports -> Int +width (DiagramComp d1 d2) = (width d1) + 1 + (width d2) +width (DiagramPrim s _ p1 p2 _ _) = 2 +width (DiagramBypassTop p d) = (width d) + 2 +width (DiagramBypassBot d p) = (width d) + 2 + +(!) :: Map UVar Int -> UVar -> Int +m ! x = case lookup x m of + Nothing -> 0 + Just y -> y + +tikZ :: Map UVar Int -> + Diagram Ports -> + Int -> -- horizontal position + String +tikZ m = tikZ' + where + tikZ' d@(DiagramComp d1 d2) x = tikZ' d1 x + ++ wires (x+width d1) (getOut d1) (x+width d1+1) "black" + ++ tikZ' d2 (x + width d1 + 1) + tikZ' d'@(DiagramBypassTop p d) x = let top = getTop d' in + let bot = getBot d' in + drawBox x top (x+width d') bot "gray!50" "second" + ++ drawLine x (top+1) (x+width d') (top+1) "black" "->" + ++ wires x (getIn d) (x+1) "black" + ++ tikZ' d (x+1) + ++ wires (x+1+width d) (getOut d) (x+1+width d+1) "black" + tikZ' d'@(DiagramBypassBot d p) x = let top = getTop d' in + let bot = getBot d' in + drawBox x top (x+width d') bot "gray!50" "first" + ++ drawLine x (bot-1) (x+width d') (bot-1) "black" "->" + ++ wires x (getIn d) (x+1) "black" + ++ tikZ' d (x+1) + ++ wires (x+1+width d) (getOut d) (x+1+width d+1) "black" + tikZ' d@(DiagramPrim s (UVVar top) p1 p2 (UVVar bot) r) x = r s x (width d) (m ! top) p1 p2 (m ! bot) + + wires :: Int -> Ports -> Int -> String -> String + wires x1 (UVVar v) x2 color = drawLine x1 (m ! v) x2 (m ! v) color "->" + -- ++ textc ((x1+x2) `div` 2) (m!v) (show v) "purple" + wires x1 (UVVal vl) x2 color = foldr (\x y -> x ++ " " ++ y) [] (map (\v -> wires x1 v x2 color) vl) + + getTop :: Diagram Ports -> Int + getTop (DiagramComp d1 d2) = min (getTop d1) (getTop d2) + getTop (DiagramBypassTop p d) = (m ! getleft p) - 1 + getTop (DiagramBypassBot d p) = getTop d - 1 + getTop (DiagramPrim s (UVVar ptop) _ _ _ _) = m ! ptop + + getBot :: Diagram Ports -> Int + getBot (DiagramComp d1 d2) = max (getBot d1) (getBot d2) + getBot (DiagramBypassTop p d) = getBot d + 1 + getBot (DiagramBypassBot d p) = (m ! getright p) + 1 + getBot (DiagramPrim s _ _ _ (UVVar pbot) _) = m ! 
pbot + +resolve' (DiagramComp d1 d2) = do { d1' <- resolve' d1 ; d2' <- resolve' d2 ; return $ DiagramComp d1' d2' } +resolve' (DiagramBypassTop p d) = do { p' <- getM p ; d' <- resolve' d ; return $ DiagramBypassTop p' d' } +resolve' (DiagramBypassBot d p) = do { p' <- getM p ; d' <- resolve' d ; return $ DiagramBypassBot d' p' } +resolve' (DiagramPrim s ptop pin pout pbot r) + = do { ptop' <- getM ptop + ; pbot' <- getM pbot + ; pin' <- getM pin + ; pout' <- getM pout + ; return $ DiagramPrim s ptop' pin' pout' pbot' r } + +getleft (UVVar v) = v +getleft (UVVal vl) = getleft (head vl) + +getright (UVVar v) = v +getright (UVVal vl) = getright (last vl) + +strip :: [(Ports,Int,Ports)] -> [(UVar,Int,UVar)] +strip = map (\(v1,x,v2) -> (getright v1, x, getleft v2)) + + +-- must use bubblesort because the ordering isn't transitive +sortit :: [(UVar,Int,UVar)] -> [(UVar,Int,UVar)] +sortit x = stupidSort x [] + where + stupidSort [] x = x + stupidSort (h:t) x = stupidSort t (stupidInsert h x) + + stupidInsert t [] = [t] + stupidInsert t@(_,_,t') ((a@(_,_,a')):b) = if cmp' x t' a' == LT + then t:a:b + else a:(stupidInsert t b) + + cmp' :: [(UVar,Int,UVar)] -> UVar -> UVar -> Ordering + cmp' [] a b = EQ -- compare a b + cmp' ((v1,_,v2):r) a b = if a == v1 && b==v2 + then LT + else if a == v2 && b==v1 + then GT + else cmp' r a b + +lookup'' :: Map UVar Int -> UVar -> Int +lookup'' m x = case lookup x m of + Nothing -> 0 + Just y -> y + +valuatit :: Map UVar Int -> [(UVar,Int,UVar)] -> Map UVar Int +valuatit m [] = m +valuatit m ((v1,k,v2):r) = valuatit m' r + where + m' = insert v2 v2' m + v2' = max (lookup'' m v2) (k+(lookup'' m v1)) + +resolve'k :: UyM () [(Ports,Int,Ports)] +resolve'k = do { k <- getK + ; k' <- mapM (\(v1,x,v2) -> do { v1' <- getM v1 ; v2' <- getM v2 ; return (v1',x,v2') }) k + ; return k' + } + +toTikZ :: GArrowTikZ a b -> String +toTikZ g = tikZ m d 0 + where + (_,_,_,(d,k)) = runU $ do { d <- ga2diag g + ; d' <- resolve' d + ; k' <- resolve'k + ; return (d',k') } + s = sortit (strip k) + m = valuatit empty s +toTikZ' :: GArrowTikZ a b -> String +toTikZ' g = foldr (\x y -> x++"\\\\\n"++y) [] (map foo s) + where + (_,_,_,k) = runU $ ga2diag g >>= resolve' >>= \_ -> resolve'k + foo (v1,v,v2) = "x_{"++show v1++"} + "++show v++" \\leq x_{"++show v2++"} = " ++ (show $ lookup'' m v2) + s = sortit (strip k) + m = valuatit empty s + +tikz' :: (forall g a . + PGArrow g (GArrowUnit g) a -> + ( + forall b . 
PGArrow g (GArrowTensor g b b) b) -> + PGArrow g (GArrowUnit g) a) -> IO () +tikz' x = tikz $ unG (x (PGArrowD { unG = TikZ_const 12 }) (PGArrowD { unG = TikZ_merge }) ) +main = do putStrLn "hello" +tikz example + = do putStrLn "\\documentclass{article}" + putStrLn "\\usepackage[landscape,paperheight=20in,textwidth=19in]{geometry}" + putStrLn "\\usepackage{tikz}" + putStrLn "\\usepackage{amsmath}" + putStrLn "\\begin{document}" + putStrLn $ "\\begin{tikzpicture}[every on chain/.style={join=by ->},yscale=-1]" + putStrLn (toTikZ example) + putStrLn "\\end{tikzpicture}" +-- putStrLn "\\begin{align*}" +-- putStr (toTikZ' example) +-- putStrLn "\\end{align*}" + putStrLn "\\end{document}" diff --git a/examples/Makefile b/examples/Makefile new file mode 100644 index 0000000..e411f32 --- /dev/null +++ b/examples/Makefile @@ -0,0 +1,12 @@ +all: + ../../../inplace/bin/ghc-stage2 -dcore-lint -fforce-recomp -fcoqpass -ddump-coqpass -ddump-to-file \ + `ls *.hs | grep -v Regex | grep -v Unify.hs | grep -v GArrowTikZ.hs ` +RTS -K500M + ../../../inplace/bin/ghc-stage2 -dcore-lint -fforce-recomp \ + RegexMatcher.hs Unify.hs GArrowTikZ.hs + +tikz: + mkdir -p .build + ../../../inplace/bin/ghc-stage2 -odir .build -hidir .build GArrowTikZ.hs Unify.hs Demo.hs -o .build/demo + ./.build/demo > .build/test.tex + cd .build; pdflatex test.tex + open .build/test.pdf diff --git a/examples/Unify.dump-coqpass b/examples/Unify.dump-coqpass new file mode 100644 index 0000000..e69de29 diff --git a/examples/Unify.hs b/examples/Unify.hs new file mode 100644 index 0000000..03d16fc --- /dev/null +++ b/examples/Unify.hs @@ -0,0 +1,83 @@ +-- | A very simple unification engine; used by GArrowTikZ +module Unify(UVar, Unifier, Unifiable(..), mergeU, emptyUnifier, getU, uvarSupply, unify, resolve, occurs) +where +import Prelude hiding (lookup) +import Data.Map hiding (map) +import Data.Tree +import Data.List (nub) +import Control.Monad.Error + +-- TO DO: propagate occurs-check errors through the Unifier instead of using Prelude.error + +-- | a unification variable +newtype UVar = UVar Int + deriving (Ord, Eq) + +instance Show UVar where + show (UVar v) = "u" ++ show v + +-- | A unifier is a map from unification /variables/ to unification /values/ of type @t@. +data Unifier t = Unifier (Map UVar t) + +-- | Resolves a unification variable according to a Unifier (not recursively). +getU :: Unifier t -> UVar -> Maybe t +getU (Unifier u) v = lookup v u + +-- | An infinite supply of distinct unification variables. +uvarSupply :: [UVar] +uvarSupply = uvarSupply' 0 + where + uvarSupply' n = (UVar n):(uvarSupply' $ n+1) + +-- | The empty unifier. +emptyUnifier :: Unifier t +emptyUnifier = Unifier empty + +-- | Types for which we know how to do unification. +class Unifiable t where + + -- | Turns a @UVar@ into a @t@ + inject :: UVar -> t + + -- | Discern if a @t@ is a @UVar@. Invariant: @(project (inject x) == x)@ + project :: t -> Maybe UVar + + -- | Instances must implement this; it is called by @(unify x y)@ + -- only when both @(project x)@ and @(project y)@ are @Nothing@ + unify' :: t -> t -> Unifier t + + -- | Returns a list of all @UVars@ occurring in @t@; duplicates are okay and resolution should not be performed. + occurrences :: t -> [UVar] + +-- | Returns a list of all UVars occurring anywhere in t and any UVars which +-- occur in values unified therewith. +resolve :: Unifiable t => Unifier t -> UVar -> [UVar] +resolve (Unifier u) v | member v u = v:(concatMap (resolve (Unifier u)) $ occurrences $ u ! 
v) + | otherwise = [v] + +-- | The occurs check. +occurs :: Unifiable t => Unifier t -> UVar -> t -> Bool +occurs u v x = elem v $ concatMap (resolve u) (occurrences x) + +-- | Given two unifiables, find their most general unifier. Do not override this. +unify :: Unifiable t => t -> t -> Unifier t +unify v1 v2 | (Just v1') <- project v1, (Just v2') <- project v2, v1'==v2' = emptyUnifier +unify v1 v2 | (Just v1') <- project v1 = if occurs emptyUnifier v1' v2 + then error "occurs check failed" + else Unifier $ insert v1' v2 empty +unify v1 v2 | (Just v2') <- project v2 = unify v2 v1 +unify v1 v2 | _ <- project v1, _ <- project v2 = unify' v1 v2 + +-- | Merge two unifiers into a single unifier. +mergeU :: Unifiable t => Unifier t -> Unifier t -> Unifier t +mergeU (Unifier u) u' = foldr (\(k,v) -> \uacc -> mergeU' uacc k v) u' (toList u) + where + mergeU' u@(Unifier m) v1 v2 | member v1 m = mergeU u $ unify (m ! v1) v2 + | occurs u v1 v2 = error "occurs check failed" + | otherwise = Unifier $ insert v1 v2 m + +-- | Enumerates the unification variables, sorted by occurs-check. +sortU :: (Unifiable t, Eq t) => Unifier t -> [UVar] +sortU u@(Unifier um) = reverse $ nub $ concatMap (resolve u) (keys um) + + diff --git a/examples/x b/examples/x new file mode 100644 index 0000000..48cdc1c --- /dev/null +++ b/examples/x @@ -0,0 +1,1184 @@ +[3 of 3] Compiling Main ( Demo.hs, .build/Main.o ) + +==================== Desugared, before opt ==================== +@ co_aLI::() ~ () +co_aLI = TYPE () + +@ co_aHe::GHC.HetMet.GArrow.GArrowUnit GArrowTikZ.GArrowTikZ ~ () +co_aHe = TYPE trans GArrowTikZ.TFCo:R:GArrowUnitGArrowTikZ co_aLI + +Rec { +$dGArrowSTKC_aHd + :: GHC.HetMet.GArrow.GArrowSTKC GArrowTikZ.GArrowTikZ +[LclId] +$dGArrowSTKC_aHd = GArrowTikZ.$fGArrowSTKCGArrowTikZ + +Main.foo + :: forall (t_aD6 :: * -> * -> *) t_aD7. + <[t_aD7]>@t_aD6 -> <[t_aD7]>@t_aD6 +[LclId] +Main.foo = + \ (@ t_aD6::* -> * -> *) (@ t_aD7) -> + letrec { + foo_aD5 :: <[t_aD7]>@t_aD6 -> <[t_aD7]>@t_aD6 + [LclId] + foo_aD5 = + \ (x_aD4 :: <[t_aD7]>@t_aD6) -> + GHC.HetMet.CodeTypes.hetmet_brak + @ t_aD6 + @ t_aD7 + ((GHC.HetMet.CodeTypes.hetmet_esc @ t_aD6 @ t_aD7 x_aD4) + `cast` (t_aD7 :: t_aD7 ~ t_aD7)); } in + foo_aD5 + +Main.foo' + :: forall (g_aFo :: * -> * -> *) y_aFp. 
+ (GHC.HetMet.GArrow.GArrowSTKC g_aFo, + GHC.HetMet.GArrow.GArrowUnit g_aFo ~ ()) => + GHC.HetMet.Private.PGArrow g_aFo () y_aFp -> g_aFo () y_aFp +[LclId] +Main.foo' = + \ (@ g_aFo::* -> * -> *) + (@ y_aFp) + ($dGArrowSTKC_aFq :: GHC.HetMet.GArrow.GArrowSTKC g_aFo) + (@ co_aFr::GHC.HetMet.GArrow.GArrowUnit g_aFo ~ ()) -> + let { + @ co_aMM::GHC.HetMet.GArrow.GArrowTensor g_aFo + ~ + GHC.HetMet.GArrow.GArrowTensor g_aFo + co_aMM = TYPE GHC.HetMet.GArrow.GArrowTensor g_aFo } in + let { + @ co_aML::GHC.HetMet.GArrow.GArrowTensor g_aFo + ~ + GHC.HetMet.GArrow.GArrowTensor g_aFo + co_aML = TYPE GHC.HetMet.GArrow.GArrowTensor g_aFo } in + let { + @ co_aMK::() ~ () + co_aMK = TYPE () } in + let { + @ co_aMJ::() ~ () + co_aMJ = TYPE () } in + let { + @ co_aMI::GHC.HetMet.GArrow.GArrowTensor g_aFo + ~ + GHC.HetMet.GArrow.GArrowTensor g_aFo + co_aMI = TYPE GHC.HetMet.GArrow.GArrowTensor g_aFo } in + let { + @ co_aMH::GHC.HetMet.GArrow.GArrowTensor g_aFo + ~ + GHC.HetMet.GArrow.GArrowTensor g_aFo + co_aMH = TYPE GHC.HetMet.GArrow.GArrowTensor g_aFo } in + let { + @ co_aMG::() ~ () + co_aMG = TYPE () } in + let { + @ co_aMF::() ~ () + co_aMF = TYPE () } in + let { + @ co_aME::GHC.HetMet.GArrow.GArrowTensor g_aFo + ~ + GHC.HetMet.GArrow.GArrowTensor g_aFo + co_aME = TYPE GHC.HetMet.GArrow.GArrowTensor g_aFo } in + let { + @ co_aMD::GHC.HetMet.GArrow.GArrowTensor g_aFo + ~ + GHC.HetMet.GArrow.GArrowTensor g_aFo + co_aMD = TYPE GHC.HetMet.GArrow.GArrowTensor g_aFo } in + let { + @ co_aMC::() ~ () + co_aMC = TYPE () } in + let { + @ co_aMB::() ~ () + co_aMB = TYPE () } in + let { + @ co_aMz::GHC.HetMet.GArrow.GArrowUnit g_aFo ~ () + co_aMz = TYPE trans co_aFr (sym co_aMB) } in + let { + @ co_aMu::GHC.HetMet.GArrow.GArrowUnit g_aFo ~ () + co_aMu = TYPE trans co_aFr (sym co_aMF) } in + let { + @ co_aMp::GHC.HetMet.GArrow.GArrowUnit g_aFo ~ () + co_aMp = TYPE trans co_aFr (sym co_aMJ) } in + let { + @ co_aM1::GHC.HetMet.GArrow.GArrowUnit g_aFo + ~ + GHC.HetMet.GArrow.GArrowUnit g_aFo + co_aM1 = TYPE GHC.HetMet.GArrow.GArrowUnit g_aFo } in + let { + @ co_aM4::GHC.HetMet.GArrow.GArrowUnit g_aFo ~ () + co_aM4 = TYPE trans (sym co_aM1) co_aFr } in + let { + @ co_aLZ::GHC.HetMet.GArrow.GArrowTensor g_aFo + ~ + GHC.HetMet.GArrow.GArrowTensor g_aFo + co_aLZ = TYPE GHC.HetMet.GArrow.GArrowTensor g_aFo } in + let { + @ co_aMn::GHC.HetMet.GArrow.GArrowTensor g_aFo + ~ + GHC.HetMet.GArrow.GArrowTensor g_aFo + co_aMn = TYPE trans co_aLZ (sym co_aML) } in + let { + @ co_aMs::GHC.HetMet.GArrow.GArrowTensor g_aFo + ~ + GHC.HetMet.GArrow.GArrowTensor g_aFo + co_aMs = TYPE trans co_aLZ (sym co_aMH) } in + let { + @ co_aMx::GHC.HetMet.GArrow.GArrowTensor g_aFo + ~ + GHC.HetMet.GArrow.GArrowTensor g_aFo + co_aMx = TYPE trans co_aLZ (sym co_aMD) } in + let { + @ co_aLV::GHC.HetMet.GArrow.GArrowUnit g_aFo + ~ + GHC.HetMet.GArrow.GArrowUnit g_aFo + co_aLV = TYPE GHC.HetMet.GArrow.GArrowUnit g_aFo } in + let { + @ co_aM5::GHC.HetMet.GArrow.GArrowUnit g_aFo ~ () + co_aM5 = TYPE trans (sym co_aLV) co_aFr } in + let { + @ co_aLT::GHC.HetMet.GArrow.GArrowTensor g_aFo + ~ + GHC.HetMet.GArrow.GArrowTensor g_aFo + co_aLT = TYPE GHC.HetMet.GArrow.GArrowTensor g_aFo } in + let { + @ co_aM6::GHC.HetMet.GArrow.GArrowTensor g_aFo + ~ + GHC.HetMet.GArrow.GArrowTensor g_aFo + co_aM6 = TYPE trans (sym co_aLT) co_aLZ } in + let { + @ co_aLP::GHC.HetMet.GArrow.GArrowUnit g_aFo + ~ + GHC.HetMet.GArrow.GArrowUnit g_aFo + co_aLP = TYPE GHC.HetMet.GArrow.GArrowUnit g_aFo } in + let { + @ co_aM7::GHC.HetMet.GArrow.GArrowUnit g_aFo ~ () + co_aM7 = TYPE 
trans (sym co_aLP) co_aFr } in + let { + @ co_aLN::GHC.HetMet.GArrow.GArrowTensor g_aFo + ~ + GHC.HetMet.GArrow.GArrowTensor g_aFo + co_aLN = TYPE GHC.HetMet.GArrow.GArrowTensor g_aFo } in + let { + @ co_aM8::GHC.HetMet.GArrow.GArrowTensor g_aFo + ~ + GHC.HetMet.GArrow.GArrowTensor g_aFo + co_aM8 = TYPE trans (sym co_aLN) co_aLZ } in + let { + $dGArrowSwap_aLL + :: GHC.HetMet.GArrow.GArrowSwap + g_aFo + (GHC.HetMet.GArrow.GArrowTensor g_aFo) + (GHC.HetMet.GArrow.GArrowUnit g_aFo) + [LclId] + $dGArrowSwap_aLL = + GHC.HetMet.GArrow.$p3GArrowSTKC @ g_aFo $dGArrowSTKC_aFq } in + let { + $dGArrow_aM2 + :: GHC.HetMet.GArrow.GArrow + g_aFo + (GHC.HetMet.GArrow.GArrowTensor g_aFo) + (GHC.HetMet.GArrow.GArrowUnit g_aFo) + [LclId] + $dGArrow_aM2 = + GHC.HetMet.GArrow.$p1GArrowSwap + @ g_aFo + @ (GHC.HetMet.GArrow.GArrowTensor g_aFo) + @ (GHC.HetMet.GArrow.GArrowUnit g_aFo) + $dGArrowSwap_aLL } in + let { + $dCategory_aM3 :: Control.Category.Category g_aFo + [LclId] + $dCategory_aM3 = + GHC.HetMet.GArrow.$p1GArrow + @ g_aFo + @ (GHC.HetMet.GArrow.GArrowTensor g_aFo) + @ (GHC.HetMet.GArrow.GArrowUnit g_aFo) + $dGArrow_aM2 } in + let { + $dGArrow_aMa + :: GHC.HetMet.GArrow.GArrow + g_aFo (GHC.HetMet.GArrow.GArrowTensor g_aFo) () + [LclId] + $dGArrow_aMa = + $dGArrow_aM2 + `cast` (GHC.HetMet.GArrow.T:GArrow + g_aFo (GHC.HetMet.GArrow.GArrowTensor g_aFo) co_aM4 + :: GHC.HetMet.GArrow.T:GArrow + g_aFo + (GHC.HetMet.GArrow.GArrowTensor g_aFo) + (GHC.HetMet.GArrow.GArrowUnit g_aFo) + ~ + GHC.HetMet.GArrow.T:GArrow + g_aFo (GHC.HetMet.GArrow.GArrowTensor g_aFo) ()) } in + let { + $dGArrowSwap_aM9 + :: GHC.HetMet.GArrow.GArrowSwap + g_aFo (GHC.HetMet.GArrow.GArrowTensor g_aFo) () + [LclId] + $dGArrowSwap_aM9 = + $dGArrowSwap_aLL + `cast` (GHC.HetMet.GArrow.T:GArrowSwap + g_aFo (GHC.HetMet.GArrow.GArrowTensor g_aFo) co_aM4 + :: GHC.HetMet.GArrow.T:GArrowSwap + g_aFo + (GHC.HetMet.GArrow.GArrowTensor g_aFo) + (GHC.HetMet.GArrow.GArrowUnit g_aFo) + ~ + GHC.HetMet.GArrow.T:GArrowSwap + g_aFo (GHC.HetMet.GArrow.GArrowTensor g_aFo) ()) } in + let { + $dGArrowCopy_aLK + :: GHC.HetMet.GArrow.GArrowCopy + g_aFo + (GHC.HetMet.GArrow.GArrowTensor g_aFo) + (GHC.HetMet.GArrow.GArrowUnit g_aFo) + [LclId] + $dGArrowCopy_aLK = + GHC.HetMet.GArrow.$p2GArrowSTKC @ g_aFo $dGArrowSTKC_aFq } in + let { + $dGArrow_aLW + :: GHC.HetMet.GArrow.GArrow + g_aFo + (GHC.HetMet.GArrow.GArrowTensor g_aFo) + (GHC.HetMet.GArrow.GArrowUnit g_aFo) + [LclId] + $dGArrow_aLW = + GHC.HetMet.GArrow.$p1GArrowCopy + @ g_aFo + @ (GHC.HetMet.GArrow.GArrowTensor g_aFo) + @ (GHC.HetMet.GArrow.GArrowUnit g_aFo) + $dGArrowCopy_aLK } in + let { + $dCategory_aLX :: Control.Category.Category g_aFo + [LclId] + $dCategory_aLX = + GHC.HetMet.GArrow.$p1GArrow + @ g_aFo + @ (GHC.HetMet.GArrow.GArrowTensor g_aFo) + @ (GHC.HetMet.GArrow.GArrowUnit g_aFo) + $dGArrow_aLW } in + let { + $dGArrow_aMd + :: GHC.HetMet.GArrow.GArrow + g_aFo (GHC.HetMet.GArrow.GArrowTensor g_aFo) () + [LclId] + $dGArrow_aMd = + $dGArrow_aLW + `cast` (GHC.HetMet.GArrow.T:GArrow + g_aFo (GHC.HetMet.GArrow.GArrowTensor g_aFo) co_aM5 + :: GHC.HetMet.GArrow.T:GArrow + g_aFo + (GHC.HetMet.GArrow.GArrowTensor g_aFo) + (GHC.HetMet.GArrow.GArrowUnit g_aFo) + ~ + GHC.HetMet.GArrow.T:GArrow + g_aFo (GHC.HetMet.GArrow.GArrowTensor g_aFo) ()) } in + let { + $dGArrow_aMe + :: GHC.HetMet.GArrow.GArrow + g_aFo (GHC.HetMet.GArrow.GArrowTensor g_aFo) () + [LclId] + $dGArrow_aMe = + $dGArrow_aMd + `cast` (GHC.HetMet.GArrow.T:GArrow g_aFo co_aM6 () + :: GHC.HetMet.GArrow.T:GArrow + g_aFo 
(GHC.HetMet.GArrow.GArrowTensor g_aFo) () + ~ + GHC.HetMet.GArrow.T:GArrow + g_aFo (GHC.HetMet.GArrow.GArrowTensor g_aFo) ()) } in + let { + $dGArrowCopy_aMb + :: GHC.HetMet.GArrow.GArrowCopy + g_aFo + (GHC.HetMet.GArrow.GArrowTensor g_aFo) + (GHC.HetMet.GArrow.GArrowUnit g_aFo) + [LclId] + $dGArrowCopy_aMb = + $dGArrowCopy_aLK + `cast` (GHC.HetMet.GArrow.T:GArrowCopy + g_aFo co_aM6 (GHC.HetMet.GArrow.GArrowUnit g_aFo) + :: GHC.HetMet.GArrow.T:GArrowCopy + g_aFo + (GHC.HetMet.GArrow.GArrowTensor g_aFo) + (GHC.HetMet.GArrow.GArrowUnit g_aFo) + ~ + GHC.HetMet.GArrow.T:GArrowCopy + g_aFo + (GHC.HetMet.GArrow.GArrowTensor g_aFo) + (GHC.HetMet.GArrow.GArrowUnit g_aFo)) } in + let { + $dGArrowCopy_aMc + :: GHC.HetMet.GArrow.GArrowCopy + g_aFo (GHC.HetMet.GArrow.GArrowTensor g_aFo) () + [LclId] + $dGArrowCopy_aMc = + $dGArrowCopy_aMb + `cast` (GHC.HetMet.GArrow.T:GArrowCopy + g_aFo (GHC.HetMet.GArrow.GArrowTensor g_aFo) co_aM5 + :: GHC.HetMet.GArrow.T:GArrowCopy + g_aFo + (GHC.HetMet.GArrow.GArrowTensor g_aFo) + (GHC.HetMet.GArrow.GArrowUnit g_aFo) + ~ + GHC.HetMet.GArrow.T:GArrowCopy + g_aFo (GHC.HetMet.GArrow.GArrowTensor g_aFo) ()) } in + let { + $dGArrowDrop_aLJ + :: GHC.HetMet.GArrow.GArrowDrop + g_aFo + (GHC.HetMet.GArrow.GArrowTensor g_aFo) + (GHC.HetMet.GArrow.GArrowUnit g_aFo) + [LclId] + $dGArrowDrop_aLJ = + GHC.HetMet.GArrow.$p1GArrowSTKC @ g_aFo $dGArrowSTKC_aFq } in + let { + $dGArrow_aLQ + :: GHC.HetMet.GArrow.GArrow + g_aFo + (GHC.HetMet.GArrow.GArrowTensor g_aFo) + (GHC.HetMet.GArrow.GArrowUnit g_aFo) + [LclId] + $dGArrow_aLQ = + GHC.HetMet.GArrow.$p1GArrowDrop + @ g_aFo + @ (GHC.HetMet.GArrow.GArrowTensor g_aFo) + @ (GHC.HetMet.GArrow.GArrowUnit g_aFo) + $dGArrowDrop_aLJ } in + let { + $dCategory_aLR :: Control.Category.Category g_aFo + [LclId] + $dCategory_aLR = + GHC.HetMet.GArrow.$p1GArrow + @ g_aFo + @ (GHC.HetMet.GArrow.GArrowTensor g_aFo) + @ (GHC.HetMet.GArrow.GArrowUnit g_aFo) + $dGArrow_aLQ } in + let { + $dGArrow_aMh + :: GHC.HetMet.GArrow.GArrow + g_aFo (GHC.HetMet.GArrow.GArrowTensor g_aFo) () + [LclId] + $dGArrow_aMh = + $dGArrow_aLQ + `cast` (GHC.HetMet.GArrow.T:GArrow + g_aFo (GHC.HetMet.GArrow.GArrowTensor g_aFo) co_aM7 + :: GHC.HetMet.GArrow.T:GArrow + g_aFo + (GHC.HetMet.GArrow.GArrowTensor g_aFo) + (GHC.HetMet.GArrow.GArrowUnit g_aFo) + ~ + GHC.HetMet.GArrow.T:GArrow + g_aFo (GHC.HetMet.GArrow.GArrowTensor g_aFo) ()) } in + let { + $dGArrow_aMi + :: GHC.HetMet.GArrow.GArrow + g_aFo (GHC.HetMet.GArrow.GArrowTensor g_aFo) () + [LclId] + $dGArrow_aMi = + $dGArrow_aMh + `cast` (GHC.HetMet.GArrow.T:GArrow g_aFo co_aM8 () + :: GHC.HetMet.GArrow.T:GArrow + g_aFo (GHC.HetMet.GArrow.GArrowTensor g_aFo) () + ~ + GHC.HetMet.GArrow.T:GArrow + g_aFo (GHC.HetMet.GArrow.GArrowTensor g_aFo) ()) } in + let { + $dGArrowDrop_aMf + :: GHC.HetMet.GArrow.GArrowDrop + g_aFo + (GHC.HetMet.GArrow.GArrowTensor g_aFo) + (GHC.HetMet.GArrow.GArrowUnit g_aFo) + [LclId] + $dGArrowDrop_aMf = + $dGArrowDrop_aLJ + `cast` (GHC.HetMet.GArrow.T:GArrowDrop + g_aFo co_aM8 (GHC.HetMet.GArrow.GArrowUnit g_aFo) + :: GHC.HetMet.GArrow.T:GArrowDrop + g_aFo + (GHC.HetMet.GArrow.GArrowTensor g_aFo) + (GHC.HetMet.GArrow.GArrowUnit g_aFo) + ~ + GHC.HetMet.GArrow.T:GArrowDrop + g_aFo + (GHC.HetMet.GArrow.GArrowTensor g_aFo) + (GHC.HetMet.GArrow.GArrowUnit g_aFo)) } in + let { + $dGArrowDrop_aMg + :: GHC.HetMet.GArrow.GArrowDrop + g_aFo (GHC.HetMet.GArrow.GArrowTensor g_aFo) () + [LclId] + $dGArrowDrop_aMg = + $dGArrowDrop_aMf + `cast` (GHC.HetMet.GArrow.T:GArrowDrop + g_aFo (GHC.HetMet.GArrow.GArrowTensor 
g_aFo) co_aM7 + :: GHC.HetMet.GArrow.T:GArrowDrop + g_aFo + (GHC.HetMet.GArrow.GArrowTensor g_aFo) + (GHC.HetMet.GArrow.GArrowUnit g_aFo) + ~ + GHC.HetMet.GArrow.T:GArrowDrop + g_aFo (GHC.HetMet.GArrow.GArrowTensor g_aFo) ()) } in + let { + @ co_aF3::GHC.HetMet.GArrow.GArrowUnit g_aFo ~ () + co_aF3 = TYPE co_aFr } in + let { + @ co_aEX::() ~ () + co_aEX = TYPE () } in + let { + @ co_aEI::GHC.HetMet.GArrow.GArrowTensor g_aFo + ~ + GHC.HetMet.GArrow.GArrowTensor g_aFo + co_aEI = TYPE GHC.HetMet.GArrow.GArrowTensor g_aFo } in + let { + @ co_aEG::GHC.HetMet.GArrow.GArrowTensor g_aFo + ~ + GHC.HetMet.GArrow.GArrowTensor g_aFo + co_aEG = TYPE GHC.HetMet.GArrow.GArrowTensor g_aFo } in + let { + @ co_aEF::GHC.HetMet.GArrow.GArrowTensor g_aFo + ~ + GHC.HetMet.GArrow.GArrowTensor g_aFo + co_aEF = TYPE GHC.HetMet.GArrow.GArrowTensor g_aFo } in + let { + @ co_aEE::GHC.HetMet.GArrow.GArrowUnit g_aFo ~ () + co_aEE = + TYPE trans + (trans (GHC.HetMet.GArrow.GArrowUnit g_aFo) co_aF3) + (sym co_aEX) } in + let { + @ co_aEz::() ~ () + co_aEz = TYPE () } in + let { + @ co_aF0::() ~ () + co_aF0 = TYPE trans co_aEz co_aEX } in + let { + @ co_aEy::() ~ () + co_aEy = TYPE () } in + let { + @ co_aEx::GHC.HetMet.GArrow.GArrowTensor g_aFo + ~ + GHC.HetMet.GArrow.GArrowTensor g_aFo + co_aEx = + TYPE trans + (trans (GHC.HetMet.GArrow.GArrowTensor g_aFo) co_aEI) + (sym co_aEG) } in + let { + @ co_aEw::GHC.HetMet.GArrow.GArrowTensor g_aFo + ~ + GHC.HetMet.GArrow.GArrowTensor g_aFo + co_aEw = TYPE GHC.HetMet.GArrow.GArrowTensor g_aFo } in + let { + @ co_aEH::GHC.HetMet.GArrow.GArrowTensor g_aFo + ~ + GHC.HetMet.GArrow.GArrowTensor g_aFo + co_aEH = TYPE trans co_aEw co_aEG } in + let { + @ co_aEv::GHC.HetMet.GArrow.GArrowTensor g_aFo + ~ + GHC.HetMet.GArrow.GArrowTensor g_aFo + co_aEv = TYPE GHC.HetMet.GArrow.GArrowTensor g_aFo } in + let { + @ co_aEu::GHC.HetMet.GArrow.GArrowUnit g_aFo ~ () + co_aEu = + TYPE trans + (trans (GHC.HetMet.GArrow.GArrowUnit g_aFo) co_aEE) + (sym co_aEz) } in + let { + @ co_aEq::() ~ () + co_aEq = TYPE () } in + let { + @ co_aEC::() ~ () + co_aEC = TYPE trans co_aEq co_aEz } in + let { + @ co_aEZ::() ~ () + co_aEZ = TYPE trans co_aEC co_aEX } in + let { + @ co_aEp::() ~ () + co_aEp = TYPE () } in + let { + @ co_aEo::GHC.HetMet.GArrow.GArrowUnit g_aFo ~ () + co_aEo = + TYPE trans + (trans (GHC.HetMet.GArrow.GArrowUnit g_aFo) co_aEu) + (sym co_aEq) } in + let { + @ co_aEl::() ~ () + co_aEl = TYPE () } in + let { + @ co_aEs::() ~ () + co_aEs = TYPE trans co_aEl co_aEq } in + let { + @ co_aED::() ~ () + co_aED = TYPE trans co_aEs co_aEz } in + let { + @ co_aF1::() ~ () + co_aF1 = TYPE trans co_aED co_aEX } in + let { + @ co_aEk::() ~ () + co_aEk = TYPE () } in + let { + @ co_aEj::GHC.HetMet.GArrow.GArrowUnit g_aFo ~ () + co_aEj = + TYPE trans + (trans (GHC.HetMet.GArrow.GArrowUnit g_aFo) co_aEo) + (sym co_aEl) } in + let { + @ co_aEh::() ~ () + co_aEh = TYPE () } in + let { + @ co_aEn::() ~ () + co_aEn = TYPE trans co_aEh co_aEl } in + let { + @ co_aEr::() ~ () + co_aEr = TYPE trans co_aEn co_aEq } in + let { + @ co_aEA::() ~ () + co_aEA = TYPE trans co_aEr co_aEz } in + let { + @ co_aEY::() ~ () + co_aEY = TYPE trans co_aEA co_aEX } in + let { + @ co_aEg::() ~ () + co_aEg = TYPE () } in + let { + @ co_aEf::GHC.HetMet.Private.PGArrow g_aFo () y_aFp + ~ + GHC.HetMet.Private.PGArrow g_aFo () y_aFp + co_aEf = TYPE GHC.HetMet.Private.PGArrow g_aFo () y_aFp } in + let { + @ co_aEi::GHC.HetMet.Private.PGArrow g_aFo () y_aFp + ~ + GHC.HetMet.Private.PGArrow g_aFo () y_aFp + co_aEi = + TYPE trans + 
co_aEf (GHC.HetMet.Private.PGArrow g_aFo co_aEh y_aFp) } in + let { + @ co_aEm::GHC.HetMet.Private.PGArrow g_aFo () y_aFp + ~ + GHC.HetMet.Private.PGArrow g_aFo () y_aFp + co_aEm = + TYPE trans + co_aEi (GHC.HetMet.Private.PGArrow g_aFo co_aEl y_aFp) } in + let { + @ co_aEt::GHC.HetMet.Private.PGArrow g_aFo () y_aFp + ~ + GHC.HetMet.Private.PGArrow g_aFo () y_aFp + co_aEt = + TYPE trans + co_aEm (GHC.HetMet.Private.PGArrow g_aFo co_aEq y_aFp) } in + let { + @ co_aEB::GHC.HetMet.Private.PGArrow g_aFo () y_aFp + ~ + GHC.HetMet.Private.PGArrow g_aFo () y_aFp + co_aEB = + TYPE trans + co_aEt (GHC.HetMet.Private.PGArrow g_aFo co_aEz y_aFp) } in + let { + @ co_aF2::GHC.HetMet.Private.PGArrow g_aFo () y_aFp + ~ + GHC.HetMet.Private.PGArrow g_aFo () y_aFp + co_aF2 = + TYPE trans + co_aEB (GHC.HetMet.Private.PGArrow g_aFo co_aEX y_aFp) } in + let { + @ co_aEd::GHC.HetMet.GArrow.GArrowUnit g_aFo ~ () + co_aEd = TYPE trans co_aEj (sym co_aEk) } in + let { + @ co_aEb::GHC.HetMet.GArrow.GArrowTensor g_aFo + ~ + GHC.HetMet.GArrow.GArrowTensor g_aFo + co_aEb = + TYPE trans + (trans (GHC.HetMet.GArrow.GArrowTensor g_aFo) co_aEx) + (sym co_aEw) } in + let { + @ co_aE8::GHC.HetMet.GArrow.GArrowUnit g_aFo ~ () + co_aE8 = TYPE trans co_aEo (sym co_aEp) } in + let { + @ co_aE6::GHC.HetMet.GArrow.GArrowTensor g_aFo + ~ + GHC.HetMet.GArrow.GArrowTensor g_aFo + co_aE6 = TYPE trans co_aEb (sym co_aEv) } in + let { + @ co_aE3::GHC.HetMet.GArrow.GArrowUnit g_aFo ~ () + co_aE3 = TYPE trans co_aEu (sym co_aEy) } in + let { + @ co_aE1::GHC.HetMet.GArrow.GArrowTensor g_aFo + ~ + GHC.HetMet.GArrow.GArrowTensor g_aFo + co_aE1 = TYPE trans co_aEx (sym co_aEF) } in + let { + @ co_aDD::GHC.HetMet.Private.PGArrow g_aFo () y_aFp + ~ + GHC.HetMet.Private.PGArrow g_aFo () y_aFp + co_aDD = TYPE GHC.HetMet.Private.PGArrow g_aFo () y_aFp } in + let { + @ co_aDC::GHC.HetMet.GArrow.GArrowUnit g_aFo ~ () + co_aDC = + TYPE trans + (trans (GHC.HetMet.GArrow.GArrowUnit g_aFo) co_aEj) + (sym co_aEh) } in + let { + @ co_aDA::GHC.HetMet.Private.PGArrow g_aFo () y_aFp + ~ + GHC.HetMet.Private.PGArrow + g_aFo (GHC.HetMet.GArrow.GArrowUnit g_aFo) y_aFp + co_aDA = + TYPE trans + co_aDD + (GHC.HetMet.Private.PGArrow + g_aFo + (trans (sym co_aDC) (GHC.HetMet.GArrow.GArrowUnit g_aFo)) + y_aFp) } in + let { + @ co_aDv::GHC.HetMet.Private.PGArrow + g_aFo (GHC.HetMet.GArrow.GArrowUnit g_aFo) y_aFp + ~ + GHC.HetMet.Private.PGArrow g_aFo () y_aFp + co_aDv = TYPE sym co_aDA } in + let { + @ co_aDr::GHC.HetMet.GArrow.GArrowUnit g_aFo ~ () + co_aDr = TYPE trans co_aDC (sym co_aEg) } in + let { + $dGArrowSTKC_aDm :: GHC.HetMet.GArrow.GArrowSTKC g_aFo + [LclId] + $dGArrowSTKC_aDm = $dGArrowSTKC_aFq } in + letrec { + foo'_aDw + :: GHC.HetMet.Private.PGArrow g_aFo () y_aFp -> g_aFo () y_aFp + [LclId] + foo'_aDw = + GHC.Base.. + @ (GHC.HetMet.Private.PGArrow g_aFo () y_aFp) + @ (g_aFo () y_aFp) + @ (GHC.HetMet.Private.PGArrow g_aFo () y_aFp) + (GHC.HetMet.Private.unG @ g_aFo @ () @ y_aFp $dGArrowSTKC_aDm) + (GHC.Base.. + @ <[y_aFp]>@g_aFo + @ (GHC.HetMet.Private.PGArrow g_aFo () y_aFp) + @ (GHC.HetMet.Private.PGArrow g_aFo () y_aFp) + ((GHC.HetMet.CodeTypes.pga_flatten @ g_aFo @ GHC.Prim.Any @ y_aFp) + `cast` (<[y_aFp]>@g_aFo + -> GHC.HetMet.Private.PGArrow g_aFo co_aDr y_aFp + :: (<[y_aFp]>@g_aFo + -> GHC.HetMet.Private.PGArrow + g_aFo (GHC.HetMet.GArrow.GArrowUnit g_aFo) y_aFp) + ~ + (<[y_aFp]>@g_aFo -> GHC.HetMet.Private.PGArrow g_aFo () y_aFp))) + (GHC.Base.. 
+ @ <[y_aFp]>@g_aFo + @ <[y_aFp]>@g_aFo + @ (GHC.HetMet.Private.PGArrow g_aFo () y_aFp) + (Main.foo @ g_aFo @ y_aFp) + ((GHC.HetMet.CodeTypes.pga_unflatten + @ g_aFo @ GHC.Prim.Any @ y_aFp) + `cast` (co_aDv -> <[y_aFp]>@g_aFo + :: (GHC.HetMet.Private.PGArrow + g_aFo (GHC.HetMet.GArrow.GArrowUnit g_aFo) y_aFp + -> <[y_aFp]>@g_aFo) + ~ + (GHC.HetMet.Private.PGArrow g_aFo () y_aFp + -> <[y_aFp]>@g_aFo))))); } in + foo'_aDw + +Main.main :: GHC.Types.IO () +[LclIdX] +Main.main = + let { + @ co_aLu::GHC.Types.Int ~ GHC.Types.Int + co_aLu = TYPE GHC.Types.Int } in + let { + @ co_aLt::GHC.Types.Int ~ GHC.Types.Int + co_aLt = TYPE GHC.Types.Int } in + letrec { + main_aHj :: GHC.Types.IO () + [LclId] + main_aHj = + GArrowTikZ.tikz + @ () + @ GHC.Types.Int + (Main.foo' + @ GArrowTikZ.GArrowTikZ + @ GHC.Types.Int + $dGArrowSTKC_aHd + @ co_aHe + (GHC.HetMet.Private.PGArrowD + @ GArrowTikZ.GArrowTikZ + @ () + @ GHC.Types.Int + (\ ($dGArrowSTKC_aHg + :: GHC.HetMet.GArrow.GArrowSTKC GArrowTikZ.GArrowTikZ) -> + let { + @ co_aHD::GHC.HetMet.GArrow.GArrowUnit GArrowTikZ.GArrowTikZ + ~ + GHC.HetMet.GArrow.GArrowUnit GArrowTikZ.GArrowTikZ + co_aHD = + TYPE GHC.HetMet.GArrow.GArrowUnit GArrowTikZ.GArrowTikZ } in + let { + @ co_aHG::GHC.HetMet.GArrow.GArrowUnit GArrowTikZ.GArrowTikZ ~ () + co_aHG = + TYPE trans (sym co_aHD) GArrowTikZ.TFCo:R:GArrowUnitGArrowTikZ } in + let { + @ co_aHB::GHC.HetMet.GArrow.GArrowTensor GArrowTikZ.GArrowTikZ + ~ + GHC.HetMet.GArrow.GArrowTensor GArrowTikZ.GArrowTikZ + co_aHB = + TYPE GHC.HetMet.GArrow.GArrowTensor GArrowTikZ.GArrowTikZ } in + let { + @ co_aHH::GHC.HetMet.GArrow.GArrowTensor GArrowTikZ.GArrowTikZ + ~ + (,) + co_aHH = + TYPE trans + (sym co_aHB) GArrowTikZ.TFCo:R:GArrowTensorGArrowTikZ } in + let { + @ co_aHx::GHC.HetMet.GArrow.GArrowUnit GArrowTikZ.GArrowTikZ + ~ + GHC.HetMet.GArrow.GArrowUnit GArrowTikZ.GArrowTikZ + co_aHx = + TYPE GHC.HetMet.GArrow.GArrowUnit GArrowTikZ.GArrowTikZ } in + let { + @ co_aLd::GHC.HetMet.GArrow.GArrowUnit GArrowTikZ.GArrowTikZ ~ () + co_aLd = + TYPE trans (sym co_aHx) GArrowTikZ.TFCo:R:GArrowUnitGArrowTikZ } in + let { + @ co_aHv::GHC.HetMet.GArrow.GArrowTensor GArrowTikZ.GArrowTikZ + ~ + GHC.HetMet.GArrow.GArrowTensor GArrowTikZ.GArrowTikZ + co_aHv = + TYPE GHC.HetMet.GArrow.GArrowTensor GArrowTikZ.GArrowTikZ } in + let { + @ co_aLe::GHC.HetMet.GArrow.GArrowTensor GArrowTikZ.GArrowTikZ + ~ + (,) + co_aLe = + TYPE trans + (sym co_aHv) GArrowTikZ.TFCo:R:GArrowTensorGArrowTikZ } in + let { + @ co_aHr::GHC.HetMet.GArrow.GArrowUnit GArrowTikZ.GArrowTikZ + ~ + GHC.HetMet.GArrow.GArrowUnit GArrowTikZ.GArrowTikZ + co_aHr = + TYPE GHC.HetMet.GArrow.GArrowUnit GArrowTikZ.GArrowTikZ } in + let { + @ co_aLf::GHC.HetMet.GArrow.GArrowUnit GArrowTikZ.GArrowTikZ ~ () + co_aLf = + TYPE trans (sym co_aHr) GArrowTikZ.TFCo:R:GArrowUnitGArrowTikZ } in + let { + @ co_aHp::GHC.HetMet.GArrow.GArrowTensor GArrowTikZ.GArrowTikZ + ~ + GHC.HetMet.GArrow.GArrowTensor GArrowTikZ.GArrowTikZ + co_aHp = + TYPE GHC.HetMet.GArrow.GArrowTensor GArrowTikZ.GArrowTikZ } in + let { + @ co_aLg::GHC.HetMet.GArrow.GArrowTensor GArrowTikZ.GArrowTikZ + ~ + (,) + co_aLg = + TYPE trans + (sym co_aHp) GArrowTikZ.TFCo:R:GArrowTensorGArrowTikZ } in + let { + $dGArrowSwap_aHn + :: GHC.HetMet.GArrow.GArrowSwap + GArrowTikZ.GArrowTikZ + (GHC.HetMet.GArrow.GArrowTensor GArrowTikZ.GArrowTikZ) + (GHC.HetMet.GArrow.GArrowUnit GArrowTikZ.GArrowTikZ) + [LclId] + $dGArrowSwap_aHn = + GHC.HetMet.GArrow.$p3GArrowSTKC + @ GArrowTikZ.GArrowTikZ $dGArrowSTKC_aHg } in + let { + $dGArrow_aHE + :: 
GHC.HetMet.GArrow.GArrow + GArrowTikZ.GArrowTikZ + (GHC.HetMet.GArrow.GArrowTensor GArrowTikZ.GArrowTikZ) + (GHC.HetMet.GArrow.GArrowUnit GArrowTikZ.GArrowTikZ) + [LclId] + $dGArrow_aHE = + GHC.HetMet.GArrow.$p1GArrowSwap + @ GArrowTikZ.GArrowTikZ + @ (GHC.HetMet.GArrow.GArrowTensor GArrowTikZ.GArrowTikZ) + @ (GHC.HetMet.GArrow.GArrowUnit GArrowTikZ.GArrowTikZ) + $dGArrowSwap_aHn } in + let { + $dCategory_aHF :: Control.Category.Category GArrowTikZ.GArrowTikZ + [LclId] + $dCategory_aHF = + GHC.HetMet.GArrow.$p1GArrow + @ GArrowTikZ.GArrowTikZ + @ (GHC.HetMet.GArrow.GArrowTensor GArrowTikZ.GArrowTikZ) + @ (GHC.HetMet.GArrow.GArrowUnit GArrowTikZ.GArrowTikZ) + $dGArrow_aHE } in + let { + $dGArrow_aLj + :: GHC.HetMet.GArrow.GArrow + GArrowTikZ.GArrowTikZ + (,) + (GHC.HetMet.GArrow.GArrowUnit GArrowTikZ.GArrowTikZ) + [LclId] + $dGArrow_aLj = + $dGArrow_aHE + `cast` (GHC.HetMet.GArrow.T:GArrow + GArrowTikZ.GArrowTikZ + co_aHH + (GHC.HetMet.GArrow.GArrowUnit GArrowTikZ.GArrowTikZ) + :: GHC.HetMet.GArrow.T:GArrow + GArrowTikZ.GArrowTikZ + (GHC.HetMet.GArrow.GArrowTensor GArrowTikZ.GArrowTikZ) + (GHC.HetMet.GArrow.GArrowUnit GArrowTikZ.GArrowTikZ) + ~ + GHC.HetMet.GArrow.T:GArrow + GArrowTikZ.GArrowTikZ + (,) + (GHC.HetMet.GArrow.GArrowUnit GArrowTikZ.GArrowTikZ)) } in + let { + $dGArrow_aLk + :: GHC.HetMet.GArrow.GArrow GArrowTikZ.GArrowTikZ (,) () + [LclId] + $dGArrow_aLk = + $dGArrow_aLj + `cast` (GHC.HetMet.GArrow.T:GArrow GArrowTikZ.GArrowTikZ (,) co_aHG + :: GHC.HetMet.GArrow.T:GArrow + GArrowTikZ.GArrowTikZ + (,) + (GHC.HetMet.GArrow.GArrowUnit GArrowTikZ.GArrowTikZ) + ~ + GHC.HetMet.GArrow.T:GArrow GArrowTikZ.GArrowTikZ (,) ()) } in + let { + $dGArrowSwap_aLh + :: GHC.HetMet.GArrow.GArrowSwap + GArrowTikZ.GArrowTikZ + (GHC.HetMet.GArrow.GArrowTensor GArrowTikZ.GArrowTikZ) + () + [LclId] + $dGArrowSwap_aLh = + $dGArrowSwap_aHn + `cast` (GHC.HetMet.GArrow.T:GArrowSwap + GArrowTikZ.GArrowTikZ + (GHC.HetMet.GArrow.GArrowTensor GArrowTikZ.GArrowTikZ) + co_aHG + :: GHC.HetMet.GArrow.T:GArrowSwap + GArrowTikZ.GArrowTikZ + (GHC.HetMet.GArrow.GArrowTensor GArrowTikZ.GArrowTikZ) + (GHC.HetMet.GArrow.GArrowUnit GArrowTikZ.GArrowTikZ) + ~ + GHC.HetMet.GArrow.T:GArrowSwap + GArrowTikZ.GArrowTikZ + (GHC.HetMet.GArrow.GArrowTensor GArrowTikZ.GArrowTikZ) + ()) } in + let { + $dGArrowSwap_aLi + :: GHC.HetMet.GArrow.GArrowSwap GArrowTikZ.GArrowTikZ (,) () + [LclId] + $dGArrowSwap_aLi = + $dGArrowSwap_aLh + `cast` (GHC.HetMet.GArrow.T:GArrowSwap + GArrowTikZ.GArrowTikZ co_aHH () + :: GHC.HetMet.GArrow.T:GArrowSwap + GArrowTikZ.GArrowTikZ + (GHC.HetMet.GArrow.GArrowTensor GArrowTikZ.GArrowTikZ) + () + ~ + GHC.HetMet.GArrow.T:GArrowSwap GArrowTikZ.GArrowTikZ (,) ()) } in + let { + $dGArrowCopy_aHm + :: GHC.HetMet.GArrow.GArrowCopy + GArrowTikZ.GArrowTikZ + (GHC.HetMet.GArrow.GArrowTensor GArrowTikZ.GArrowTikZ) + (GHC.HetMet.GArrow.GArrowUnit GArrowTikZ.GArrowTikZ) + [LclId] + $dGArrowCopy_aHm = + GHC.HetMet.GArrow.$p2GArrowSTKC + @ GArrowTikZ.GArrowTikZ $dGArrowSTKC_aHg } in + let { + $dGArrow_aHy + :: GHC.HetMet.GArrow.GArrow + GArrowTikZ.GArrowTikZ + (GHC.HetMet.GArrow.GArrowTensor GArrowTikZ.GArrowTikZ) + (GHC.HetMet.GArrow.GArrowUnit GArrowTikZ.GArrowTikZ) + [LclId] + $dGArrow_aHy = + GHC.HetMet.GArrow.$p1GArrowCopy + @ GArrowTikZ.GArrowTikZ + @ (GHC.HetMet.GArrow.GArrowTensor GArrowTikZ.GArrowTikZ) + @ (GHC.HetMet.GArrow.GArrowUnit GArrowTikZ.GArrowTikZ) + $dGArrowCopy_aHm } in + let { + $dCategory_aHz :: Control.Category.Category GArrowTikZ.GArrowTikZ + [LclId] + $dCategory_aHz = + 
GHC.HetMet.GArrow.$p1GArrow + @ GArrowTikZ.GArrowTikZ + @ (GHC.HetMet.GArrow.GArrowTensor GArrowTikZ.GArrowTikZ) + @ (GHC.HetMet.GArrow.GArrowUnit GArrowTikZ.GArrowTikZ) + $dGArrow_aHy } in + let { + $dGArrow_aLn + :: GHC.HetMet.GArrow.GArrow + GArrowTikZ.GArrowTikZ + (GHC.HetMet.GArrow.GArrowTensor GArrowTikZ.GArrowTikZ) + () + [LclId] + $dGArrow_aLn = + $dGArrow_aHy + `cast` (GHC.HetMet.GArrow.T:GArrow + GArrowTikZ.GArrowTikZ + (GHC.HetMet.GArrow.GArrowTensor GArrowTikZ.GArrowTikZ) + co_aLd + :: GHC.HetMet.GArrow.T:GArrow + GArrowTikZ.GArrowTikZ + (GHC.HetMet.GArrow.GArrowTensor GArrowTikZ.GArrowTikZ) + (GHC.HetMet.GArrow.GArrowUnit GArrowTikZ.GArrowTikZ) + ~ + GHC.HetMet.GArrow.T:GArrow + GArrowTikZ.GArrowTikZ + (GHC.HetMet.GArrow.GArrowTensor GArrowTikZ.GArrowTikZ) + ()) } in + let { + $dGArrow_aLo + :: GHC.HetMet.GArrow.GArrow GArrowTikZ.GArrowTikZ (,) () + [LclId] + $dGArrow_aLo = + $dGArrow_aLn + `cast` (GHC.HetMet.GArrow.T:GArrow GArrowTikZ.GArrowTikZ co_aLe () + :: GHC.HetMet.GArrow.T:GArrow + GArrowTikZ.GArrowTikZ + (GHC.HetMet.GArrow.GArrowTensor GArrowTikZ.GArrowTikZ) + () + ~ + GHC.HetMet.GArrow.T:GArrow GArrowTikZ.GArrowTikZ (,) ()) } in + let { + $dGArrowCopy_aLl + :: GHC.HetMet.GArrow.GArrowCopy + GArrowTikZ.GArrowTikZ + (,) + (GHC.HetMet.GArrow.GArrowUnit GArrowTikZ.GArrowTikZ) + [LclId] + $dGArrowCopy_aLl = + $dGArrowCopy_aHm + `cast` (GHC.HetMet.GArrow.T:GArrowCopy + GArrowTikZ.GArrowTikZ + co_aLe + (GHC.HetMet.GArrow.GArrowUnit GArrowTikZ.GArrowTikZ) + :: GHC.HetMet.GArrow.T:GArrowCopy + GArrowTikZ.GArrowTikZ + (GHC.HetMet.GArrow.GArrowTensor GArrowTikZ.GArrowTikZ) + (GHC.HetMet.GArrow.GArrowUnit GArrowTikZ.GArrowTikZ) + ~ + GHC.HetMet.GArrow.T:GArrowCopy + GArrowTikZ.GArrowTikZ + (,) + (GHC.HetMet.GArrow.GArrowUnit GArrowTikZ.GArrowTikZ)) } in + let { + $dGArrowCopy_aLm + :: GHC.HetMet.GArrow.GArrowCopy GArrowTikZ.GArrowTikZ (,) () + [LclId] + $dGArrowCopy_aLm = + $dGArrowCopy_aLl + `cast` (GHC.HetMet.GArrow.T:GArrowCopy + GArrowTikZ.GArrowTikZ (,) co_aLd + :: GHC.HetMet.GArrow.T:GArrowCopy + GArrowTikZ.GArrowTikZ + (,) + (GHC.HetMet.GArrow.GArrowUnit GArrowTikZ.GArrowTikZ) + ~ + GHC.HetMet.GArrow.T:GArrowCopy GArrowTikZ.GArrowTikZ (,) ()) } in + let { + $dGArrowDrop_aHl + :: GHC.HetMet.GArrow.GArrowDrop + GArrowTikZ.GArrowTikZ + (GHC.HetMet.GArrow.GArrowTensor GArrowTikZ.GArrowTikZ) + (GHC.HetMet.GArrow.GArrowUnit GArrowTikZ.GArrowTikZ) + [LclId] + $dGArrowDrop_aHl = + GHC.HetMet.GArrow.$p1GArrowSTKC + @ GArrowTikZ.GArrowTikZ $dGArrowSTKC_aHg } in + let { + $dGArrow_aHs + :: GHC.HetMet.GArrow.GArrow + GArrowTikZ.GArrowTikZ + (GHC.HetMet.GArrow.GArrowTensor GArrowTikZ.GArrowTikZ) + (GHC.HetMet.GArrow.GArrowUnit GArrowTikZ.GArrowTikZ) + [LclId] + $dGArrow_aHs = + GHC.HetMet.GArrow.$p1GArrowDrop + @ GArrowTikZ.GArrowTikZ + @ (GHC.HetMet.GArrow.GArrowTensor GArrowTikZ.GArrowTikZ) + @ (GHC.HetMet.GArrow.GArrowUnit GArrowTikZ.GArrowTikZ) + $dGArrowDrop_aHl } in + let { + $dCategory_aHt :: Control.Category.Category GArrowTikZ.GArrowTikZ + [LclId] + $dCategory_aHt = + GHC.HetMet.GArrow.$p1GArrow + @ GArrowTikZ.GArrowTikZ + @ (GHC.HetMet.GArrow.GArrowTensor GArrowTikZ.GArrowTikZ) + @ (GHC.HetMet.GArrow.GArrowUnit GArrowTikZ.GArrowTikZ) + $dGArrow_aHs } in + let { + $dGArrow_aLr + :: GHC.HetMet.GArrow.GArrow + GArrowTikZ.GArrowTikZ + (GHC.HetMet.GArrow.GArrowTensor GArrowTikZ.GArrowTikZ) + () + [LclId] + $dGArrow_aLr = + $dGArrow_aHs + `cast` (GHC.HetMet.GArrow.T:GArrow + GArrowTikZ.GArrowTikZ + (GHC.HetMet.GArrow.GArrowTensor GArrowTikZ.GArrowTikZ) + co_aLf + :: 
GHC.HetMet.GArrow.T:GArrow + GArrowTikZ.GArrowTikZ + (GHC.HetMet.GArrow.GArrowTensor GArrowTikZ.GArrowTikZ) + (GHC.HetMet.GArrow.GArrowUnit GArrowTikZ.GArrowTikZ) + ~ + GHC.HetMet.GArrow.T:GArrow + GArrowTikZ.GArrowTikZ + (GHC.HetMet.GArrow.GArrowTensor GArrowTikZ.GArrowTikZ) + ()) } in + let { + $dGArrow_aLs + :: GHC.HetMet.GArrow.GArrow GArrowTikZ.GArrowTikZ (,) () + [LclId] + $dGArrow_aLs = + $dGArrow_aLr + `cast` (GHC.HetMet.GArrow.T:GArrow GArrowTikZ.GArrowTikZ co_aLg () + :: GHC.HetMet.GArrow.T:GArrow + GArrowTikZ.GArrowTikZ + (GHC.HetMet.GArrow.GArrowTensor GArrowTikZ.GArrowTikZ) + () + ~ + GHC.HetMet.GArrow.T:GArrow GArrowTikZ.GArrowTikZ (,) ()) } in + let { + $dGArrowDrop_aLp + :: GHC.HetMet.GArrow.GArrowDrop + GArrowTikZ.GArrowTikZ + (,) + (GHC.HetMet.GArrow.GArrowUnit GArrowTikZ.GArrowTikZ) + [LclId] + $dGArrowDrop_aLp = + $dGArrowDrop_aHl + `cast` (GHC.HetMet.GArrow.T:GArrowDrop + GArrowTikZ.GArrowTikZ + co_aLg + (GHC.HetMet.GArrow.GArrowUnit GArrowTikZ.GArrowTikZ) + :: GHC.HetMet.GArrow.T:GArrowDrop + GArrowTikZ.GArrowTikZ + (GHC.HetMet.GArrow.GArrowTensor GArrowTikZ.GArrowTikZ) + (GHC.HetMet.GArrow.GArrowUnit GArrowTikZ.GArrowTikZ) + ~ + GHC.HetMet.GArrow.T:GArrowDrop + GArrowTikZ.GArrowTikZ + (,) + (GHC.HetMet.GArrow.GArrowUnit GArrowTikZ.GArrowTikZ)) } in + let { + $dGArrowDrop_aLq + :: GHC.HetMet.GArrow.GArrowDrop GArrowTikZ.GArrowTikZ (,) () + [LclId] + $dGArrowDrop_aLq = + $dGArrowDrop_aLp + `cast` (GHC.HetMet.GArrow.T:GArrowDrop + GArrowTikZ.GArrowTikZ (,) co_aLf + :: GHC.HetMet.GArrow.T:GArrowDrop + GArrowTikZ.GArrowTikZ + (,) + (GHC.HetMet.GArrow.GArrowUnit GArrowTikZ.GArrowTikZ) + ~ + GHC.HetMet.GArrow.T:GArrowDrop GArrowTikZ.GArrowTikZ (,) ()) } in + let { + @ co_aHh::GHC.Types.Int ~ GHC.Types.Int + co_aHh = TYPE sym co_aLt } in + (GArrowTikZ.$WTikZ_const (GHC.Types.I# 12)) + `cast` (GArrowTikZ.GArrowTikZ () co_aHh + :: GArrowTikZ.GArrowTikZ () GHC.Types.Int + ~ + GArrowTikZ.GArrowTikZ () GHC.Types.Int)))); } in + main_aHj + +:Main.main :: GHC.Types.IO () +[LclIdX] +:Main.main = GHC.TopHandler.runMainIO @ () Main.main +end Rec } + + + + +==================== Desugar ==================== +Main.foo + :: forall (tv_~N6 :: * -> * -> *) tv_~N7. 
+ GHC.HetMet.Private.PGArrow + tv_~N6 (GHC.HetMet.GArrow.GArrowUnit tv_~N6) tv_~N7 + -> GHC.HetMet.Private.PGArrow + tv_~N6 (GHC.HetMet.GArrow.GArrowUnit tv_~N6) tv_~N7 +[LclId] +Main.foo = + \ (@ tv_~N6::* -> * -> *) + (@ tv_~N7) + (ev_~N8 + :: GHC.HetMet.Private.PGArrow + tv_~N6 (GHC.HetMet.GArrow.GArrowUnit tv_~N6) tv_~N7) -> + let { + ev_~N9 + :: GHC.HetMet.Private.PGArrow + tv_~N6 + (GHC.HetMet.GArrow.GArrowUnit tv_~N6) + (GHC.HetMet.GArrow.GArrowUnit tv_~N6) + [LclId] + ev_~N9 = + GHC.HetMet.Private.pga_id + @ tv_~N6 @ (GHC.HetMet.GArrow.GArrowUnit tv_~N6) } in + let { + ev_~Na + :: GHC.HetMet.Private.PGArrow + tv_~N6 (GHC.HetMet.GArrow.GArrowUnit tv_~N6) tv_~N7 + [LclId] + ev_~Na = + let { + ev_~Nh + :: GHC.HetMet.Private.PGArrow + tv_~N6 + (GHC.HetMet.GArrow.GArrowUnit tv_~N6) + (GHC.HetMet.GArrow.GArrowUnit tv_~N6) + [LclId] + ev_~Nh = + GHC.HetMet.Private.pga_id + @ tv_~N6 @ (GHC.HetMet.GArrow.GArrowUnit tv_~N6) } in + let { + ev_~Ni + :: GHC.HetMet.Private.PGArrow + tv_~N6 (GHC.HetMet.GArrow.GArrowUnit tv_~N6) tv_~N7 + [LclId] + ev_~Ni = ev_~N8 } in + let { + ev_~Nj + :: GHC.HetMet.Private.PGArrow + tv_~N6 + (GHC.HetMet.GArrow.GArrowUnit tv_~N6) + (GHC.HetMet.GArrow.GArrowUnit tv_~N6) + [LclId] + ev_~Nj = + GHC.HetMet.Private.pga_drop + @ tv_~N6 @ (GHC.HetMet.GArrow.GArrowUnit tv_~N6) } in + GHC.HetMet.Private.pga_comp + @ tv_~N6 + @ (GHC.HetMet.GArrow.GArrowUnit tv_~N6) + @ (GHC.HetMet.GArrow.GArrowUnit tv_~N6) + @ tv_~N7 + ev_~Nj + ev_~Ni } in + GHC.HetMet.Private.pga_comp + @ tv_~N6 + @ (GHC.HetMet.GArrow.GArrowUnit tv_~N6) + @ (GHC.HetMet.GArrow.GArrowUnit tv_~N6) + @ tv_~N7 + ev_~N9 + ev_~Na + +Main.maincoercionKind + base:GHC.HetMet.GArrow.GArrowUnit{tc 02y} + main:GArrowTikZ.GArrowTikZ{tc roV} + ~ + ghc-prim:GHC.Unit.(){(w) tc 40} +ghc-stage2: coreTypeToWeakType + hit a bare EqPred diff --git a/src/All.v b/src/All.v index d4956dd..9443bbb 100644 --- a/src/All.v +++ b/src/All.v @@ -1,4 +1,7 @@ Require Import ExtractionMain. +Require Import HaskProgrammingLanguage. +Require Import PCF. +Require Import HaskFlattener. Require Import ProgrammingLanguageArrow. Require Import ProgrammingLanguageReification. Require Import ProgrammingLanguageFlattening. 
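examples/Unify.hs, added above, documents its small unification API (inject, project, unify', occurrences, plus unify, mergeU and getU) only through Haddock comments, so here is a minimal client showing how the pieces are meant to fit together. It is modeled directly on the UVal instance in examples/GArrowTikZ.hs; the Term type and the example binding below are hypothetical names used for illustration, not part of the patch.

import Unify

-- a toy term language: either a unification variable or an application node
data Term = V UVar | App [Term]

instance Unifiable Term where
  inject               = V
  project (V v)        = Just v
  project _            = Nothing
  occurrences (V v)    = [v]
  occurrences (App ts) = concatMap occurrences ts
  unify' (App ts1) (App ts2)
    | length ts1 /= length ts2 = error "length mismatch during unification"
    | otherwise                = foldr mergeU emptyUnifier (zipWith unify ts1 ts2)
  unify' _ _ = error "impossible"   -- unify only calls unify' when neither side is a variable

example :: Maybe Term
example = let (u1:u2:_) = uvarSupply
              s         = unify (App [V u1, V u2]) (App [V u2, App []])
          in  getU s u1             -- Just (V u2): u1 maps to u2, which in turn maps to App []

As in the UVal instance, unify handles the variable cases (including the occurs check), so unify' only ever has to deal with two non-variable terms.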
diff --git a/src/Extraction-prefix.hs b/src/Extraction-prefix.hs index fbe22cb..f11e63b 100644 --- a/src/Extraction-prefix.hs +++ b/src/Extraction-prefix.hs @@ -1,3 +1,4 @@ +{-# OPTIONS_GHC -fno-warn-unused-matches -fno-warn-unused-binds -fno-warn-unused-imports #-} module CoqPass ( coqPassCoreToString, coqPassCoreToCore ) where import qualified Unique @@ -28,11 +29,9 @@ import qualified Data.List import qualified Data.Ord import qualified Data.Typeable import Data.Bits ((.&.), shiftL, (.|.)) -import Prelude ( (++), (+), (==), Show, show, Char, (.), ($) ) +import Prelude ( (++), (+), (==), Show, show, (.), ($) ) import qualified Prelude -import qualified Debug.Trace import qualified GHC.Base -import qualified System.IO import qualified System.IO.Unsafe getTyConTyVars :: TyCon.TyCon -> [Var.TyVar] @@ -54,21 +53,32 @@ sortAlts x = Data.List.sortBy (\a b -> if a `CoreSyn.ltAlt` b then Data.Ord.LT e coreVarToWeakVar :: Var.Var -> WeakVar coreVarToWeakVar v | Id.isId v = WExprVar (WeakExprVar v (errOrFail (coreTypeToWeakType (Var.varType v)))) coreVarToWeakVar v | Var.isTyVar v = WTypeVar (WeakTypeVar v (coreKindToKind (Var.varType v))) -coreVarToWeakVar v | Var.isCoVar v = WCoerVar (WeakCoerVar v (Prelude.error "FIXME") - (Prelude.error "FIXME") (Prelude.error "FIXME")) +coreVarToWeakVar v | Var.isCoVar v + = WCoerVar (WeakCoerVar v + (errOrFail (coreTypeToWeakType (Prelude.fst (Coercion.coercionKind (Var.varType v))))) + (errOrFail (coreTypeToWeakType (Prelude.snd (Coercion.coercionKind (Var.varType v)))))) coreVarToWeakVar _ = Prelude.error "Var.Var that is neither an expression variable, type variable, nor coercion variable!" +errOrFail :: OrError t -> t errOrFail (OK x) = x errOrFail (Error s) = Prelude.error s +rawTyFunKind :: TyCon.TyCon -> ( [Kind] , Kind ) +rawTyFunKind tc = ((Prelude.map coreKindToKind (Prelude.take (TyCon.tyConArity tc) argk)) + , + coreKindToKind (Coercion.mkArrowKinds (Prelude.drop (TyCon.tyConArity tc) argk) retk)) + where (argk,retk) = Coercion.splitKindFunTys (TyCon.tyConKind tc) + tyConOrTyFun :: TyCon.TyCon -> Prelude.Either TyCon.TyCon TyCon.TyCon tyConOrTyFun n = if n == TysPrim.statePrimTyCon -- special-purpose hack treat State# as a type family since it has kind *->* but no tyvars then Prelude.Right n else if TyCon.isFamInstTyCon n then Prelude.Right n - else Prelude.Left n + else if TyCon.isSynTyCon n + then Prelude.Right n + else Prelude.Left n nat2int :: Nat -> Prelude.Int nat2int O = 0 @@ -87,8 +97,9 @@ sanitizeForLatex (c:x) = c:(sanitizeForLatex x) kindToCoreKind :: Kind -> TypeRep.Kind kindToCoreKind KindStar = TypeRep.liftedTypeKind kindToCoreKind (KindArrow k1 k2) = Coercion.mkArrowKind (kindToCoreKind k1) (kindToCoreKind k2) -kindToCoreKind _ = Prelude.error "kindToCoreKind does not know how to handle that" - +kindToCoreKind k = Prelude.error ((Prelude.++) + "kindToCoreKind does not know how to handle kind " + (kindToString k)) coreKindToKind :: TypeRep.Kind -> Kind coreKindToKind k = case Coercion.splitKindFunTy_maybe k of @@ -113,7 +124,7 @@ coreKindToKind k = else Prelude.error ((Prelude.++) "coreKindToKind got an unknown kind: " (Outputable.showSDoc (Outputable.ppr k))) outputableToString :: Outputable.Outputable a => a -> Prelude.String -outputableToString = (\x -> Outputable.showSDoc (Outputable.ppr x)) +outputableToString = (\x -> Outputable.showSDocDebug (Outputable.ppr x)) coreViewDeep :: Type.Type -> Type.Type coreViewDeep t = @@ -178,17 +189,19 @@ weakCoercionToCoreCoercion :: CoreCoercion -> Type.Type | WCoUnsafe t1 t2 => (t1,t2) -} 
+{-# NOINLINE trace #-} +trace :: Prelude.String -> a -> a +trace msg x = x --trace = Debug.Trace.trace --trace msg x = x -trace msg x = System.IO.Unsafe.unsafePerformIO $ Prelude.return x -{- -trace s x = x -trace msg x = System.IO.Unsafe.unsafePerformIO $ - (Prelude.>>=) (System.IO.hPutStrLn System.IO.stdout msg) (\_ -> Prelude.return x) -trace msg x = System.IO.Unsafe.unsafePerformIO $ - (Prelude.>>=) (System.IO.hPutStr System.IO.stdout " ") (\_ -> Prelude.return x) --} +--trace msg x = System.IO.Unsafe.unsafePerformIO $ Prelude.return x +--trace s x = x +--trace msg x = System.IO.Unsafe.unsafePerformIO $ +-- (Prelude.>>=) (System.IO.hPutStrLn System.IO.stdout msg) (\_ -> Prelude.return x) +--trace msg x = System.IO.Unsafe.unsafePerformIO $ +-- (Prelude.>>=) (System.IO.hPutStr System.IO.stdout " ") (\_ -> Prelude.return x) + {- -- used for extracting strings WITHOUT the patch for Coq bin2ascii = diff --git a/src/ExtractionMain.v b/src/ExtractionMain.v index 49be891..5be5280 100644 --- a/src/ExtractionMain.v +++ b/src/ExtractionMain.v @@ -15,7 +15,8 @@ Require Import General. Require Import NaturalDeduction. Require Import HaskKinds. -Require Import HaskLiteralsAndTyCons. +Require Import HaskLiterals. +Require Import HaskTyCons. Require Import HaskCoreVars. Require Import HaskCoreTypes. Require Import HaskCore. @@ -33,8 +34,7 @@ Require Import HaskStrongToWeak. Require Import HaskWeakToCore. Require Import HaskProofToStrong. -(*Require Import HaskProofFlattener.*) -(*Require Import HaskProofStratified.*) +Require Import HaskFlattener. Open Scope string_scope. Extraction Language Haskell. @@ -101,13 +101,11 @@ Section core2proof. | WCoerVar _ => Prelude_error "top-level xi got a coercion variable" end. - Definition header : string := "\documentclass{article}"+++eol+++ "\usepackage{amsmath}"+++eol+++ "\usepackage{amssymb}"+++eol+++ "\usepackage{proof}"+++eol+++ -(* "\usepackage{mathpartir} % http://cristal.inria.fr/~remy/latex/"+++eol+++*) "\usepackage{trfrac} % http://www.utdallas.edu/~hamlen/trfrac.sty"+++eol+++ "\def\code#1#2{\Box_{#1} #2}"+++eol+++ "\usepackage[paperwidth=\maxdimen,paperheight=\maxdimen]{geometry}"+++eol+++ @@ -163,13 +161,16 @@ Section core2proof. Definition mkWeakTypeVar (u:Unique)(k:Kind) : WeakTypeVar := weakTypeVar (mkTyVar (mkSystemName u "tv" O) k) k. Definition mkWeakCoerVar (u:Unique)(k:Kind)(t1 t2:WeakType) : WeakCoerVar := - weakCoerVar (mkCoVar (mkSystemName u "cv" O) (weakTypeToCoreType t1) (weakTypeToCoreType t2)) k t1 t2. + weakCoerVar (mkCoVar (mkSystemName u "cv" O) (weakTypeToCoreType t1) (weakTypeToCoreType t2)) t1 t2. Definition mkWeakExprVar (u:Unique)(t:WeakType) : WeakExprVar := weakExprVar (mkExVar (mkSystemName u "ev" O) (weakTypeToCoreType t)) t. - Context (hetmet_brak : WeakExprVar). - Context (hetmet_esc : WeakExprVar). - Context (uniqueSupply : UniqSupply). + Context (hetmet_brak : WeakExprVar). + Context (hetmet_esc : WeakExprVar). + Context (hetmet_flatten : WeakExprVar). + Context (hetmet_unflatten : WeakExprVar). + Context (hetmet_flattened_id : WeakExprVar). + Context (uniqueSupply : UniqSupply). Definition useUniqueSupply {T}(ut:UniqM T) : ???T := match ut with @@ -223,6 +224,199 @@ Section core2proof. apply t. Defined. + End CoreToCore. + + Definition coreVarToWeakExprVarOrError cv := + match coreVarToWeakVar cv with + | WExprVar wv => wv + | _ => Prelude_error "IMPOSSIBLE" + end. + + Definition curry {Γ}{Δ}{a}{s}{Σ}{lev} : + ND Rule + [ Γ > Δ > Σ |- [a ---> s @@ lev ] ] + [ Γ > Δ > Σ,,[a @@ lev] |- [ s @@ lev ] ]. 
+ eapply nd_comp; [ idtac | eapply nd_rule; eapply RArrange; apply RExch ]. + eapply nd_comp; [ idtac | eapply nd_rule; apply (@RApp Γ Δ [a@@lev] Σ a s lev) ]. + eapply nd_comp; [ apply nd_llecnac | idtac ]. + apply nd_prod. + apply nd_rule. + apply RVar. + apply nd_id. + Defined. + + Definition fToC1 {Γ}{Δ}{a}{s}{lev} : + ND Rule [] [ Γ > Δ > [ ] |- [a ---> s @@ lev ] ] -> + ND Rule [] [ Γ > Δ > [a @@ lev] |- [ s @@ lev ] ]. + intro pf. + eapply nd_comp. + apply pf. + eapply nd_comp; [ idtac | eapply nd_rule; eapply RArrange; apply RCanL ]. + apply curry. + Defined. + + Definition fToC2 {Γ}{Δ}{a1}{a2}{s}{lev} : + ND Rule [] [ Γ > Δ > [] |- [a1 ---> (a2 ---> s) @@ lev ] ] -> + ND Rule [] [ Γ > Δ > [a1 @@ lev],,[a2 @@ lev] |- [ s @@ lev ] ]. + intro pf. + eapply nd_comp. + eapply pf. + clear pf. + eapply nd_comp. + eapply curry. + eapply nd_comp. + eapply nd_rule. + eapply RArrange. + eapply RCanL. + apply curry. + Defined. + + Section coqPassCoreToCore. + Context + (hetmet_brak : CoreVar) + (hetmet_esc : CoreVar) + (hetmet_flatten : CoreVar) + (hetmet_unflatten : CoreVar) + (hetmet_flattened_id : CoreVar) + (uniqueSupply : UniqSupply) + (lbinds:list (@CoreBind CoreVar)) + (hetmet_PGArrowTyCon : TyFun) + (hetmet_PGArrow_unit_TyCon : TyFun) + (hetmet_PGArrow_tensor_TyCon : TyFun) + (hetmet_PGArrow_exponent_TyCon : TyFun) + (hetmet_pga_id : CoreVar) + (hetmet_pga_comp : CoreVar) + (hetmet_pga_first : CoreVar) + (hetmet_pga_second : CoreVar) + (hetmet_pga_cancell : CoreVar) + (hetmet_pga_cancelr : CoreVar) + (hetmet_pga_uncancell : CoreVar) + (hetmet_pga_uncancelr : CoreVar) + (hetmet_pga_assoc : CoreVar) + (hetmet_pga_unassoc : CoreVar) + (hetmet_pga_copy : CoreVar) + (hetmet_pga_drop : CoreVar) + (hetmet_pga_swap : CoreVar) + (hetmet_pga_applyl : CoreVar) + (hetmet_pga_applyr : CoreVar) + (hetmet_pga_curryl : CoreVar) + (hetmet_pga_curryr : CoreVar) + . + + + Definition ga_unit TV (ec:RawHaskType TV ECKind) : RawHaskType TV ★ := + @TyFunApp TV hetmet_PGArrow_unit_TyCon (ECKind::nil) ★ (TyFunApp_cons _ _ ec TyFunApp_nil). + + Definition ga_prod TV (ec:RawHaskType TV ECKind) (a b:RawHaskType TV ★) : RawHaskType TV ★ := + (@TyFunApp TV + hetmet_PGArrow_tensor_TyCon + (ECKind::★ ::★ ::nil) ★ + (TyFunApp_cons _ _ ec + (TyFunApp_cons _ _ a + (TyFunApp_cons _ _ b + TyFunApp_nil)))). + + Definition ga_type {TV}(a:RawHaskType TV ECKind)(b c:RawHaskType TV ★) : RawHaskType TV ★ := + TApp (TApp (TApp (@TyFunApp TV + hetmet_PGArrowTyCon + nil _ TyFunApp_nil) a) b) c. + + Definition ga := @ga_mk ga_unit ga_prod (@ga_type). + + Definition ga_type' {Γ}(a:HaskType Γ ECKind)(b c:HaskType Γ ★) : HaskType Γ ★ := + fun TV ite => TApp (TApp (TApp (@TyFunApp TV + hetmet_PGArrowTyCon + nil _ TyFunApp_nil) (a TV ite)) (b TV ite)) (c TV ite). + + Definition mkGlob2' {Γ}{κ₁}{κ₂}(f:HaskType Γ κ₁ -> HaskType Γ κ₂ -> HaskType Γ ★) : + IList Kind (fun κ : Kind => HaskType Γ κ) (κ₁::κ₂::nil) -> HaskType Γ ★. + intros. + inversion X; subst. + inversion X1; subst. + apply f; auto. + Defined. + + Definition mkGlob2 {Γ}{Δ}{l}{κ₁}{κ₂}(cv:CoreVar)(f:HaskType Γ κ₁ -> HaskType Γ κ₂ -> HaskType Γ ★) x y + : ND Rule [] [ Γ > Δ > [] |- [f x y @@ l] ]. + apply nd_rule. + refine (@RGlobal Γ Δ l + {| glob_wv := coreVarToWeakExprVarOrError cv + ; glob_kinds := κ₁ :: κ₂ :: nil + ; glob_tf := mkGlob2'(Γ:=Γ) f + |} (ICons _ _ x (ICons _ _ y INil))). + Defined. 
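(Informal gloss, not part of the patch: ga_unit, ga_prod and ga_type above just apply the three type functions passed in as section parameters, so, read back as surface types,

    ga_unit ec      ~  hetmet_PGArrow_unit_TyCon ec
    ga_prod ec a b  ~  hetmet_PGArrow_tensor_TyCon ec a b
    ga_type a b c   ~  hetmet_PGArrowTyCon a b c

and mkGlob2/mkGlob3/mkGlob4 package one of the hetmet_pga_* CoreVars as an RGlobal axiom applied to two, three or four type arguments. The my_ga instance a few lines below uses these to give each garrow method a concrete witness, with Prelude_error placeholders for the still-unsupported ga_lit, ga_curry, ga_apply and ga_kappa.)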
+ + Definition mkGlob3' {Γ}{κ₁}{κ₂}{κ₃}(f:HaskType Γ κ₁ -> HaskType Γ κ₂ -> HaskType Γ κ₃ -> HaskType Γ ★) : + IList Kind (fun κ : Kind => HaskType Γ κ) (κ₁::κ₂::κ₃::nil) -> HaskType Γ ★. + intros. + inversion X; subst. + inversion X1; subst. + inversion X3; subst. + apply f; auto. + Defined. + + Definition mkGlob3 {Γ}{Δ}{l}{κ₁}{κ₂}{κ₃}(cv:CoreVar)(f:HaskType Γ κ₁ -> HaskType Γ κ₂ -> HaskType Γ κ₃ -> HaskType Γ ★) x y z + : ND Rule [] [ Γ > Δ > [] |- [f x y z @@ l] ]. + apply nd_rule. + refine (@RGlobal Γ Δ l + {| glob_wv := coreVarToWeakExprVarOrError cv + ; glob_kinds := κ₁ :: κ₂ :: κ₃ :: nil + ; glob_tf := mkGlob3'(Γ:=Γ) f + |} (ICons _ _ x (ICons _ _ y (ICons _ _ z INil)))). + Defined. + + Definition mkGlob4' {Γ}{κ₁}{κ₂}{κ₃}{κ₄}(f:HaskType Γ κ₁ -> HaskType Γ κ₂ -> HaskType Γ κ₃ -> HaskType Γ κ₄ -> HaskType Γ ★) : + IList Kind (fun κ : Kind => HaskType Γ κ) (κ₁::κ₂::κ₃::κ₄::nil) -> HaskType Γ ★. + intros. + inversion X; subst. + inversion X1; subst. + inversion X3; subst. + inversion X5; subst. + apply f; auto. + Defined. + + Definition mkGlob4 {Γ}{Δ}{l}{κ₁}{κ₂}{κ₃}{κ₄}(cv:CoreVar)(f:HaskType Γ κ₁ -> HaskType Γ κ₂ -> HaskType Γ κ₃ -> HaskType Γ κ₄ -> HaskType Γ ★) x y z q + : ND Rule [] [ Γ > Δ > [] |- [f x y z q @@ l] ]. + apply nd_rule. + refine (@RGlobal Γ Δ l + {| glob_wv := coreVarToWeakExprVarOrError cv + ; glob_kinds := κ₁ :: κ₂ :: κ₃ :: κ₄ :: nil + ; glob_tf := mkGlob4'(Γ:=Γ) f + |} (ICons _ _ x (ICons _ _ y (ICons _ _ z (ICons _ _ q INil))))). + Defined. + + Definition gat {Γ} ec (x:Tree ??(HaskType Γ ★)) := @ga_mk_tree ga_unit ga_prod _ ec x. + + Instance my_ga : garrow ga_unit ga_prod (@ga_type) := + { ga_id := fun Γ Δ ec l a => mkGlob2 hetmet_pga_id (fun ec a => ga_type' ec a a) ec (gat ec a) + ; ga_cancelr := fun Γ Δ ec l a => mkGlob2 hetmet_pga_cancelr (fun ec a => ga_type' ec _ a) ec (gat ec a) + ; ga_cancell := fun Γ Δ ec l a => mkGlob2 hetmet_pga_cancell (fun ec a => ga_type' ec _ a) ec (gat ec a) + ; ga_uncancelr := fun Γ Δ ec l a => mkGlob2 hetmet_pga_uncancelr (fun ec a => ga_type' ec a _) ec (gat ec a) + ; ga_uncancell := fun Γ Δ ec l a => mkGlob2 hetmet_pga_uncancell (fun ec a => ga_type' ec a _) ec (gat ec a) + ; ga_assoc := fun Γ Δ ec l a b c => mkGlob4 hetmet_pga_assoc (fun ec a b c => ga_type' ec _ _) ec (gat ec a) (gat ec b) (gat ec c) + ; ga_unassoc := fun Γ Δ ec l a b c => mkGlob4 hetmet_pga_unassoc (fun ec a b c => ga_type' ec _ _) ec (gat ec a) (gat ec b) (gat ec c) + ; ga_swap := fun Γ Δ ec l a b => mkGlob3 hetmet_pga_swap (fun ec a b => ga_type' ec _ _) ec (gat ec a) (gat ec b) + ; ga_drop := fun Γ Δ ec l a => mkGlob2 hetmet_pga_drop (fun ec a => ga_type' ec _ _) ec (gat ec a) + ; ga_copy := fun Γ Δ ec l a => mkGlob2 hetmet_pga_copy (fun ec a => ga_type' ec _ _) ec (gat ec a) + ; ga_first := fun Γ Δ ec l a b x => fToC1 (mkGlob4 hetmet_pga_first (fun ec a b c => _) ec (gat ec a) (gat ec b) (gat ec x)) + ; ga_second := fun Γ Δ ec l a b x => fToC1 (mkGlob4 hetmet_pga_second (fun ec a b c => _) ec (gat ec a) (gat ec b) (gat ec x)) + ; ga_comp := fun Γ Δ ec l a b c => fToC2 (mkGlob4 hetmet_pga_comp (fun ec a b c => _) ec (gat ec a) (gat ec b) (gat ec c)) +(* ; ga_lit := fun Γ Δ ec l a => nd_rule (RGlobal _ _ _ _ (coreVarToWeakExprVarOrError hetmet_pga_lit))*) +(* ; ga_curry := fun Γ Δ ec l a => nd_rule (RGlobal _ _ _ _ (coreVarToWeakExprVarOrError hetmet_pga_curry))*) +(* ; ga_apply := fun Γ Δ ec l a => nd_rule (RGlobal _ _ _ _ (coreVarToWeakExprVarOrError hetmet_pga_apply))*) +(* ; ga_kappa := fun Γ Δ ec l a => fToC1 (nd_rule (RGlobal _ _ _ _ 
(coreVarToWeakExprVarOrError hetmet_pga_kappa)))*) + ; ga_lit := fun Γ Δ ec l a => Prelude_error "ga_lit" + ; ga_curry := fun Γ Δ ec l a b c => Prelude_error "ga_curry" + ; ga_apply := fun Γ Δ ec l a b c => Prelude_error "ga_apply" + ; ga_kappa := fun Γ Δ ec l a => Prelude_error "ga_kappa" + }. + + Definition hetmet_brak' := coreVarToWeakExprVarOrError hetmet_brak. + Definition hetmet_esc' := coreVarToWeakExprVarOrError hetmet_esc. + Definition hetmet_flatten' := coreVarToWeakExprVarOrError hetmet_flatten. + Definition hetmet_unflatten' := coreVarToWeakExprVarOrError hetmet_unflatten. + Definition hetmet_flattened_id' := coreVarToWeakExprVarOrError hetmet_flattened_id. + Definition coreToCoreExpr' (ce:@CoreExpr CoreVar) : ???(@CoreExpr CoreVar) := addErrorMessage ("input CoreSyn: " +++ toString ce) (addErrorMessage ("input CoreType: " +++ toString (coreTypeOfCoreExpr ce)) ( @@ -236,12 +430,14 @@ Section core2proof. ((weakExprToStrongExpr Γ Δ φ ψ ξ (fun _ => true) τ nil we) >>= fun e => (addErrorMessage ("HaskStrong...") - (let haskProof := @expr2proof _ _ _ _ _ _ e + (let haskProof := skolemize_and_flatten_proof hetmet_flatten' hetmet_unflatten' + hetmet_flattened_id' my_ga (@expr2proof _ _ _ _ _ _ e) in (* insert HaskProof-to-HaskProof manipulations here *) OK ((@proof2expr nat _ FreshNat _ _ _ _ (fun _ => Prelude_error "unbound unique") _ haskProof) O) >>= fun e' => (snd e') >>= fun e'' => - strongExprToWeakExpr hetmet_brak hetmet_esc mkWeakTypeVar mkWeakCoerVar mkWeakExprVar uniqueSupply + strongExprToWeakExpr hetmet_brak' hetmet_esc' + mkWeakTypeVar mkWeakCoerVar mkWeakExprVar uniqueSupply (projT2 e'') INil >>= fun q => OK (weakExprToCoreExpr q) @@ -252,29 +448,88 @@ Section core2proof. | OK x => x | Error s => Prelude_error s end. - + Definition coreToCoreBind (binds:@CoreBind CoreVar) : @CoreBind CoreVar := match binds with - | CoreNonRec v e => CoreNonRec v (coreToCoreExpr e) + | CoreNonRec v e => let e' := coreToCoreExpr e in CoreNonRec (setVarType v (coreTypeOfCoreExpr e')) e' + | CoreRec lbe => CoreRec (map (fun ve => match ve with (v,e) => (v,coreToCoreExpr e) end) lbe) + (* FIXME: doesn't deal with the case where top level recursive binds change type *) +(* + match coreToCoreExpr (CoreELet lbe) (CoreELit HaskMachNullAddr) with + | CoreELet (CoreRec lbe') => lbe' + | x => Prelude_error + ("coreToCoreExpr was given a letrec, " +++ + "but returned something that wasn't a letrec: " +++ toString x) + end +*) end. - + Definition coqPassCoreToCore' (lbinds:list (@CoreBind CoreVar)) : list (@CoreBind CoreVar) := map coreToCoreBind lbinds. - End CoreToCore. + End coqPassCoreToCore. - Definition coqPassCoreToCore + Definition coqPassCoreToCore (hetmet_brak : CoreVar) (hetmet_esc : CoreVar) + (hetmet_flatten : CoreVar) + (hetmet_unflatten : CoreVar) + (hetmet_flattened_id : CoreVar) (uniqueSupply : UniqSupply) - (lbinds:list (@CoreBind CoreVar)) : list (@CoreBind CoreVar) := - match coreVarToWeakVar hetmet_brak with - | WExprVar hetmet_brak' => match coreVarToWeakVar hetmet_esc with - | WExprVar hetmet_esc' => coqPassCoreToCore' hetmet_brak' hetmet_esc' uniqueSupply lbinds - | _ => Prelude_error "IMPOSSIBLE" - end - | _ => Prelude_error "IMPOSSIBLE" - end. 
+ (lbinds:list (@CoreBind CoreVar)) + (hetmet_PGArrowTyCon : TyFun) + (hetmet_PGArrow_unit_TyCon : TyFun) + (hetmet_PGArrow_tensor_TyCon : TyFun) + (hetmet_PGArrow_exponent_TyCon : TyFun) + (hetmet_pga_id : CoreVar) + (hetmet_pga_comp : CoreVar) + (hetmet_pga_first : CoreVar) + (hetmet_pga_second : CoreVar) + (hetmet_pga_cancell : CoreVar) + (hetmet_pga_cancelr : CoreVar) + (hetmet_pga_uncancell : CoreVar) + (hetmet_pga_uncancelr : CoreVar) + (hetmet_pga_assoc : CoreVar) + (hetmet_pga_unassoc : CoreVar) + (hetmet_pga_copy : CoreVar) + (hetmet_pga_drop : CoreVar) + (hetmet_pga_swap : CoreVar) + (hetmet_pga_applyl : CoreVar) + (hetmet_pga_applyr : CoreVar) + (hetmet_pga_curryl : CoreVar) + (hetmet_pga_curryr : CoreVar) : list (@CoreBind CoreVar) := + coqPassCoreToCore' + hetmet_brak + hetmet_esc + hetmet_flatten + hetmet_unflatten + hetmet_flattened_id + uniqueSupply + hetmet_PGArrowTyCon + hetmet_PGArrow_unit_TyCon + hetmet_PGArrow_tensor_TyCon +(* hetmet_PGArrow_exponent_TyCon*) + hetmet_pga_id + hetmet_pga_comp + hetmet_pga_first + hetmet_pga_second + hetmet_pga_cancell + hetmet_pga_cancelr + hetmet_pga_uncancell + hetmet_pga_uncancelr + hetmet_pga_assoc + hetmet_pga_unassoc + hetmet_pga_copy + hetmet_pga_drop + hetmet_pga_swap + lbinds + (* + hetmet_pga_applyl + hetmet_pga_applyr + hetmet_pga_curryl + *) + + . End core2proof. diff --git a/src/General.v b/src/General.v index d017894..5d19e9c 100644 --- a/src/General.v +++ b/src/General.v @@ -20,6 +20,24 @@ Class EqDecidable (T:Type) := }. Coercion eqd_type : EqDecidable >-> Sortclass. +Instance EqDecidableOption (T:Type)(EQDT:EqDecidable T) : EqDecidable ??T. + apply Build_EqDecidable. + intros. + destruct v1; + destruct v2. + destruct (eqd_dec t t0). + subst. + left; auto. + right. + unfold not; intros. + inversion H. + subst. + apply n. + auto. + right; unfold not; intro; inversion H. + right; unfold not; intro; inversion H. + left; auto. + Defined. Class ToString (T:Type) := { toString : T -> string }. Instance StringToString : ToString string := { toString := fun x => x }. @@ -89,6 +107,13 @@ Fixpoint treeReduce {T:Type}{R:Type}(mapLeaf:T->R)(mergeBranches:R->R->R) (t:Tre Definition treeDecomposition {D T:Type} (mapLeaf:T->D) (mergeBranches:D->D->D) := forall d:D, { tt:Tree T & d = treeReduce mapLeaf mergeBranches tt }. +Fixpoint reduceTree {T}(unit:T)(merge:T -> T -> T)(tt:Tree ??T) : T := + match tt with + | T_Leaf None => unit + | T_Leaf (Some x) => x + | T_Branch b1 b2 => merge (reduceTree unit merge b1) (reduceTree unit merge b2) + end. + Lemma tree_dec_eq : forall {Q}(t1 t2:Tree ??Q), (forall q1 q2:Q, sumbool (q1=q2) (not (q1=q2))) -> @@ -444,6 +469,106 @@ Instance EqDecidableList {T:Type}(eqd:EqDecidable T) : EqDecidable (list T). apply eqd_dec. Defined. +Fixpoint listToString {T:Type}{tst:ToString T}(l:list T) : string := + match l with + | nil => "nil" + | a::b => (toString a) +++ "::" +++ listToString b + end. + +Instance ListToString {T:Type}{tst:ToString T} : ToString (list T) := + { toString := @listToString _ _ }. + + +(*******************************************************************************) +(* Tree Flags *) + +(* TreeFlags is effectively a tree of booleans whose shape matches that of another tree *) +Inductive TreeFlags {T:Type} : Tree T -> Type := +| tf_leaf_true : forall x, TreeFlags (T_Leaf x) +| tf_leaf_false : forall x, TreeFlags (T_Leaf x) +| tf_branch : forall b1 b2, TreeFlags b1 -> TreeFlags b2 -> TreeFlags (b1,,b2). 
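(The mkFlags / dropT / takeT definitions that follow consume these flag trees; the flattener later uses them to split a typing context by level. A rough Haskell analogue may make the intent easier to see. This is only a sketch, not part of the patch: the dependent shape index of TreeFlags is replaced here by storing the leaf value inside the flag, and "Tree ??T" becomes a tree of Maybe leaves.

    -- sketch only: flag every leaf, then either blank the flagged leaves (dropT)
    -- or keep only the flagged leaves (takeT)
    data Tree a  = Leaf a | Branch (Tree a) (Tree a)
    data Flags a = FLeaf Bool a | FBranch (Flags a) (Flags a)

    mkFlags :: (a -> Bool) -> Tree a -> Flags a
    mkFlags f (Leaf x)     = FLeaf (f x) x
    mkFlags f (Branch l r) = FBranch (mkFlags f l) (mkFlags f r)

    dropT :: Flags (Maybe a) -> Tree (Maybe a)
    dropT (FLeaf True  _) = Leaf Nothing              -- flagged leaf becomes []
    dropT (FLeaf False x) = Leaf x
    dropT (FBranch l r)   = Branch (dropT l) (dropT r)

    takeT :: Flags a -> Maybe (Tree a)
    takeT (FLeaf True  x) = Just (Leaf x)
    takeT (FLeaf False _) = Nothing
    takeT (FBranch l r)   = case (takeT l, takeT r) of
                              (Nothing, t)       -> t
                              (t, Nothing)       -> t
                              (Just l', Just r') -> Just (Branch l' r')

As the comment a few lines below notes, takeT and dropT are not inverses: dropT preserves the shape of the tree while takeT collapses the unflagged parts away entirely.)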
+ +(* If flags are calculated using a local condition, this will build the flags *) +Fixpoint mkFlags {T}(f:T -> bool)(t:Tree T) : TreeFlags t := + match t as T return TreeFlags T with + | T_Leaf x => if f x then tf_leaf_true x else tf_leaf_false x + | T_Branch b1 b2 => tf_branch _ _ (mkFlags f b1) (mkFlags f b2) + end. + +(* takeT and dropT are not exact inverses! *) + +(* drop replaces each leaf where the flag is set with a [] *) +Fixpoint dropT {T}{Σ:Tree ??T}(tf:TreeFlags Σ) : Tree ??T := + match tf with + | tf_leaf_true x => [] + | tf_leaf_false x => Σ + | tf_branch b1 b2 tb1 tb2 => (dropT tb1),,(dropT tb2) + end. + +(* takeT returns only those leaves for which the flag is set; all others are omitted entirely from the tree *) +Fixpoint takeT {T}{Σ:Tree T}(tf:TreeFlags Σ) : ??(Tree T) := + match tf with + | tf_leaf_true x => Some Σ + | tf_leaf_false x => None + | tf_branch b1 b2 tb1 tb2 => + match takeT tb1 with + | None => takeT tb2 + | Some b1' => match takeT tb2 with + | None => Some b1' + | Some b2' => Some (b1',,b2') + end + end + end. + +(* lift a function T->bool to a function (option T)->bool by yielding (None |-> b) *) +Definition liftBoolFunc {T}(b:bool)(f:T -> bool) : ??T -> bool := + fun t => + match t with + | None => b + | Some x => f x + end. + +(* decidable quality on a tree of elements which have decidable equality *) +Definition tree_eq_dec : forall {T:Type}(l1 l2:Tree T)(dec:forall t1 t2:T, sumbool (eq t1 t2) (not (eq t1 t2))), + sumbool (eq l1 l2) (not (eq l1 l2)). + intro T. + intro l1. + induction l1; intros. + destruct l2. + destruct (dec a t). + subst. + left; auto. + right; unfold not; intro; apply n; inversion H; auto. + right. + unfold not; intro. + inversion H. + + destruct l2. + right; unfold not; intro; inversion H. + destruct (IHl1_1 l2_1 dec); + destruct (IHl1_2 l2_2 dec); subst. + left; auto. + right. + unfold not; intro. + inversion H; subst. + apply n; auto. + right. + unfold not; intro. + inversion H; subst. + apply n; auto. + right. + unfold not; intro. + inversion H; subst. + apply n; auto. + Defined. + +Instance EqDecidableTree {T:Type}(eqd:EqDecidable T) : EqDecidable (Tree T). + apply Build_EqDecidable. + intros. + apply tree_eq_dec. + apply eqd_dec. + Defined. + (*******************************************************************************) (* Length-Indexed Lists *) @@ -739,6 +864,8 @@ Lemma extensionality_composes : forall t1 t2 t3 (f f':t1->t2) (g g':t2->t3), Definition map2 {A}{B}(f:A->B)(t:A*A) : (B*B) := ((f (fst t)), (f (snd t))). +(* boolean "not" *) +Definition bnot (b:bool) : bool := if b then false else true. (* string stuff *) Variable eol : string. @@ -826,6 +953,16 @@ Lemma list2vecOrFail {T}(l:list T)(n:nat)(error_message:nat->nat->string) : ???( apply (Error (error_message (length l) n)). Defined. +(* this makes a type function application, ensuring not to oversaturate it (though if it was undersaturated we can't fix that) *) +Fixpoint split_list {T}(l:list T)(n:nat) : ???(list T * list T) := + match n with + | O => OK (nil , l) + | S n' => match l with + | nil => Error "take_list failed" + | h::t => split_list t n' >>= fun t' => let (t1,t2) := t' in OK ((h::t1),t2) + end + end. + (* Uniques *) Variable UniqSupply : Type. Extract Inlined Constant UniqSupply => "UniqSupply.UniqSupply". Variable Unique : Type. Extract Inlined Constant Unique => "Unique.Unique". diff --git a/src/HaskCore.v b/src/HaskCore.v index 9024828..b05c34f 100644 --- a/src/HaskCore.v +++ b/src/HaskCore.v @@ -7,7 +7,8 @@ Require Import Preamble. 
Require Import General. Require Import Coq.Strings.String. Require Import HaskKinds. -Require Import HaskLiteralsAndTyCons. +Require Import HaskLiterals. +Require Import HaskTyCons. Require Import HaskCoreTypes. Require Import HaskCoreVars. diff --git a/src/HaskCoreToWeak.v b/src/HaskCoreToWeak.v index abcd6b8..ccd4973 100644 --- a/src/HaskCoreToWeak.v +++ b/src/HaskCoreToWeak.v @@ -7,7 +7,8 @@ Require Import Preamble. Require Import Coq.Lists.List. Require Import General. Require Import HaskKinds. -Require Import HaskLiteralsAndTyCons. +Require Import HaskLiterals. +Require Import HaskTyCons. Require Import HaskCoreVars. Require Import HaskCoreTypes. Require Import HaskCore. @@ -28,6 +29,12 @@ Variable hetmet_brak_name : CoreName. Extract Inlined Constant he Variable hetmet_esc_name : CoreName. Extract Inlined Constant hetmet_esc_name => "PrelNames.hetmet_esc_name". Variable hetmet_csp_name : CoreName. Extract Inlined Constant hetmet_csp_name => "PrelNames.hetmet_csp_name". +Definition mkTyFunApplication (tf:TyFun)(lwt:list WeakType) : ???WeakType := + split_list lwt (length (fst (tyFunKind tf))) >>= + fun argsrest => + let (args,rest) := argsrest in + OK (fold_left (fun x y => WAppTy x y) rest (WTyFunApp tf args)). + Fixpoint coreTypeToWeakType' (ct:CoreType) : ???WeakType := match ct with | TyVarTy cv => match coreVarToWeakVar cv with @@ -44,7 +51,7 @@ Fixpoint coreTypeToWeakType' (ct:CoreType) : ???WeakType := | a::b => coreTypeToWeakType' a >>= fun a' => rec b >>= fun b' => OK (a'::b') end) lct) in match tyConOrTyFun tc_ with - | inr tf => recurse >>= fun recurse' => OK (WTyFunApp tf recurse') + | inr tf => recurse >>= fun recurse' => mkTyFunApplication tf recurse' | inl tc => if eqd_dec tc ModalBoxTyCon then match lct with | ((TyVarTy ec)::tbody::nil) => @@ -68,9 +75,11 @@ Fixpoint coreTypeToWeakType' (ct:CoreType) : ???WeakType := coreTypeToWeakType' t2 >>= fun t2' => OK (WAppTy (WAppTy WFunTyCon t1') t2') | ForAllTy cv t => match coreVarToWeakVar cv with - | WExprVar _ => Error "encountered expression variable in a type" + | WExprVar _ => Error "encountered expression variable in a type abstraction" | WTypeVar tv => coreTypeToWeakType' t >>= fun t' => OK (WForAllTy tv t') - | WCoerVar _ => Error "encountered coercion variable in a type" + | WCoerVar (weakCoerVar v t1' t2') => + coreTypeToWeakType' t >>= fun t3' => + OK (WCoFunTy t1' t2' t3') end | PredTy (ClassP cl lct) => ((fix rec tl := match tl with | nil => OK nil @@ -81,7 +90,8 @@ Fixpoint coreTypeToWeakType' (ct:CoreType) : ???WeakType := | PredTy (EqPred _ _) => Error "hit a bare EqPred" end. -Fixpoint coreTypeToWeakType t := addErrorMessage "coreTypeToWeakType" (coreTypeToWeakType' (coreViewDeep t)). +Definition coreTypeToWeakType t := + addErrorMessage ("during coreTypeToWeakType on " +++ toString t +++ eol) (coreTypeToWeakType' (coreViewDeep t)). (* detects our crude Core-encoding of modal type introduction/elimination forms *) Definition isBrak (ce:@CoreExpr CoreVar) : ??(WeakExprVar * WeakTypeVar * CoreType) := diff --git a/src/HaskCoreTypes.v b/src/HaskCoreTypes.v index 8aa81ee..4babf36 100644 --- a/src/HaskCoreTypes.v +++ b/src/HaskCoreTypes.v @@ -9,7 +9,8 @@ Require Import Coq.Strings.String. Require Import Coq.Lists.List. Require Import HaskKinds. Require Import HaskCoreVars. -Require Import HaskLiteralsAndTyCons. +Require Import HaskLiterals. +Require Import HaskTyCons. Variable CoreCoercion : Type. Extract Inlined Constant CoreCoercion => "Coercion.Coercion". Variable classTyCon : Class_ -> CoreTyCon. 
Extract Inlined Constant classTyCon => "Class.classTyCon". @@ -35,9 +36,11 @@ Extract Inductive PredType => Variable coreNameToString : CoreName -> string. Extract Inlined Constant coreNameToString => "outputableToString". Variable coreCoercionToString : CoreCoercion -> string. Extract Inlined Constant coreCoercionToString => "outputableToString". -Variable coreCoercionKind : CoreCoercion -> CoreType*CoreType. Extract Inlined Constant coreCoercionKind => "Coercion.coercionKind". +Variable coreCoercionKind : Kind -> CoreType*CoreType. + Extract Inlined Constant coreCoercionKind => "(Coercion.coercionKind . kindToCoreKind)". Variable kindOfCoreType : CoreType -> Kind. Extract Inlined Constant kindOfCoreType => "(coreKindToKind . Coercion.typeKind)". Variable coreTypeToString : CoreType -> string. Extract Inlined Constant coreTypeToString => "(outputableToString . coreViewDeep)". +Variable setVarType : CoreVar -> CoreType -> CoreVar. Extract Inlined Constant setVarType => "Var.setVarType". (* GHC provides decision procedures for equality on its primitive types; we tell Coq to blindly trust them *) Variable coreTyCon_eq : EqDecider CoreTyCon. Extract Inlined Constant coreTyCon_eq => "(==)". diff --git a/src/HaskCoreVars.v b/src/HaskCoreVars.v index d158f05..8b0aabb 100644 --- a/src/HaskCoreVars.v +++ b/src/HaskCoreVars.v @@ -6,7 +6,8 @@ Generalizable All Variables. Require Import Preamble. Require Import General. Require Import Coq.Strings.String. -Require Import HaskLiteralsAndTyCons. +Require Import HaskLiterals. +Require Import HaskTyCons. (* GHC uses a single type for expression variables, type variables, and coercion variables; this is that type *) Variable CoreVar : Type. Extract Inlined Constant CoreVar => "Var.Var". diff --git a/src/HaskFlattener.v b/src/HaskFlattener.v new file mode 100644 index 0000000..90785b0 --- /dev/null +++ b/src/HaskFlattener.v @@ -0,0 +1,1269 @@ +(*********************************************************************************************************************************) +(* HaskFlattener: *) +(* *) +(* The Flattening Functor. *) +(* *) +(*********************************************************************************************************************************) + +Generalizable All Variables. +Require Import Preamble. +Require Import General. +Require Import NaturalDeduction. +Require Import Coq.Strings.String. +Require Import Coq.Lists.List. + +Require Import HaskKinds. +Require Import HaskCoreTypes. +Require Import HaskCoreVars. +Require Import HaskWeakTypes. +Require Import HaskWeakVars. +Require Import HaskLiterals. +Require Import HaskTyCons. +Require Import HaskStrongTypes. +Require Import HaskProof. +Require Import NaturalDeduction. + +Require Import HaskStrongTypes. +Require Import HaskStrong. +Require Import HaskProof. +Require Import HaskStrongToProof. +Require Import HaskProofToStrong. +Require Import HaskWeakToStrong. + +Require Import HaskSkolemizer. + +Open Scope nd_scope. +Set Printing Width 130. + +(* + * The flattening transformation. Currently only TWO-level languages are + * supported, and the level-1 sublanguage is rather limited. + * + * This file abuses terminology pretty badly. For purposes of this file, + * "PCF" means "the level-1 sublanguage" and "FC" (aka System FC) means + * the whole language (level-0 language including bracketed level-1 terms) + *) +Section HaskFlattener. + + Definition getlev {Γ}{κ}(lht:LeveledHaskType Γ κ) : HaskLevel Γ := + match lht with t @@ l => l end. 
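(A brief orientation note, my reading of the header comment rather than anything stated explicitly: level 0 is ordinary System FC, level 1 is the fragment that lives under code brackets, and this pass eliminates the brackets by re-expressing every level-1 term as a generalized-arrow ("garrow") composition at level 0. In the leveled types used throughout this file, a hypothesis typed t @@ nil lives at the top level, while t @@ (ec::nil) lives under one layer of brackets whose environment classifier is ec; getlev just above simply projects that level annotation out of a LeveledHaskType.)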
+ + Definition arrange : + forall {T} (Σ:Tree ??T) (f:T -> bool), + Arrange Σ (dropT (mkFlags (liftBoolFunc false f) Σ),,( (dropT (mkFlags (liftBoolFunc false (bnot ○ f)) Σ)))). + intros. + induction Σ. + simpl. + destruct a. + simpl. + destruct (f t); simpl. + apply RuCanL. + apply RuCanR. + simpl. + apply RuCanL. + simpl in *. + eapply RComp; [ idtac | apply arrangeSwapMiddle ]. + eapply RComp. + eapply RLeft. + apply IHΣ2. + eapply RRight. + apply IHΣ1. + Defined. + + Definition arrange' : + forall {T} (Σ:Tree ??T) (f:T -> bool), + Arrange (dropT (mkFlags (liftBoolFunc false f) Σ),,( (dropT (mkFlags (liftBoolFunc false (bnot ○ f)) Σ)))) Σ. + intros. + induction Σ. + simpl. + destruct a. + simpl. + destruct (f t); simpl. + apply RCanL. + apply RCanR. + simpl. + apply RCanL. + simpl in *. + eapply RComp; [ apply arrangeSwapMiddle | idtac ]. + eapply RComp. + eapply RLeft. + apply IHΣ2. + eapply RRight. + apply IHΣ1. + Defined. + + Ltac eqd_dec_refl' := + match goal with + | [ |- context[@eqd_dec ?T ?V ?X ?X] ] => + destruct (@eqd_dec T V X X) as [eqd_dec1 | eqd_dec2]; + [ clear eqd_dec1 | set (eqd_dec2 (refl_equal _)) as eqd_dec2'; inversion eqd_dec2' ] + end. + + Definition v2t {Γ}(ec:HaskTyVar Γ ECKind) : HaskType Γ ECKind := fun TV ite => TVar (ec TV ite). + + Definition levelMatch {Γ}(lev:HaskLevel Γ) : LeveledHaskType Γ ★ -> bool := + fun t => match t with ttype@@tlev => if eqd_dec tlev lev then true else false end. + + (* In a tree of types, replace any type at depth "lev" or greater None *) + Definition mkDropFlags {Γ}(lev:HaskLevel Γ)(tt:Tree ??(LeveledHaskType Γ ★)) : TreeFlags tt := + mkFlags (liftBoolFunc false (levelMatch lev)) tt. + + Definition drop_lev {Γ}(lev:HaskLevel Γ)(tt:Tree ??(LeveledHaskType Γ ★)) : Tree ??(LeveledHaskType Γ ★) := + dropT (mkDropFlags lev tt). + + (* The opposite: replace any type which is NOT at level "lev" with None *) + Definition mkTakeFlags {Γ}(lev:HaskLevel Γ)(tt:Tree ??(LeveledHaskType Γ ★)) : TreeFlags tt := + mkFlags (liftBoolFunc true (bnot ○ levelMatch lev)) tt. + + Definition take_lev {Γ}(lev:HaskLevel Γ)(tt:Tree ??(LeveledHaskType Γ ★)) : Tree ??(LeveledHaskType Γ ★) := + dropT (mkTakeFlags lev tt). +(* + mapOptionTree (fun x => flatten_type (unlev x)) + (maybeTree (takeT tt (mkFlags ( + fun t => match t with + | Some (ttype @@ tlev) => if eqd_dec tlev lev then true else false + | _ => true + end + ) tt))). + + Definition maybeTree {T}(t:??(Tree ??T)) : Tree ??T := + match t with + | None => [] + | Some x => x + end. +*) + + Lemma drop_lev_lemma : forall Γ (lev:HaskLevel Γ) x, drop_lev lev [x @@ lev] = []. + intros; simpl. + Opaque eqd_dec. + unfold drop_lev. + simpl. + unfold mkDropFlags. + simpl. + Transparent eqd_dec. + eqd_dec_refl'. + auto. + Qed. + + Lemma drop_lev_lemma_s : forall Γ (lev:HaskLevel Γ) ec x, drop_lev (ec::lev) [x @@ (ec :: lev)] = []. + intros; simpl. + Opaque eqd_dec. + unfold drop_lev. + unfold mkDropFlags. + simpl. + Transparent eqd_dec. + eqd_dec_refl'. + auto. + Qed. + + Lemma take_lemma : forall Γ (lev:HaskLevel Γ) x, take_lev lev [x @@ lev] = [x @@ lev]. + intros; simpl. + Opaque eqd_dec. + unfold take_lev. + unfold mkTakeFlags. + simpl. + Transparent eqd_dec. + eqd_dec_refl'. + auto. + Qed. + + Lemma take_lemma' : forall Γ (lev:HaskLevel Γ) x, take_lev lev (x @@@ lev) = x @@@ lev. + intros. + induction x. + destruct a; simpl; try reflexivity. + apply take_lemma. + simpl. + rewrite <- IHx1 at 2. + rewrite <- IHx2 at 2. + reflexivity. + Qed. 
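(A worked instance of the two filters just defined, illustrative only. Note that levelMatch tests for the exact level, so despite the "or greater" wording in the comment above, only hypotheses at precisely the given level are affected as far as I can tell. For an antecedent

    Σ  =  [ x @@ (ec::nil) ] ,, [ y @@ nil ]

the definitions (and the drop/take lemmas above) give

    drop_lev (ec::nil) Σ  =  [] ,, [ y @@ nil ]          -- the level-(ec::nil) leaf is blanked
    take_lev (ec::nil) Σ  =  [ x @@ (ec::nil) ] ,, []    -- only the level-(ec::nil) leaf survives

which is exactly the split flatten_judgment performs below: the taken part becomes the antecedent tree of the ga_mk type introduced a few lines further down, and the dropped part remains as the flattened context.)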
+(* + Lemma drop_lev_lemma' : forall Γ (lev:HaskLevel Γ) x, drop_lev lev (x @@@ lev) = []. + intros. + induction x. + destruct a; simpl; try reflexivity. + apply drop_lev_lemma. + simpl. + change (@drop_lev _ lev (x1 @@@ lev ,, x2 @@@ lev)) + with ((@drop_lev _ lev (x1 @@@ lev)) ,, (@drop_lev _ lev (x2 @@@ lev))). + simpl. + rewrite IHx1. + rewrite IHx2. + reflexivity. + Qed. +*) + Ltac drop_simplify := + match goal with + | [ |- context[@drop_lev ?G ?L [ ?X @@ ?L ] ] ] => + rewrite (drop_lev_lemma G L X) +(* + | [ |- context[@drop_lev ?G ?L [ ?X @@@ ?L ] ] ] => + rewrite (drop_lev_lemma' G L X) +*) + | [ |- context[@drop_lev ?G (?E :: ?L) [ ?X @@ (?E :: ?L) ] ] ] => + rewrite (drop_lev_lemma_s G L E X) + | [ |- context[@drop_lev ?G ?N (?A,,?B)] ] => + change (@drop_lev G N (A,,B)) with ((@drop_lev G N A),,(@drop_lev G N B)) + | [ |- context[@drop_lev ?G ?N (T_Leaf None)] ] => + change (@drop_lev G N (T_Leaf (@None (LeveledHaskType G ★)))) with (T_Leaf (@None (LeveledHaskType G ★))) + end. + + Ltac take_simplify := + match goal with + | [ |- context[@take_lev ?G ?L [ ?X @@ ?L ] ] ] => + rewrite (take_lemma G L X) + | [ |- context[@take_lev ?G ?L [ ?X @@@ ?L ] ] ] => + rewrite (take_lemma' G L X) + | [ |- context[@take_lev ?G ?N (?A,,?B)] ] => + change (@take_lev G N (A,,B)) with ((@take_lev G N A),,(@take_lev G N B)) + | [ |- context[@take_lev ?G ?N (T_Leaf None)] ] => + change (@take_lev G N (T_Leaf (@None (LeveledHaskType G ★)))) with (T_Leaf (@None (LeveledHaskType G ★))) + end. + + + (*******************************************************************************) + + + Context (hetmet_flatten : WeakExprVar). + Context (hetmet_unflatten : WeakExprVar). + Context (hetmet_id : WeakExprVar). + Context {unitTy : forall TV, RawHaskType TV ECKind -> RawHaskType TV ★ }. + Context {prodTy : forall TV, RawHaskType TV ECKind -> RawHaskType TV ★ -> RawHaskType TV ★ -> RawHaskType TV ★ }. + Context {gaTy : forall TV, RawHaskType TV ECKind -> RawHaskType TV ★ -> RawHaskType TV ★ -> RawHaskType TV ★ }. + + Definition ga_mk_tree' {TV}(ec:RawHaskType TV ECKind)(tr:Tree ??(RawHaskType TV ★)) : RawHaskType TV ★ := + reduceTree (unitTy TV ec) (prodTy TV ec) tr. + + Definition ga_mk_tree {Γ}(ec:HaskType Γ ECKind)(tr:Tree ??(HaskType Γ ★)) : HaskType Γ ★ := + fun TV ite => ga_mk_tree' (ec TV ite) (mapOptionTree (fun x => x TV ite) tr). + + Definition ga_mk_raw {TV}(ec:RawHaskType TV ECKind)(ant suc:Tree ??(RawHaskType TV ★)) : RawHaskType TV ★ := + gaTy TV ec + (ga_mk_tree' ec ant) + (ga_mk_tree' ec suc). + + Definition ga_mk {Γ}(ec:HaskType Γ ECKind)(ant suc:Tree ??(HaskType Γ ★)) : HaskType Γ ★ := + fun TV ite => gaTy TV (ec TV ite) (ga_mk_tree ec ant TV ite) (ga_mk_tree ec suc TV ite). 
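(An informal reading of ga_mk, not stated in the source: ga_mk_tree collapses a tree of types into a single type, using unitTy for empty leaves and prodTy for branches, and ga_mk then wraps the two collapsed trees in gaTy. So, for example,

    ga_mk ec ([a],,[b]) [c]   ~   gaTy ec (prodTy ec a b) c

that is, "a generalized arrow at environment classifier ec, from the tensor of the antecedent tree to the tensor of the succedent tree". When the section parameters unitTy/prodTy/gaTy are instantiated with ga_unit/ga_prod/ga_type from the ExtractionMain.v hunk above, this is the PGArrow type that flattened code fragments receive.)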
+ + (* + * The story: + * - code types <[t]>@c become garrows c () t + * - free variables of type t at a level lev deeper than the succedent become garrows c () t + * - free variables at the level of the succedent become + *) + Fixpoint flatten_rawtype {TV}{κ}(exp: RawHaskType TV κ) : RawHaskType TV κ := + match exp with + | TVar _ x => TVar x + | TAll _ y => TAll _ (fun v => flatten_rawtype (y v)) + | TApp _ _ x y => TApp (flatten_rawtype x) (flatten_rawtype y) + | TCon tc => TCon tc + | TCoerc _ t1 t2 t => TCoerc (flatten_rawtype t1) (flatten_rawtype t2) (flatten_rawtype t) + | TArrow => TArrow + | TCode ec e => let e' := flatten_rawtype e + in ga_mk_raw ec (unleaves' (take_arg_types e')) [drop_arg_types e'] + | TyFunApp tfc kl k lt => TyFunApp tfc kl k (flatten_rawtype_list _ lt) + end + with flatten_rawtype_list {TV}(lk:list Kind)(exp:@RawHaskTypeList TV lk) : @RawHaskTypeList TV lk := + match exp in @RawHaskTypeList _ LK return @RawHaskTypeList TV LK with + | TyFunApp_nil => TyFunApp_nil + | TyFunApp_cons κ kl t rest => TyFunApp_cons _ _ (flatten_rawtype t) (flatten_rawtype_list _ rest) + end. + + Definition flatten_type {Γ}{κ}(ht:HaskType Γ κ) : HaskType Γ κ := + fun TV ite => flatten_rawtype (ht TV ite). + + Fixpoint levels_to_tcode {Γ}(ht:HaskType Γ ★)(lev:HaskLevel Γ) : HaskType Γ ★ := + match lev with + | nil => flatten_type ht + | ec::lev' => @ga_mk _ (v2t ec) [] [levels_to_tcode ht lev'] + end. + + Definition flatten_leveled_type {Γ}(ht:LeveledHaskType Γ ★) : LeveledHaskType Γ ★ := + levels_to_tcode (unlev ht) (getlev ht) @@ nil. + + (* AXIOMS *) + + Axiom literal_types_unchanged : forall Γ l, flatten_type (literalType l) = literalType(Γ:=Γ) l. + + Axiom flatten_coercion : forall Γ Δ κ (σ τ:HaskType Γ κ) (γ:HaskCoercion Γ Δ (σ ∼∼∼ τ)), + HaskCoercion Γ Δ (flatten_type σ ∼∼∼ flatten_type τ). + + Axiom flatten_commutes_with_substT : + forall κ Γ (Δ:CoercionEnv Γ) (σ:∀ TV, InstantiatedTypeEnv TV Γ → TV κ → RawHaskType TV ★) (τ:HaskType Γ κ), + flatten_type (substT σ τ) = substT (fun TV ite v => flatten_rawtype (σ TV ite v)) + (flatten_type τ). + + Axiom flatten_commutes_with_HaskTAll : + forall κ Γ (Δ:CoercionEnv Γ) (σ:∀ TV, InstantiatedTypeEnv TV Γ → TV κ → RawHaskType TV ★), + flatten_type (HaskTAll κ σ) = HaskTAll κ (fun TV ite v => flatten_rawtype (σ TV ite v)). + + Axiom flatten_commutes_with_HaskTApp : + forall κ Γ (Δ:CoercionEnv Γ) (σ:∀ TV, InstantiatedTypeEnv TV Γ → TV κ → RawHaskType TV ★), + flatten_type (HaskTApp (weakF σ) (FreshHaskTyVar κ)) = + HaskTApp (weakF (fun TV ite v => flatten_rawtype (σ TV ite v))) (FreshHaskTyVar κ). + + Axiom flatten_commutes_with_weakLT : forall (Γ:TypeEnv) κ t, + flatten_leveled_type (weakLT(Γ:=Γ)(κ:=κ) t) = weakLT(Γ:=Γ)(κ:=κ) (flatten_leveled_type t). + + Axiom globals_do_not_have_code_types : forall (Γ:TypeEnv) (g:Global Γ) v, + flatten_type (g v) = g v. + + (* This tries to assign a single level to the entire succedent of a judgment. If the succedent has types from different + * levels (should not happen) it just picks one; if the succedent has no non-None leaves (also should not happen) it + * picks nil *) + Definition getΓ (j:Judg) := match j with Γ > _ > _ |- _ => Γ end. + Definition getSuc (j:Judg) : Tree ??(LeveledHaskType (getΓ j) ★) := + match j as J return Tree ??(LeveledHaskType (getΓ J) ★) with Γ > _ > _ |- s => s end. 
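(A worked instance of the TCode case above, illustrative only; take_arg_types and drop_arg_types are defined elsewhere, so the exact tree shape below is my guess at their behaviour. Flattening a code type whose body is a curried function, say

    <[ ec |- a ---> b ---> c ]>

produces, roughly,

    ga_mk ec ([a'],,[b']) [c']        -- a', b', c' the flattened a, b, c

i.e. a garrow from the tensored argument types to the final result type, which is also what the krunk lemma later in the file states (and currently admits). levels_to_tcode then handles the levels themselves: a type sitting at level ec::lev' becomes ga_mk (v2t ec) [] [ ... ], which is how free variables deeper than the succedent turn into "garrows from unit", as promised in the comment at the top of this block.)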
+ Fixpoint getjlev {Γ}(tt:Tree ??(LeveledHaskType Γ ★)) : HaskLevel Γ := + match tt with + | T_Leaf None => nil + | T_Leaf (Some (_ @@ lev)) => lev + | T_Branch b1 b2 => + match getjlev b1 with + | nil => getjlev b2 + | lev => lev + end + end. + + (* "n" is the maximum depth remaining AFTER flattening *) + Definition flatten_judgment (j:Judg) := + match j as J return Judg with + Γ > Δ > ant |- suc => + match getjlev suc with + | nil => Γ > Δ > mapOptionTree flatten_leveled_type ant + |- mapOptionTree flatten_leveled_type suc + + | (ec::lev') => Γ > Δ > mapOptionTree flatten_leveled_type (drop_lev (ec::lev') ant) + |- [ga_mk (v2t ec) + (mapOptionTree (flatten_type ○ unlev) (take_lev (ec::lev') ant)) + (mapOptionTree (flatten_type ○ unlev) suc ) + @@ nil] (* we know the level of all of suc *) + end + end. + + Class garrow := + { ga_id : ∀ Γ Δ ec l a , ND Rule [] [Γ > Δ > [] |- [@ga_mk Γ ec a a @@ l] ] + ; ga_cancelr : ∀ Γ Δ ec l a , ND Rule [] [Γ > Δ > [] |- [@ga_mk Γ ec (a,,[]) a @@ l] ] + ; ga_cancell : ∀ Γ Δ ec l a , ND Rule [] [Γ > Δ > [] |- [@ga_mk Γ ec ([],,a) a @@ l] ] + ; ga_uncancelr : ∀ Γ Δ ec l a , ND Rule [] [Γ > Δ > [] |- [@ga_mk Γ ec a (a,,[]) @@ l] ] + ; ga_uncancell : ∀ Γ Δ ec l a , ND Rule [] [Γ > Δ > [] |- [@ga_mk Γ ec a ([],,a) @@ l] ] + ; ga_assoc : ∀ Γ Δ ec l a b c, ND Rule [] [Γ > Δ > [] |- [@ga_mk Γ ec ((a,,b),,c) (a,,(b,,c)) @@ l] ] + ; ga_unassoc : ∀ Γ Δ ec l a b c, ND Rule [] [Γ > Δ > [] |- [@ga_mk Γ ec (a,,(b,,c)) ((a,,b),,c) @@ l] ] + ; ga_swap : ∀ Γ Δ ec l a b , ND Rule [] [Γ > Δ > [] |- [@ga_mk Γ ec (a,,b) (b,,a) @@ l] ] + ; ga_drop : ∀ Γ Δ ec l a , ND Rule [] [Γ > Δ > [] |- [@ga_mk Γ ec a [] @@ l] ] + ; ga_copy : ∀ Γ Δ ec l a , ND Rule [] [Γ > Δ > [] |- [@ga_mk Γ ec a (a,,a) @@ l] ] + ; ga_first : ∀ Γ Δ ec l a b x, ND Rule [] [Γ > Δ > [@ga_mk Γ ec a b @@ l] |- [@ga_mk Γ ec (a,,x) (b,,x) @@ l] ] + ; ga_second : ∀ Γ Δ ec l a b x, ND Rule [] [Γ > Δ > [@ga_mk Γ ec a b @@ l] |- [@ga_mk Γ ec (x,,a) (x,,b) @@ l] ] + ; ga_lit : ∀ Γ Δ ec l lit , ND Rule [] [Γ > Δ > [] |- [@ga_mk Γ ec [] [literalType lit] @@ l] ] + ; ga_curry : ∀ Γ Δ ec l a b c, ND Rule [] [Γ > Δ > [@ga_mk Γ ec (a,,[b]) [c] @@ l] |- [@ga_mk Γ ec a [b ---> c] @@ l] ] + ; ga_comp : ∀ Γ Δ ec l a b c, ND Rule [] [Γ > Δ > [@ga_mk Γ ec a b @@ l],,[@ga_mk Γ ec b c @@ l] |- [@ga_mk Γ ec a c @@ l] ] + ; ga_apply : ∀ Γ Δ ec l a a' b c, + ND Rule [] [Γ > Δ > [@ga_mk Γ ec a [b ---> c] @@ l],,[@ga_mk Γ ec a' [b] @@ l] |- [@ga_mk Γ ec (a,,a') [c] @@ l] ] + ; ga_kappa : ∀ Γ Δ ec l a b Σ, ND Rule + [Γ > Δ > Σ,,[@ga_mk Γ ec [] a @@ l] |- [@ga_mk Γ ec [] b @@ l] ] + [Γ > Δ > Σ |- [@ga_mk Γ ec a b @@ l] ] + }. + Context `(gar:garrow). + + Notation "a ~~~~> b" := (@ga_mk _ _ a b) (at level 20). + + Definition boost : forall Γ Δ ant x y {lev}, + ND Rule [] [ Γ > Δ > [x@@lev] |- [y@@lev] ] -> + ND Rule [ Γ > Δ > ant |- [x@@lev] ] [ Γ > Δ > ant |- [y@@lev] ]. + intros. + eapply nd_comp; [ idtac | eapply nd_rule; eapply RArrange; eapply RCanL ]. + eapply nd_comp; [ idtac | eapply nd_rule; apply (@RLet Γ Δ [] ant y x lev) ]. + eapply nd_comp; [ apply nd_rlecnac | idtac ]. + apply nd_prod. + apply nd_id. + eapply nd_comp. + apply X. + eapply nd_rule. + eapply RArrange. + apply RuCanL. + Defined. + + Definition postcompose' : ∀ Γ Δ ec lev a b c Σ, + ND Rule [] [ Γ > Δ > Σ |- [@ga_mk Γ ec a b @@ lev] ] -> + ND Rule [] [ Γ > Δ > Σ,,[@ga_mk Γ ec b c @@ lev] |- [@ga_mk Γ ec a c @@ lev] ]. + intros. + eapply nd_comp; [ idtac | eapply nd_rule; eapply RArrange; eapply RExch ]. 
+ eapply nd_comp; [ idtac + | eapply nd_rule; apply (@RLet Γ Δ [@ga_mk _ ec b c @@lev] Σ (@ga_mk _ ec a c) (@ga_mk _ ec a b) lev) ]. + eapply nd_comp; [ apply nd_llecnac | idtac ]. + apply nd_prod. + apply X. + eapply nd_comp; [ idtac | eapply nd_rule; eapply RArrange; apply RExch ]. + apply ga_comp. + Defined. + + Definition precompose Γ Δ ec : forall a x y z lev, + ND Rule + [ Γ > Δ > a |- [@ga_mk _ ec y z @@ lev] ] + [ Γ > Δ > a,,[@ga_mk _ ec x y @@ lev] |- [@ga_mk _ ec x z @@ lev] ]. + intros. + eapply nd_comp. + apply nd_rlecnac. + eapply nd_comp. + eapply nd_prod. + apply nd_id. + eapply ga_comp. + + eapply nd_comp; [ idtac | eapply nd_rule; eapply RArrange; apply RExch ]. + + apply nd_rule. + apply RLet. + Defined. + + Definition precompose' : ∀ Γ Δ ec lev a b c Σ, + ND Rule [] [ Γ > Δ > Σ |- [@ga_mk Γ ec b c @@ lev] ] -> + ND Rule [] [ Γ > Δ > Σ,,[@ga_mk Γ ec a b @@ lev] |- [@ga_mk Γ ec a c @@ lev] ]. + intros. + eapply nd_comp. + apply X. + apply precompose. + Defined. + + Definition postcompose : ∀ Γ Δ ec lev a b c, + ND Rule [] [ Γ > Δ > [] |- [@ga_mk Γ ec a b @@ lev] ] -> + ND Rule [] [ Γ > Δ > [@ga_mk Γ ec b c @@ lev] |- [@ga_mk Γ ec a c @@ lev] ]. + intros. + eapply nd_comp. + apply postcompose'. + apply X. + apply nd_rule. + apply RArrange. + apply RCanL. + Defined. + + Definition firstify : ∀ Γ Δ ec lev a b c Σ, + ND Rule [] [ Γ > Δ > Σ |- [@ga_mk Γ ec a b @@ lev] ] -> + ND Rule [] [ Γ > Δ > Σ |- [@ga_mk Γ ec (a,,c) (b,,c) @@ lev] ]. + intros. + eapply nd_comp; [ idtac | eapply nd_rule; eapply RArrange; eapply RCanL ]. + eapply nd_comp; [ idtac | eapply nd_rule; apply RLet ]. + eapply nd_comp; [ apply nd_llecnac | idtac ]. + apply nd_prod. + apply X. + eapply nd_comp; [ idtac | eapply nd_rule; eapply RArrange; eapply RuCanL ]. + apply ga_first. + Defined. + + Definition secondify : ∀ Γ Δ ec lev a b c Σ, + ND Rule [] [ Γ > Δ > Σ |- [@ga_mk Γ ec a b @@ lev] ] -> + ND Rule [] [ Γ > Δ > Σ |- [@ga_mk Γ ec (c,,a) (c,,b) @@ lev] ]. + intros. + eapply nd_comp; [ idtac | eapply nd_rule; eapply RArrange; eapply RCanL ]. + eapply nd_comp; [ idtac | eapply nd_rule; apply RLet ]. + eapply nd_comp; [ apply nd_llecnac | idtac ]. + apply nd_prod. + apply X. + eapply nd_comp; [ idtac | eapply nd_rule; eapply RArrange; eapply RuCanL ]. + apply ga_second. + Defined. + + Lemma ga_unkappa : ∀ Γ Δ ec l z a b Σ, + ND Rule + [Γ > Δ > Σ |- [@ga_mk Γ ec a b @@ l] ] + [Γ > Δ > Σ,,[@ga_mk Γ ec z a @@ l] |- [@ga_mk Γ ec z b @@ l] ]. + intros. + set (ga_comp Γ Δ ec l z a b) as q. + + set (@RLet Γ Δ) as q'. + set (@RLet Γ Δ [@ga_mk _ ec z a @@ l] Σ (@ga_mk _ ec z b) (@ga_mk _ ec a b) l) as q''. + eapply nd_comp. + Focus 2. + eapply nd_rule. + eapply RArrange. + apply RExch. + + eapply nd_comp. + Focus 2. + eapply nd_rule. + apply q''. + + idtac. + clear q'' q'. + eapply nd_comp. + apply nd_rlecnac. + apply nd_prod. + apply nd_id. + apply q. + Defined. + + Lemma ga_unkappa' : ∀ Γ Δ ec l a b Σ x, + ND Rule + [Γ > Δ > Σ |- [@ga_mk Γ ec (a,,x) b @@ l] ] + [Γ > Δ > Σ,,[@ga_mk Γ ec [] a @@ l] |- [@ga_mk Γ ec x b @@ l] ]. + intros. + eapply nd_comp; [ idtac | eapply nd_rule; eapply RLet ]. + eapply nd_comp; [ apply nd_llecnac | idtac ]. + apply nd_prod. + apply ga_first. + + eapply nd_comp; [ idtac | eapply nd_rule; eapply RLet ]. + eapply nd_comp; [ apply nd_llecnac | idtac ]. + apply nd_prod. + apply postcompose. + apply ga_uncancell. + apply precompose. + Defined. 
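(The run of lemmas ending here is categorical plumbing over the garrow class; nothing in it adds new axioms, everything is assembled from RLet, RArrange and the garrow methods. Informal glosses, mine rather than the source's:

    boost            from a closed proof that hypothesis x yields y, upgrade any
                     derivation of x from ant into a derivation of y from ant
    precompose       given y ~> z (derived from context a) plus an extra hypothesis
                     x ~> y, conclude x ~> z
    postcompose      given a closed a ~> b, derive a ~> c from the hypothesis b ~> c
    firstify /       tensor an unused wire onto an arrow, via ga_first / ga_second
      secondify
    ga_unkappa       given a ~> b, and an extra hypothesis z ~> a, conclude z ~> b

They are used heavily in flatten_arrangement' and flatten_proof below.)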
+ + Lemma ga_kappa' : ∀ Γ Δ ec l a b Σ x, + ND Rule + [Γ > Δ > Σ,,[@ga_mk Γ ec [] a @@ l] |- [@ga_mk Γ ec x b @@ l] ] + [Γ > Δ > Σ |- [@ga_mk Γ ec (a,,x) b @@ l] ]. + apply (Prelude_error "ga_kappa not supported yet (BIG FIXME)"). + Defined. + + (* useful for cutting down on the pretty-printed noise + + Notation "` x" := (take_lev _ x) (at level 20). + Notation "`` x" := (mapOptionTree unlev x) (at level 20). + Notation "``` x" := (drop_lev _ x) (at level 20). + *) + Definition flatten_arrangement' : + forall Γ (Δ:CoercionEnv Γ) + (ec:HaskTyVar Γ ECKind) (lev:HaskLevel Γ) (ant1 ant2:Tree ??(LeveledHaskType Γ ★)) (r:Arrange ant1 ant2), + ND Rule [] [Γ > Δ > [] |- [@ga_mk _ (v2t ec) (mapOptionTree (flatten_type ○ unlev) (take_lev (ec :: lev) ant2)) + (mapOptionTree (flatten_type ○ unlev) (take_lev (ec :: lev) ant1)) @@ nil] ]. + + intros Γ Δ ec lev. + refine (fix flatten ant1 ant2 (r:Arrange ant1 ant2): + ND Rule [] [Γ > Δ > [] |- [@ga_mk _ (v2t ec) + (mapOptionTree (flatten_type ○ unlev) (take_lev (ec :: lev) ant2)) + (mapOptionTree (flatten_type ○ unlev) (take_lev (ec :: lev) ant1)) @@ nil]] := + match r as R in Arrange A B return + ND Rule [] [Γ > Δ > [] |- [@ga_mk _ (v2t ec) + (mapOptionTree (flatten_type ○ unlev) (take_lev (ec :: lev) B)) + (mapOptionTree (flatten_type ○ unlev) (take_lev (ec :: lev) A)) @@ nil]] + with + | RCanL a => let case_RCanL := tt in ga_uncancell _ _ _ _ _ + | RCanR a => let case_RCanR := tt in ga_uncancelr _ _ _ _ _ + | RuCanL a => let case_RuCanL := tt in ga_cancell _ _ _ _ _ + | RuCanR a => let case_RuCanR := tt in ga_cancelr _ _ _ _ _ + | RAssoc a b c => let case_RAssoc := tt in ga_assoc _ _ _ _ _ _ _ + | RCossa a b c => let case_RCossa := tt in ga_unassoc _ _ _ _ _ _ _ + | RExch a b => let case_RExch := tt in ga_swap _ _ _ _ _ _ + | RWeak a => let case_RWeak := tt in ga_drop _ _ _ _ _ + | RCont a => let case_RCont := tt in ga_copy _ _ _ _ _ + | RLeft a b c r' => let case_RLeft := tt in flatten _ _ r' ;; boost _ _ _ _ _ (ga_second _ _ _ _ _ _ _) + | RRight a b c r' => let case_RRight := tt in flatten _ _ r' ;; boost _ _ _ _ _ (ga_first _ _ _ _ _ _ _) + | RComp c b a r1 r2 => let case_RComp := tt in (fun r1' r2' => _) (flatten _ _ r1) (flatten _ _ r2) + end); clear flatten; repeat take_simplify; repeat drop_simplify; intros. + + destruct case_RComp. + set (mapOptionTree (flatten_type ○ unlev) (take_lev (ec :: lev) a)) as a' in *. + set (mapOptionTree (flatten_type ○ unlev) (take_lev (ec :: lev) b)) as b' in *. + set (mapOptionTree (flatten_type ○ unlev) (take_lev (ec :: lev) c)) as c' in *. + eapply nd_comp; [ idtac | eapply nd_rule; eapply RArrange; apply RCanL ]. + eapply nd_comp; [ idtac | eapply nd_rule; apply + (@RLet Γ Δ [] [] (@ga_mk _ (v2t ec) a' c') (@ga_mk _ (v2t ec) a' b')) ]. + eapply nd_comp; [ apply nd_llecnac | idtac ]. + apply nd_prod. + apply r2'. + eapply nd_comp; [ idtac | eapply nd_rule; eapply RArrange; apply RuCanL ]. + eapply nd_comp; [ idtac | eapply nd_rule; eapply RArrange; apply RCanR ]. + eapply nd_comp; [ idtac | eapply nd_rule; apply + (@RLet Γ Δ [@ga_mk _ (v2t ec) a' b' @@ _] [] (@ga_mk _ (v2t ec) a' c') (@ga_mk _ (v2t ec) b' c'))]. + eapply nd_comp; [ apply nd_llecnac | idtac ]. + eapply nd_prod. + apply r1'. + apply ga_comp. + Defined. 
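(flatten_arrangement' above is the structural core of the translation: each Arrange constructor, i.e. each context-manipulation rule, is interpreted by the garrow method of the same shape, and the resulting arrow runs contravariantly, from the rearranged context back to the original one, as the take_lev ant2 / take_lev ant1 ordering in the statement shows:

    RCanL / RCanR     |->  ga_uncancell / ga_uncancelr
    RuCanL / RuCanR   |->  ga_cancell   / ga_cancelr
    RAssoc / RCossa   |->  ga_assoc     / ga_unassoc
    RExch             |->  ga_swap
    RWeak             |->  ga_drop
    RCont             |->  ga_copy
    RLeft / RRight    |->  ga_second    / ga_first    (recursing into the subtree)
    RComp             |->  ga_comp, glued together with RLet cuts

So exchange becomes a wire swap, weakening discards a wire, contraction duplicates one, and so on; the flatten_arrangement / flatten_arrangement'' wrappers that follow lift this to whole judgments.)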
+ + Definition flatten_arrangement : + forall Γ (Δ:CoercionEnv Γ) n + (ec:HaskTyVar Γ ECKind) (lev:HaskLevel Γ) (ant1 ant2:Tree ??(LeveledHaskType Γ ★)) (r:Arrange ant1 ant2) succ, + ND Rule + [Γ > Δ > mapOptionTree (flatten_leveled_type ) (drop_lev n ant1) + |- [@ga_mk _ (v2t ec) + (mapOptionTree (flatten_type ○ unlev) (take_lev (ec :: lev) ant1)) + (mapOptionTree (flatten_type ○ unlev) succ) @@ nil]] + [Γ > Δ > mapOptionTree (flatten_leveled_type ) (drop_lev n ant2) + |- [@ga_mk _ (v2t ec) + (mapOptionTree (flatten_type ○ unlev) (take_lev (ec :: lev) ant2)) + (mapOptionTree (flatten_type ○ unlev) succ) @@ nil]]. + intros. + refine ( _ ;; (boost _ _ _ _ _ (postcompose _ _ _ _ _ _ _ (flatten_arrangement' Γ Δ ec lev ant1 ant2 r)))). + apply nd_rule. + apply RArrange. + refine ((fix flatten ant1 ant2 (r:Arrange ant1 ant2) := + match r as R in Arrange A B return + Arrange (mapOptionTree (flatten_leveled_type ) (drop_lev _ A)) + (mapOptionTree (flatten_leveled_type ) (drop_lev _ B)) with + | RCanL a => let case_RCanL := tt in RCanL _ + | RCanR a => let case_RCanR := tt in RCanR _ + | RuCanL a => let case_RuCanL := tt in RuCanL _ + | RuCanR a => let case_RuCanR := tt in RuCanR _ + | RAssoc a b c => let case_RAssoc := tt in RAssoc _ _ _ + | RCossa a b c => let case_RCossa := tt in RCossa _ _ _ + | RExch a b => let case_RExch := tt in RExch _ _ + | RWeak a => let case_RWeak := tt in RWeak _ + | RCont a => let case_RCont := tt in RCont _ + | RLeft a b c r' => let case_RLeft := tt in RLeft _ (flatten _ _ r') + | RRight a b c r' => let case_RRight := tt in RRight _ (flatten _ _ r') + | RComp a b c r1 r2 => let case_RComp := tt in RComp (flatten _ _ r1) (flatten _ _ r2) + end) ant1 ant2 r); clear flatten; repeat take_simplify; repeat drop_simplify; intros. + Defined. + + Definition flatten_arrangement'' : + forall Γ Δ ant1 ant2 succ (r:Arrange ant1 ant2), + ND Rule (mapOptionTree (flatten_judgment ) [Γ > Δ > ant1 |- succ]) + (mapOptionTree (flatten_judgment ) [Γ > Δ > ant2 |- succ]). + intros. + simpl. + set (getjlev succ) as succ_lev. + assert (succ_lev=getjlev succ). + reflexivity. + + destruct succ_lev. + apply nd_rule. + apply RArrange. + induction r; simpl. + apply RCanL. + apply RCanR. + apply RuCanL. + apply RuCanR. + apply RAssoc. + apply RCossa. + apply RExch. + apply RWeak. + apply RCont. + apply RLeft; auto. + apply RRight; auto. + eapply RComp; [ apply IHr1 | apply IHr2 ]. + + apply flatten_arrangement. + apply r. + Defined. + + Definition ga_join Γ Δ Σ₁ Σ₂ a b ec : + ND Rule [] [Γ > Δ > Σ₁ |- [@ga_mk _ ec [] a @@ nil]] -> + ND Rule [] [Γ > Δ > Σ₂ |- [@ga_mk _ ec [] b @@ nil]] -> + ND Rule [] [Γ > Δ > Σ₁,,Σ₂ |- [@ga_mk _ ec [] (a,,b) @@ nil]]. + intro pfa. + intro pfb. + apply secondify with (c:=a) in pfb. + eapply nd_comp. + Focus 2. + eapply nd_comp; [ idtac | eapply nd_rule; eapply RLet ]. + eapply nd_comp; [ eapply nd_llecnac | idtac ]. + eapply nd_prod. + apply pfb. + clear pfb. + apply postcompose'. + eapply nd_comp. + apply pfa. + clear pfa. + apply boost. + eapply nd_comp; [ idtac | eapply nd_rule; eapply RArrange; eapply RCanL ]. + apply precompose'. + apply ga_uncancelr. + apply nd_id. + Defined. + + Definition arrange_brak : forall Γ Δ ec succ t, + ND Rule + [Γ > Δ > mapOptionTree (flatten_leveled_type ) (drop_lev (ec :: nil) succ),, + [(@ga_mk _ (v2t ec) [] (mapOptionTree (flatten_type ○ unlev) (take_lev (ec :: nil) succ))) @@ nil] |- [t @@ nil]] + [Γ > Δ > mapOptionTree (flatten_leveled_type ) succ |- [t @@ nil]]. + intros. + unfold drop_lev. 
+ set (@arrange' _ succ (levelMatch (ec::nil))) as q. + set (arrangeMap _ _ flatten_leveled_type q) as y. + eapply nd_comp. + Focus 2. + eapply nd_rule. + eapply RArrange. + apply y. + idtac. + clear y q. + simpl. + eapply nd_comp; [ apply nd_llecnac | idtac ]. + eapply nd_comp; [ idtac | eapply nd_rule; eapply RLet ]. + apply nd_prod. + Focus 2. + apply nd_id. + idtac. + induction succ; try destruct a; simpl. + unfold take_lev. + unfold mkTakeFlags. + unfold mkFlags. + unfold bnot. + simpl. + destruct l as [t' lev']. + destruct lev' as [|ec' lev']. + simpl. + apply ga_id. + unfold levelMatch. + set (@eqd_dec (HaskLevel Γ) (haskLevelEqDecidable Γ) (ec' :: lev') (ec :: nil)) as q. + destruct q. + inversion e; subst. + simpl. + apply nd_rule. + unfold flatten_leveled_type. + simpl. + unfold flatten_type. + simpl. + unfold ga_mk. + simpl. + apply RVar. + simpl. + apply ga_id. + apply ga_id. + unfold take_lev. + simpl. + apply ga_join. + apply IHsucc1. + apply IHsucc2. + Defined. + + Definition arrange_esc : forall Γ Δ ec succ t, + ND Rule + [Γ > Δ > mapOptionTree (flatten_leveled_type ) succ |- [t @@ nil]] + [Γ > Δ > mapOptionTree (flatten_leveled_type ) (drop_lev (ec :: nil) succ),, + [(@ga_mk _ (v2t ec) [] (mapOptionTree (flatten_type ○ unlev) (take_lev (ec :: nil) succ))) @@ nil] |- [t @@ nil]]. + intros. + unfold drop_lev. + set (@arrange _ succ (levelMatch (ec::nil))) as q. + set (arrangeMap _ _ flatten_leveled_type q) as y. + eapply nd_comp. + eapply nd_rule. + eapply RArrange. + apply y. + idtac. + clear y q. + + induction succ. + destruct a. + destruct l. + simpl. + unfold mkDropFlags; simpl. + unfold take_lev. + unfold mkTakeFlags. + simpl. + destruct (General.list_eq_dec h0 (ec :: nil)). + simpl. + rewrite e. + apply nd_id. + simpl. + apply nd_rule. + apply RArrange. + apply RLeft. + apply RWeak. + simpl. + apply nd_rule. + unfold take_lev. + simpl. + apply RArrange. + apply RLeft. + apply RWeak. + apply (Prelude_error "escapifying code with multi-leaf antecedents is not supported"). + Defined. + + Lemma mapOptionTree_distributes + : forall T R (a b:Tree ??T) (f:T->R), + mapOptionTree f (a,,b) = (mapOptionTree f a),,(mapOptionTree f b). + reflexivity. + Qed. + + Definition decide_tree_empty : forall {T:Type}(t:Tree ??T), + sum { q:Tree unit & t = mapTree (fun _ => None) q } unit. + intro T. + refine (fix foo t := + match t with + | T_Leaf x => _ + | T_Branch b1 b2 => let b1' := foo b1 in let b2' := foo b2 in _ + end). + intros. + destruct x. + right; apply tt. + left. + exists (T_Leaf tt). + auto. + destruct b1'. + destruct b2'. + destruct s. + destruct s0. + subst. + left. + exists (x,,x0). + reflexivity. + right; auto. + right; auto. + Defined. + + Lemma unlev_relev : forall {Γ}(t:Tree ??(HaskType Γ ★)) lev, mapOptionTree unlev (t @@@ lev) = t. + intros. + induction t. + destruct a; reflexivity. + rewrite <- IHt1 at 2. + rewrite <- IHt2 at 2. + reflexivity. + Qed. + + Lemma tree_of_nothing : forall Γ ec t a, + Arrange (a,,mapOptionTree flatten_leveled_type (drop_lev(Γ:=Γ) (ec :: nil) (t @@@ (ec :: nil)))) a. + intros. + induction t; try destruct o; try destruct a0. + simpl. + drop_simplify. + simpl. + apply RCanR. + simpl. + apply RCanR. + Opaque drop_lev. + simpl. + Transparent drop_lev. + drop_simplify. + simpl. + eapply RComp. + eapply RComp. + eapply RAssoc. + eapply RRight. + apply IHt1. + apply IHt2. + Defined. + + Lemma tree_of_nothing' : forall Γ ec t a, + Arrange a (a,,mapOptionTree flatten_leveled_type (drop_lev(Γ:=Γ) (ec :: nil) (t @@@ (ec :: nil)))). + intros. 
+ induction t; try destruct o; try destruct a0. + simpl. + drop_simplify. + simpl. + apply RuCanR. + simpl. + apply RuCanR. + Opaque drop_lev. + simpl. + Transparent drop_lev. + drop_simplify. + simpl. + eapply RComp. + Focus 2. + eapply RComp. + Focus 2. + eapply RCossa. + Focus 2. + eapply RRight. + apply IHt1. + apply IHt2. + Defined. + + Lemma krunk : forall Γ (ec:HaskTyVar Γ ECKind) t, + flatten_type (<[ ec |- t ]>) + = @ga_mk Γ (v2t ec) + (mapOptionTree flatten_type (take_arg_types_as_tree t)) + [ flatten_type (drop_arg_types_as_tree t)]. + + intros. + unfold flatten_type at 1. + simpl. + unfold ga_mk. + unfold v2t. + admit. (* BIG HUGE CHEAT FIXME *) + Qed. + + Definition flatten_proof : + forall {h}{c}, + ND SRule h c -> + ND Rule (mapOptionTree (flatten_judgment ) h) (mapOptionTree (flatten_judgment ) c). + intros. + eapply nd_map'; [ idtac | apply X ]. + clear h c X. + intros. + simpl in *. + + refine + (match X as R in SRule H C with + | SBrak Γ Δ t ec succ lev => let case_SBrak := tt in _ + | SEsc Γ Δ t ec succ lev => let case_SEsc := tt in _ + | SFlat h c r => let case_SFlat := tt in _ + end). + + destruct case_SFlat. + refine (match r as R in Rule H C with + | RArrange Γ Δ a b x d => let case_RArrange := tt in _ + | RNote Γ Δ Σ τ l n => let case_RNote := tt in _ + | RLit Γ Δ l _ => let case_RLit := tt in _ + | RVar Γ Δ σ lev => let case_RVar := tt in _ + | RGlobal Γ Δ σ l wev => let case_RGlobal := tt in _ + | RLam Γ Δ Σ tx te lev => let case_RLam := tt in _ + | RCast Γ Δ Σ σ τ lev γ => let case_RCast := tt in _ + | RAbsT Γ Δ Σ κ σ lev => let case_RAbsT := tt in _ + | RAppT Γ Δ Σ κ σ τ lev => let case_RAppT := tt in _ + | RAppCo Γ Δ Σ κ σ₁ σ₂ γ σ lev => let case_RAppCo := tt in _ + | RAbsCo Γ Δ Σ κ σ σ₁ σ₂ lev => let case_RAbsCo := tt in _ + | RApp Γ Δ Σ₁ Σ₂ tx te lev => let case_RApp := tt in _ + | RLet Γ Δ Σ₁ Σ₂ σ₁ σ₂ lev => let case_RLet := tt in _ + | RJoin Γ p lri m x q => let case_RJoin := tt in _ + | RVoid _ _ => let case_RVoid := tt in _ + | RBrak Γ Δ t ec succ lev => let case_RBrak := tt in _ + | REsc Γ Δ t ec succ lev => let case_REsc := tt in _ + | RCase Γ Δ lev tc Σ avars tbranches alts => let case_RCase := tt in _ + | RLetRec Γ Δ lri x y t => let case_RLetRec := tt in _ + end); clear X h c. + + destruct case_RArrange. + apply (flatten_arrangement'' Γ Δ a b x d). + + destruct case_RBrak. + apply (Prelude_error "found unskolemized Brak rule; this shouldn't happen"). + + destruct case_REsc. + apply (Prelude_error "found unskolemized Esc rule; this shouldn't happen"). + + destruct case_RNote. + simpl. + destruct l; simpl. + apply nd_rule; apply RNote; auto. + apply nd_rule; apply RNote; auto. + + destruct case_RLit. + simpl. + destruct l0; simpl. + unfold flatten_leveled_type. + simpl. + rewrite literal_types_unchanged. + apply nd_rule; apply RLit. + unfold take_lev; simpl. + unfold drop_lev; simpl. + simpl. + rewrite literal_types_unchanged. + apply ga_lit. + + destruct case_RVar. + Opaque flatten_judgment. + simpl. + Transparent flatten_judgment. + idtac. + unfold flatten_judgment. + unfold getjlev. + destruct lev. + apply nd_rule. apply RVar. + repeat drop_simplify. + repeat take_simplify. + simpl. + apply ga_id. + + destruct case_RGlobal. + simpl. + rename l into g. + rename σ into l. + destruct l as [|ec lev]; simpl. + destruct (eqd_dec (g:CoreVar) (hetmet_flatten:CoreVar)). + set (flatten_type (g wev)) as t. + set (RGlobal _ Δ nil (mkGlobal Γ t hetmet_id)) as q. + simpl in q. + apply nd_rule. + apply q. + apply INil. 
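    (* hetmet_flatten (above) and hetmet_unflatten (below) are the two special globals;
       at depth zero both are replaced by a use of hetmet_id at the flattened type *)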
+ destruct (eqd_dec (g:CoreVar) (hetmet_unflatten:CoreVar)). + set (flatten_type (g wev)) as t. + set (RGlobal _ Δ nil (mkGlobal Γ t hetmet_id)) as q. + simpl in q. + apply nd_rule. + apply q. + apply INil. + unfold flatten_leveled_type. simpl. + apply nd_rule; rewrite globals_do_not_have_code_types. + apply RGlobal. + apply (Prelude_error "found RGlobal at depth >0; globals should never appear inside code brackets unless escaped"). + + destruct case_RLam. + Opaque drop_lev. + Opaque take_lev. + simpl. + destruct lev as [|ec lev]; simpl; [ apply nd_rule; apply RLam; auto | idtac ]. + repeat drop_simplify. + repeat take_simplify. + eapply nd_comp. + eapply nd_rule. + eapply RArrange. + simpl. + apply RCanR. + apply boost. + simpl. + apply ga_curry. + + destruct case_RCast. + simpl. + destruct lev as [|ec lev]; simpl; [ apply nd_rule; apply RCast; auto | idtac ]. + simpl. + apply flatten_coercion; auto. + apply (Prelude_error "RCast at level >0; casting inside of code brackets is currently not supported"). + + destruct case_RJoin. + simpl. + destruct (getjlev x); destruct (getjlev q); + [ apply nd_rule; apply RJoin | idtac | idtac | idtac ]; + apply (Prelude_error "RJoin at depth >0"). + + destruct case_RApp. + simpl. + + destruct lev as [|ec lev]. simpl. apply nd_rule. + unfold flatten_leveled_type at 4. + unfold flatten_leveled_type at 2. + simpl. + replace (flatten_type (tx ---> te)) + with (flatten_type tx ---> flatten_type te). + apply RApp. + reflexivity. + + repeat drop_simplify. + repeat take_simplify. + rewrite mapOptionTree_distributes. + set (mapOptionTree (flatten_leveled_type ) (drop_lev (ec :: lev) Σ₁)) as Σ₁'. + set (mapOptionTree (flatten_leveled_type ) (drop_lev (ec :: lev) Σ₂)) as Σ₂'. + set (take_lev (ec :: lev) Σ₁) as Σ₁''. + set (take_lev (ec :: lev) Σ₂) as Σ₂''. + replace (flatten_type (tx ---> te)) with ((flatten_type tx) ---> (flatten_type te)). + apply (Prelude_error "FIXME: ga_apply"). + reflexivity. + +(* + Notation "` x" := (take_lev _ x). + Notation "`` x" := (mapOptionTree unlev x) (at level 20). + Notation "``` x" := ((drop_lev _ x)) (at level 20). + Notation "!<[]> x" := (flatten_type _ x) (at level 1). + Notation "!<[@]> x" := (mapOptionTree flatten_leveled_type x) (at level 1). +*) + + destruct case_RLet. + simpl. + destruct lev as [|ec lev]; simpl; [ apply nd_rule; apply RLet; auto | idtac ]. + repeat drop_simplify. + repeat take_simplify. + simpl. + + eapply nd_comp. + eapply nd_prod; [ idtac | apply nd_id ]. + eapply boost. + apply ga_second. + + eapply nd_comp. + eapply nd_prod. + apply nd_id. + eapply nd_comp. + eapply nd_rule. + eapply RArrange. + apply RCanR. + eapply precompose. + + apply nd_rule. + apply RLet. + + destruct case_RVoid. + simpl. + apply nd_rule. + apply RVoid. + + destruct case_RAppT. + simpl. destruct lev; simpl. + unfold flatten_leveled_type. + simpl. + rewrite flatten_commutes_with_HaskTAll. + rewrite flatten_commutes_with_substT. + apply nd_rule. + apply RAppT. + apply Δ. + apply Δ. + apply (Prelude_error "found type application at level >0; this is not supported"). + + destruct case_RAbsT. + simpl. destruct lev; simpl. + unfold flatten_leveled_type at 4. + unfold flatten_leveled_type at 2. + simpl. + rewrite flatten_commutes_with_HaskTAll. + rewrite flatten_commutes_with_HaskTApp. + eapply nd_comp; [ idtac | eapply nd_rule; eapply RAbsT ]. + simpl. + set (mapOptionTree (flatten_leveled_type ) (mapOptionTree (weakLT(κ:=κ)) Σ)) as a. + set (mapOptionTree (weakLT(κ:=κ)) (mapOptionTree (flatten_leveled_type ) Σ)) as q'. 
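    (* the assert below shows that flattening commutes with kind-environment weakening:
       mapping flatten_leveled_type after weakLT gives the same context as weakening after
       flattening, by induction on Σ using flatten_commutes_with_weakLT at the leaves *)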
+ assert (a=q'). + unfold a. + unfold q'. + clear a q'. + induction Σ. + destruct a. + simpl. + rewrite flatten_commutes_with_weakLT. + reflexivity. + reflexivity. + simpl. + rewrite <- IHΣ1. + rewrite <- IHΣ2. + reflexivity. + rewrite H. + apply nd_id. + apply Δ. + apply Δ. + apply (Prelude_error "found type abstraction at level >0; this is not supported"). + + destruct case_RAppCo. + simpl. destruct lev; simpl. + unfold flatten_leveled_type at 4. + unfold flatten_leveled_type at 2. + unfold flatten_type. + simpl. + apply nd_rule. + apply RAppCo. + apply flatten_coercion. + apply γ. + apply (Prelude_error "found coercion application at level >0; this is not supported"). + + destruct case_RAbsCo. + simpl. destruct lev; simpl. + unfold flatten_type. + simpl. + apply (Prelude_error "AbsCo not supported (FIXME)"). + apply (Prelude_error "found coercion abstraction at level >0; this is not supported"). + + destruct case_RLetRec. + rename t into lev. + simpl. + apply (Prelude_error "LetRec not supported (FIXME)"). + + destruct case_RCase. + simpl. + apply (Prelude_error "Case not supported (BIG FIXME)"). + + destruct case_SBrak. + simpl. + destruct lev. + drop_simplify. + set (drop_lev (ec :: nil) (take_arg_types_as_tree t @@@ (ec :: nil))) as empty_tree. + take_simplify. + rewrite take_lemma'. + simpl. + rewrite mapOptionTree_compose. + rewrite mapOptionTree_compose. + rewrite unlev_relev. + rewrite <- mapOptionTree_compose. + unfold flatten_leveled_type at 4. + simpl. + rewrite krunk. + set (mapOptionTree flatten_leveled_type (drop_lev (ec :: nil) succ)) as succ_host. + set (mapOptionTree (flatten_type ○ unlev)(take_lev (ec :: nil) succ)) as succ_guest. + set (mapOptionTree flatten_type (take_arg_types_as_tree t)) as succ_args. + unfold empty_tree. + eapply nd_comp; [ eapply nd_rule; eapply RArrange; apply tree_of_nothing | idtac ]. + refine (ga_unkappa' Γ Δ (v2t ec) nil _ _ _ _ ;; _). + unfold succ_host. + unfold succ_guest. + apply arrange_brak. + apply (Prelude_error "found Brak at depth >0 indicating 3-level code; only two-level code is currently supported"). + + destruct case_SEsc. + simpl. + destruct lev. + simpl. + unfold flatten_leveled_type at 2. + simpl. + rewrite krunk. + rewrite mapOptionTree_compose. + take_simplify. + drop_simplify. + simpl. + eapply nd_comp; [ idtac | eapply nd_rule; eapply RArrange; apply tree_of_nothing' ]. + simpl. + rewrite take_lemma'. + rewrite unlev_relev. + rewrite <- mapOptionTree_compose. + eapply nd_comp; [ apply (arrange_esc _ _ ec) | idtac ]. + + set (decide_tree_empty (take_lev (ec :: nil) succ)) as q'. + destruct q'. + destruct s. + rewrite e. + clear e. + + set (mapOptionTree flatten_leveled_type (drop_lev (ec :: nil) succ)) as succ_host. + set (mapOptionTree flatten_type (take_arg_types_as_tree t)) as succ_args. + + eapply nd_comp; [ idtac | eapply nd_rule; eapply RArrange; apply RCanR ]. + eapply nd_comp; [ idtac | eapply nd_rule; eapply RLet ]. + eapply nd_comp; [ apply nd_llecnac | idtac ]. + apply nd_prod; [ idtac | eapply boost ]. + induction x. + apply ga_id. + eapply nd_comp; [ idtac | eapply nd_rule; eapply RArrange; eapply RCanR ]. + simpl. + apply ga_join. + apply IHx1. + apply IHx2. + simpl. + apply postcompose. + + refine ( _ ;; (boost _ _ _ _ _ (postcompose _ _ _ _ _ _ _ _))). + apply ga_cancell. + apply firstify. + + induction x. + destruct a; simpl. + apply ga_id. + simpl. + refine ( _ ;; (boost _ _ _ _ _ (postcompose _ _ _ _ _ _ _ _))). + apply ga_cancell. + refine ( _ ;; (boost _ _ _ _ _ (postcompose _ _ _ _ _ _ _ _))). 
+ eapply firstify. + apply IHx1. + apply secondify. + apply IHx2. + + (* environment has non-empty leaves *) + apply ga_kappa'. + + (* nesting too deep *) + apply (Prelude_error "found Esc at depth >0 indicating 3-level code; only two-level code is currently supported"). + Defined. + + Definition skolemize_and_flatten_proof : + forall {h}{c}, + ND Rule h c -> + ND Rule + (mapOptionTree (flatten_judgment ○ skolemize_judgment) h) + (mapOptionTree (flatten_judgment ○ skolemize_judgment) c). + intros. + rewrite mapOptionTree_compose. + rewrite mapOptionTree_compose. + apply flatten_proof. + apply skolemize_proof. + apply X. + Defined. + + + (* to do: establish some metric on judgments (max length of level of any succedent type, probably), show how to + * calculate it, and show that the flattening procedure above drives it down by one *) + + (* + Instance FlatteningFunctor {Γ}{Δ}{ec} : Functor (JudgmentsL (PCF Γ Δ ec)) (TypesL (SystemFCa Γ Δ)) (obact) := + { fmor := FlatteningFunctor_fmor }. + + Definition ReificationFunctor Γ Δ : Functor (JudgmentsL _ _ (PCF n Γ Δ)) SystemFCa' (mapOptionTree brakifyJudg). + refine {| fmor := ReificationFunctor_fmor Γ Δ |}; unfold hom; unfold ob; simpl ; intros. + + Definition PCF_SMME (n:nat)(Γ:TypeEnv)(Δ:CoercionEnv Γ) : ProgrammingLanguageSMME. + refine {| plsmme_pl := PCF n Γ Δ |}. + Defined. + + Definition SystemFCa_SMME (n:nat)(Γ:TypeEnv)(Δ:CoercionEnv Γ) : ProgrammingLanguageSMME. + refine {| plsmme_pl := SystemFCa n Γ Δ |}. + Defined. + + Definition ReificationFunctorMonoidal n : MonoidalFunctor (JudgmentsN n) (JudgmentsN (S n)) (ReificationFunctor n). + Defined. + + (* 5.1.4 *) + Definition PCF_SystemFCa_two_level n Γ Δ : TwoLevelLanguage (PCF_SMME n Γ Δ) (SystemFCa_SMME (S n) Γ Δ). + Defined. + *) + (* ... and the retraction exists *) + +End HaskFlattener. + +Implicit Arguments garrow [ ]. diff --git a/src/HaskKinds.v b/src/HaskKinds.v index 3539e95..d575d12 100644 --- a/src/HaskKinds.v +++ b/src/HaskKinds.v @@ -37,6 +37,10 @@ Instance KindToString : ToString Kind := { toString := kindToString }. Notation "'★'" := KindStar. Notation "a ⇛ b" := (KindArrow a b). +(* the kind of environment classifiers *) +Definition ECKind := ★ ⇛ ★ ⇛ ★. +Opaque ECKind. + Fixpoint kindToLatexMath (k:Kind) : LatexMath := match k with | ★ => rawLatexMath "\star" diff --git a/src/HaskLiteralsAndTyCons.v b/src/HaskLiterals.v similarity index 69% rename from src/HaskLiteralsAndTyCons.v rename to src/HaskLiterals.v index 62d638b..c8a2651 100644 --- a/src/HaskLiteralsAndTyCons.v +++ b/src/HaskLiterals.v @@ -1,5 +1,5 @@ (*********************************************************************************************************************************) -(* HaskLiteralsAndTyCons: representation of compile-time constants (literals) *) +(* HaskLiterals: representation of compile-time constants (literals) *) (*********************************************************************************************************************************) Generalizable All Variables. @@ -7,12 +7,7 @@ Require Import Preamble. Require Import General. Require Import Coq.Strings.String. Require Import HaskKinds. - -Variable CoreDataCon : Type. Extract Inlined Constant CoreDataCon => "DataCon.DataCon". - -(* once again, we pull the trick of having multiple Coq types map to a single Haskell type to provide stronger typing *) -Variable TyCon : Type. Extract Inlined Constant TyCon => "TyCon.TyCon". -Variable TyFun : Type. Extract Inlined Constant TyFun => "TyCon.TyCon". +Require Import HaskTyCons. 
(* Since GHC is written in Haskell, compile-time Haskell constants are represented using Haskell (Prelude) types *) Variable HaskInt : Type. Extract Inlined Constant HaskInt => "Prelude.Int". @@ -21,11 +16,6 @@ Variable HaskFastString : Type. Extract Inlined Constant H Variable HaskInteger : Type. Extract Inlined Constant HaskInteger => "Prelude.Integer". Variable HaskRational : Type. Extract Inlined Constant HaskRational => "Prelude.Rational". -Variable CoreName : Type. Extract Inlined Constant CoreName => "Name.Name". -Variable Class_ : Type. Extract Inlined Constant Class_ => "Class.Class". -Variable CoreIPName : Type -> Type. Extract Constant CoreIPName "’a" => "BasicTypes.IPName". - Extraction Inline CoreIPName. - (* This type extracts exactly onto GHC's Literal.Literal type *) Inductive HaskLiteral := | HaskMachChar : HaskChar -> HaskLiteral @@ -81,13 +71,3 @@ match lit with | HaskMachDouble _ => doublePrimTyCon | HaskMachLabel _ _ _ => addrPrimTyCon end. - -Variable tyConToString : TyCon -> string. Extract Inlined Constant tyConToString => "outputableToString". -Variable tyFunToString : TyFun -> string. Extract Inlined Constant tyFunToString => "outputableToString". -Instance TyConToString : ToString TyCon := { toString := tyConToString }. -Instance TyFunToString : ToString TyFun := { toString := tyFunToString }. -Instance TyConToLatex : ToLatex TyCon := { toLatex := fun x => toLatex (toString x) }. -Instance TyFunToLatex : ToLatex TyCon := { toLatex := fun x => toLatex (toString x) }. - -Variable ModalBoxTyCon : TyCon. Extract Inlined Constant ModalBoxTyCon => "TysWiredIn.hetMetCodeTypeTyCon". -Variable ArrowTyCon : TyCon. Extract Constant ArrowTyCon => "Type.funTyCon". diff --git a/src/HaskProgrammingLanguage.v b/src/HaskProgrammingLanguage.v new file mode 100644 index 0000000..8aba304 --- /dev/null +++ b/src/HaskProgrammingLanguage.v @@ -0,0 +1,195 @@ +(*********************************************************************************************************************************) +(* HaskProgrammingLanguage: *) +(* *) +(* System FC^\alpha is a ProgrammingLanguage. *) +(* *) +(*********************************************************************************************************************************) + +Generalizable All Variables. +Require Import Preamble. +Require Import General. +Require Import NaturalDeduction. +Require Import Coq.Strings.String. +Require Import Coq.Lists.List. + +Require Import Algebras_ch4. +Require Import Categories_ch1_3. +Require Import Functors_ch1_4. +Require Import Isomorphisms_ch1_5. +Require Import ProductCategories_ch1_6_1. +Require Import OppositeCategories_ch1_6_2. +Require Import Enrichment_ch2_8. +Require Import Subcategories_ch7_1. +Require Import NaturalTransformations_ch7_4. +Require Import NaturalIsomorphisms_ch7_5. +Require Import MonoidalCategories_ch7_8. +Require Import Coherence_ch7_8. + +Require Import HaskKinds. +Require Import HaskCoreTypes. +Require Import HaskLiterals. +Require Import HaskTyCons. +Require Import HaskStrongTypes. +Require Import HaskProof. +Require Import NaturalDeduction. +Require Import NaturalDeductionCategory. + +Require Import HaskStrongTypes. +Require Import HaskStrong. +Require Import HaskProof. +Require Import HaskStrongToProof. +Require Import HaskProofToStrong. +Require Import ProgrammingLanguage. + +Open Scope nd_scope. + +(* The judgments any specific Γ,Δ form a category with proofs as morphisms *) +Section HaskProgrammingLanguage. + + Context (ndr_systemfc:@ND_Relation _ Rule). 
+ + Context Γ (Δ:CoercionEnv Γ). + + + Definition JudgΓΔ := prod (Tree ??(LeveledHaskType Γ ★)) (Tree ??(LeveledHaskType Γ ★)). + + Definition RuleΓΔ : Tree ??JudgΓΔ -> Tree ??JudgΓΔ -> Type := + fun h c => + Rule + (mapOptionTree (fun j => Γ > Δ > fst j |- snd j) h) + (mapOptionTree (fun j => Γ > Δ > fst j |- snd j) c). + + Definition SystemFCa_cut : forall a b c, ND RuleΓΔ ([(a,b)],,[(b,c)]) [(a,c)]. + intros. + destruct b. + destruct o. + destruct c. + destruct o. + + (* when the cut is a single leaf and the RHS is a single leaf: *) + (* + eapply nd_comp. + eapply nd_prod. + apply nd_id. + eapply nd_rule. + set (@org_fc) as ofc. + set (RArrange Γ Δ _ _ _ (RuCanL [l0])) as rule. + apply org_fc with (r:=RArrange _ _ _ _ _ (RuCanL [_])). + auto. + eapply nd_comp; [ idtac | eapply nd_rule; apply org_fc with (r:=RArrange _ _ _ _ _ (RCanL _)) ]. + apply nd_rule. + destruct l. + destruct l0. + assert (h0=h2). admit. + subst. + apply org_fc with (r:=@RLet Γ Δ [] a h1 h h2). + auto. + auto. + *) + admit. + apply (Prelude_error "systemfc cut rule invoked with [a|=[b]] [[b]|=[]]"). + apply (Prelude_error "systemfc cut rule invoked with [a|=[b]] [[b]|=[x,,y]]"). + apply (Prelude_error "systemfc rule invoked with [a|=[]] [[]|=c]"). + apply (Prelude_error "systemfc rule invoked with [a|=[b,,c]] [[b,,c]|=z]"). + Defined. + + Instance SystemFCa_sequents : @SequentND _ RuleΓΔ _ _ := + { snd_cut := SystemFCa_cut }. + apply Build_SequentND. + intros. + induction a. + destruct a; simpl. + (* + apply nd_rule. + destruct l. + apply org_fc with (r:=RVar _ _ _ _). + auto. + apply nd_rule. + apply org_fc with (r:=RVoid _ _ ). + auto. + eapply nd_comp. + eapply nd_comp; [ apply nd_llecnac | idtac ]. + apply (nd_prod IHa1 IHa2). + apply nd_rule. + apply org_fc with (r:=RJoin _ _ _ _ _ _). + auto. + admit. + *) + admit. + admit. + admit. + admit. + Defined. + + Definition SystemFCa_left a b c : ND RuleΓΔ [(b,c)] [((a,,b),(a,,c))]. + admit. + (* + eapply nd_comp; [ apply nd_llecnac | eapply nd_comp; [ idtac | idtac ] ]. + eapply nd_prod; [ apply snd_initial | apply nd_id ]. + apply nd_rule. + apply org_fc with (r:=RJoin Γ Δ a b a c). + auto. + *) + Defined. + + Definition SystemFCa_right a b c : ND RuleΓΔ [(b,c)] [((b,,a),(c,,a))]. + admit. + (* + eapply nd_comp; [ apply nd_rlecnac | eapply nd_comp; [ idtac | idtac ] ]. + eapply nd_prod; [ apply nd_id | apply snd_initial ]. + apply nd_rule. + apply org_fc with (r:=RJoin Γ Δ b a c a). + auto. + *) + Defined. + + Instance SystemFCa_sequent_join : @ContextND _ _ _ _ SystemFCa_sequents := + { cnd_expand_left := fun a b c => SystemFCa_left c a b + ; cnd_expand_right := fun a b c => SystemFCa_right c a b }. + (* + intros; apply nd_rule. simpl. + apply (org_fc _ _ _ _ ((RArrange _ _ _ _ _ (RCossa _ _ _)))). + auto. + + intros; apply nd_rule. simpl. + apply (org_fc _ _ _ _ (RArrange _ _ _ _ _ (RAssoc _ _ _))); auto. + + intros; apply nd_rule. simpl. + apply (org_fc _ _ _ _ (RArrange _ _ _ _ _ (RCanL _))); auto. + + intros; apply nd_rule. simpl. + apply (org_fc _ _ _ _ (RArrange _ _ _ _ _ (RCanR _))); auto. + + intros; apply nd_rule. simpl. + apply (org_fc _ _ _ _ (RArrange _ _ _ _ _ (RuCanL _))); auto. + + intros; apply nd_rule. simpl. + apply (org_fc _ _ _ _ (RArrange _ _ _ _ _ (RuCanR _))); auto. + *) + admit. + admit. + admit. + admit. + admit. + admit. + Defined. + + Instance OrgFC : @ND_Relation _ RuleΓΔ. + Admitted. + + Instance OrgFC_SequentND_Relation : SequentND_Relation SystemFCa_sequent_join OrgFC. + admit. + Defined. 
+ + Definition OrgFC_ContextND_Relation + : @ContextND_Relation _ _ _ _ _ SystemFCa_sequent_join OrgFC OrgFC_SequentND_Relation. + admit. + Defined. + + (* 5.1.2 *) + Instance SystemFCa : @ProgrammingLanguage (LeveledHaskType Γ ★) _ := + { pl_eqv := OrgFC_ContextND_Relation + ; pl_snd := SystemFCa_sequents + }. + +End HaskProgrammingLanguage. diff --git a/src/HaskProof.v b/src/HaskProof.v index a5e4abd..ee536b0 100644 --- a/src/HaskProof.v +++ b/src/HaskProof.v @@ -14,7 +14,8 @@ Require Import Coq.Strings.String. Require Import Coq.Lists.List. Require Import HaskKinds. Require Import HaskCoreTypes. -Require Import HaskLiteralsAndTyCons. +Require Import HaskLiterals. +Require Import HaskTyCons. Require Import HaskStrongTypes. Require Import HaskWeakVars. @@ -73,14 +74,14 @@ Inductive Rule : Tree ??Judg -> Tree ??Judg -> Type := (* SystemFC rules *) | RVar : ∀ Γ Δ σ l, Rule [ ] [Γ>Δ> [σ@@l] |- [σ @@l]] -| RGlobal : ∀ Γ Δ τ l, WeakExprVar -> Rule [ ] [Γ>Δ> [] |- [τ @@l]] +| RGlobal : forall Γ Δ l (g:Global Γ) v, Rule [ ] [Γ>Δ> [] |- [g v @@l]] | RLam : forall Γ Δ Σ (tx:HaskType Γ ★) te l, Rule [Γ>Δ> Σ,,[tx@@l]|- [te@@l] ] [Γ>Δ> Σ |- [tx--->te @@l]] | RCast : forall Γ Δ Σ (σ₁ σ₂:HaskType Γ ★) l, HaskCoercion Γ Δ (σ₁∼∼∼σ₂) -> Rule [Γ>Δ> Σ |- [σ₁@@l] ] [Γ>Δ> Σ |- [σ₂ @@l]] | RJoin : ∀ Γ Δ Σ₁ Σ₂ τ₁ τ₂ , Rule ([Γ > Δ > Σ₁ |- τ₁ ],,[Γ > Δ > Σ₂ |- τ₂ ]) [Γ>Δ> Σ₁,,Σ₂ |- τ₁,,τ₂ ] -| RApp : ∀ Γ Δ Σ₁ Σ₂ tx te l, Rule ([Γ>Δ> Σ₁ |- [tx--->te @@l]],,[Γ>Δ> Σ₂ |- [tx@@l]]) [Γ>Δ> Σ₁,,Σ₂ |- [te @@l]] +| RApp : ∀ Γ Δ Σ₁ Σ₂ tx te l, Rule ([Γ>Δ> Σ₁ |- [tx@@l]],,[Γ>Δ> Σ₂ |- [tx--->te @@l]]) [Γ>Δ> Σ₁,,Σ₂ |- [te @@l]] | RLet : ∀ Γ Δ Σ₁ Σ₂ σ₁ σ₂ l, Rule ([Γ>Δ> Σ₂ |- [σ₂@@l]],,[Γ>Δ> Σ₁,,[σ₂@@l] |- [σ₁@@l] ]) [Γ>Δ> Σ₁,,Σ₂ |- [σ₁ @@l]] @@ -168,4 +169,40 @@ Lemma systemfc_all_rules_one_conclusion : forall h c1 c2 (r:Rule h (c1,,c2)), Fa auto. Qed. - +(* "Arrange" objects are parametric in the type of the leaves of the tree *) +Definition arrangeMap : + forall {T} (Σ₁ Σ₂:Tree ??T) {R} (f:T -> R), + Arrange Σ₁ Σ₂ -> + Arrange (mapOptionTree f Σ₁) (mapOptionTree f Σ₂). + intros. + induction X; simpl. + apply RCanL. + apply RCanR. + apply RuCanL. + apply RuCanR. + apply RAssoc. + apply RCossa. + apply RExch. + apply RWeak. + apply RCont. + apply RLeft; auto. + apply RRight; auto. + eapply RComp; [ apply IHX1 | apply IHX2 ]. + Defined. + +(* a frequently-used Arrange *) +Definition arrangeSwapMiddle {T} (a b c d:Tree ??T) : + Arrange ((a,,b),,(c,,d)) ((a,,c),,(b,,d)). + eapply RComp. + apply RCossa. + eapply RComp. + eapply RLeft. + eapply RComp. + eapply RAssoc. + eapply RRight. + apply RExch. + eapply RComp. + eapply RLeft. + eapply RCossa. + eapply RAssoc. + Defined. diff --git a/src/HaskProofFlattener.v b/src/HaskProofFlattener.v deleted file mode 100644 index 7b70e6e..0000000 --- a/src/HaskProofFlattener.v +++ /dev/null @@ -1,407 +0,0 @@ -(*********************************************************************************************************************************) -(* HaskProofFlattener: *) -(* *) -(* The Flattening Functor. *) -(* *) -(*********************************************************************************************************************************) - -Generalizable All Variables. -Require Import Preamble. -Require Import General. -Require Import NaturalDeduction. -Require Import Coq.Strings.String. -Require Import Coq.Lists.List. - -Require Import HaskKinds. -Require Import HaskCoreTypes. -Require Import HaskLiteralsAndTyCons. -Require Import HaskStrongTypes. -Require Import HaskProof. 
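(* A standalone toy model of the arrangeMap definition added to HaskProof.v above, showing
   the same functoriality idea in isolation: rearrangement witnesses are parametric in the
   leaf type, so any leaf-wise map can be pushed through them.  All names below (MiniTree,
   MiniArrange, miniMap, miniArrangeMap) are invented for illustration only and do not occur
   in this development; only a few representative constructors of Arrange are modelled. *)
(*
Inductive MiniTree (T:Type) : Type :=
| MLeaf   : T -> MiniTree T
| MBranch : MiniTree T -> MiniTree T -> MiniTree T.

Fixpoint miniMap (A B:Type)(f:A->B)(t:MiniTree A) : MiniTree B :=
  match t with
  | MLeaf a       => MLeaf B (f a)
  | MBranch t1 t2 => MBranch B (miniMap A B f t1) (miniMap A B f t2)
  end.

Inductive MiniArrange (T:Type) : MiniTree T -> MiniTree T -> Type :=
| MExch : forall a b:MiniTree T,   MiniArrange T (MBranch T a b) (MBranch T b a)
| MLeft : forall a b c:MiniTree T, MiniArrange T a b -> MiniArrange T (MBranch T a c) (MBranch T b c)
| MComp : forall a b c:MiniTree T, MiniArrange T a b -> MiniArrange T b c -> MiniArrange T a c.

(* the analogue of arrangeMap: push a leaf-wise map through a rearrangement witness *)
Definition miniArrangeMap (A B:Type)(f:A->B)(t1 t2:MiniTree A)(r:MiniArrange A t1 t2) :
  MiniArrange B (miniMap A B f t1) (miniMap A B f t2).
  induction r; simpl.
  apply MExch.
  apply MLeft; auto.
  eapply MComp; [ apply IHr1 | apply IHr2 ].
  Defined.
*)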
-Require Import NaturalDeduction. -Require Import NaturalDeductionCategory. - -Require Import Algebras_ch4. -Require Import Categories_ch1_3. -Require Import Functors_ch1_4. -Require Import Isomorphisms_ch1_5. -Require Import ProductCategories_ch1_6_1. -Require Import OppositeCategories_ch1_6_2. -Require Import Enrichment_ch2_8. -Require Import Subcategories_ch7_1. -Require Import NaturalTransformations_ch7_4. -Require Import NaturalIsomorphisms_ch7_5. -Require Import BinoidalCategories. -Require Import PreMonoidalCategories. -Require Import MonoidalCategories_ch7_8. -Require Import Coherence_ch7_8. - -Require Import HaskStrongTypes. -Require Import HaskStrong. -Require Import HaskProof. -Require Import HaskStrongToProof. -Require Import HaskProofToStrong. -Require Import ProgrammingLanguage. -Require Import HaskProofStratified. - -Open Scope nd_scope. - - -(* - * The flattening transformation. Currently only TWO-level languages are - * supported, and the level-1 sublanguage is rather limited. -* - * This file abuses terminology pretty badly. For purposes of this file, - * "PCF" means "the level-1 sublanguage" and "FC" (aka System FC) means - * the whole language (level-0 language including bracketed level-1 terms) - *) -Section HaskProofFlattener. - - -(* - Definition code2garrow0 {Γ}(ec t1 t2:RawHaskType Γ ★) : RawHaskType Γ ★. - admit. - Defined. - Definition code2garrow Γ (ec t:RawHaskType Γ ★) := - match t with -(* | TApp ★ ★ (TApp _ ★ TArrow tx) t' => code2garrow0 ec tx t'*) - | _ => code2garrow0 ec unitType t - end. - Opaque code2garrow. - Fixpoint typeMap {TV}{κ}(ty:@RawHaskType TV κ) : @RawHaskType TV κ := - match ty as TY in RawHaskType _ K return RawHaskType TV K with - | TCode ec t => code2garrow _ ec t - | TApp _ _ t1 t2 => TApp (typeMap t1) (typeMap t2) - | TAll _ f => TAll _ (fun tv => typeMap (f tv)) - | TCoerc _ t1 t2 t3 => TCoerc (typeMap t1) (typeMap t2) (typeMap t3) - | TVar _ v => TVar v - | TArrow => TArrow - | TCon tc => TCon tc - | TyFunApp tf rhtl => (* FIXME *) TyFunApp tf rhtl - end. -*) - - -(* - Definition code2garrow Γ (ec t:RawHaskType Γ ★) := - match t with -(* | TApp ★ ★ (TApp _ ★ TArrow tx) t' => code2garrow0 ec tx t'*) - | _ => code2garrow0 ec unitType t - end. - Opaque code2garrow. - Fixpoint typeMap {TV}{κ}(ty:@RawHaskType TV κ) : @RawHaskType TV κ := - match ty as TY in RawHaskType _ K return RawHaskType TV K with - | TCode ec t => code2garrow _ ec t - | TApp _ _ t1 t2 => TApp (typeMap t1) (typeMap t2) - | TAll _ f => TAll _ (fun tv => typeMap (f tv)) - | TCoerc _ t1 t2 t3 => TCoerc (typeMap t1) (typeMap t2) (typeMap t3) - | TVar _ v => TVar v - | TArrow => TArrow - | TCon tc => TCon tc - | TyFunApp tf rhtl => (* FIXME *) TyFunApp tf rhtl - end. - - Definition typeMapL {Γ}(lht:LeveledHaskType Γ ★) : LeveledHaskType Γ ★ := - match lht with -(* | t @@ nil => (fun TV ite => typeMap (t TV ite)) @@ lev*) - | t @@ lev => (fun TV ite => typeMap (t TV ite)) @@ lev - end. -*) - - (* gathers a tree of guest-language types into a single host-language types via the tensor *) - Definition tensorizeType {Γ} (lt:Tree ??(HaskType Γ ★)) : HaskType Γ ★. - admit. - Defined. - - Definition mkGA {Γ} : HaskType Γ ★ -> HaskType Γ ★ -> HaskType Γ ★. - admit. - Defined. - - Definition guestJudgmentAsGArrowType {Γ}{Δ}(lt:PCFJudg Γ Δ) : HaskType Γ ★ := - match lt with - (x,y) => (mkGA (tensorizeType x) (tensorizeType y)) - end. - - Definition obact {Γ}{Δ} (X:Tree ??(PCFJudg Γ Δ)) : Tree ??(LeveledHaskType Γ ★) := - mapOptionTree guestJudgmentAsGArrowType X @@@ nil. 
- - Hint Constructors Rule_Flat. - Context {ndr:@ND_Relation _ Rule}. - - (* - * Here it is, what you've all been waiting for! When reading this, - * it might help to have the definition for "Inductive ND" (see - * NaturalDeduction.v) handy as a cross-reference. - *) - Hint Constructors Rule_Flat. - Definition FlatteningFunctor_fmor {Γ}{Δ}{ec} - : forall h c, - (h~~{JudgmentsL (PCF Γ Δ ec)}~~>c) -> - ((obact(Δ:=ec) h)~~{TypesL (SystemFCa Γ Δ)}~~>(obact(Δ:=ec) c)). - - set (@nil (HaskTyVar Γ ★)) as lev. - - unfold hom; unfold ob; unfold ehom; simpl; unfold pmon_I; unfold obact; intros. - - induction X; simpl. - - (* the proof from no hypotheses of no conclusions (nd_id0) becomes RVoid *) - apply nd_rule; apply (org_fc _ _ [] [(_,_)] (RVoid _ _)). apply Flat_RVoid. - - (* the proof from hypothesis X of conclusion X (nd_id1) becomes RVar *) - apply nd_rule; apply (org_fc _ _ [] [(_,_)] (RVar _ _ _ _)). apply Flat_RVar. - - (* the proof from hypothesis X of no conclusions (nd_weak) becomes RWeak;;RVoid *) - eapply nd_comp; - [ idtac - | eapply nd_rule - ; eapply (org_fc _ _ [(_,_)] [(_,_)] (RArrange _ _ _ _ _ (RWeak _))) - ; auto ]. - eapply nd_rule. - eapply (org_fc _ _ [] [(_,_)] (RVoid _ _)); auto. apply Flat_RVoid. - apply Flat_RArrange. - - (* the proof from hypothesis X of two identical conclusions X,,X (nd_copy) becomes RVar;;RJoin;;RCont *) - eapply nd_comp; [ idtac | eapply nd_rule; eapply (org_fc _ _ [(_,_)] [(_,_)] (RArrange _ _ _ _ _ (RCont _))) ]. - eapply nd_comp; [ apply nd_llecnac | idtac ]. - set (snd_initial(SequentND:=pl_snd(ProgrammingLanguage:=SystemFCa Γ Δ)) - (mapOptionTree (guestJudgmentAsGArrowType(Δ:=ec)) h @@@ lev)) as q. - eapply nd_comp. - eapply nd_prod. - apply q. - apply q. - apply nd_rule. - eapply (org_fc _ _ ([(_,_)],,[(_,_)]) [(_,_)] (RJoin _ _ _ _ _ _ )). - destruct h; simpl. - destruct o. - simpl. - apply Flat_RJoin. - apply Flat_RJoin. - apply Flat_RJoin. - apply Flat_RArrange. - - (* nd_prod becomes nd_llecnac;;nd_prod;;RJoin *) - eapply nd_comp. - apply (nd_llecnac ;; nd_prod IHX1 IHX2). - apply nd_rule. - eapply (org_fc _ _ ([(_,_)],,[(_,_)]) [(_,_)] (RJoin _ _ _ _ _ _ )). - apply (Flat_RJoin Γ Δ (mapOptionTree guestJudgmentAsGArrowType h1 @@@ nil) - (mapOptionTree guestJudgmentAsGArrowType h2 @@@ nil) - (mapOptionTree guestJudgmentAsGArrowType c1 @@@ nil) - (mapOptionTree guestJudgmentAsGArrowType c2 @@@ nil)). - - (* nd_comp becomes pl_subst (aka nd_cut) *) - eapply nd_comp. - apply (nd_llecnac ;; nd_prod IHX1 IHX2). - clear IHX1 IHX2 X1 X2. - (* - apply (@snd_cut _ _ _ _ _ _ (@pl_cnd _ _ _ _ (SystemFCa Γ Δ))). - *) - admit. - - (* nd_cancell becomes RVar;;RuCanL *) - eapply nd_comp; - [ idtac | eapply nd_rule; apply (org_fc _ _ [(_,_)] [(_,_)] (RArrange _ _ _ _ _ (RuCanL _))) ]. - apply (snd_initial(SequentND:=pl_cnd(ProgrammingLanguage:=(SystemFCa Γ Δ)))). - apply Flat_RArrange. - - (* nd_cancelr becomes RVar;;RuCanR *) - eapply nd_comp; - [ idtac | eapply nd_rule; apply (org_fc _ _ [(_,_)] [(_,_)] (RArrange _ _ _ _ _ (RuCanR _))) ]. - apply (snd_initial(SequentND:=pl_cnd(ProgrammingLanguage:=(SystemFCa Γ Δ)))). - apply Flat_RArrange. - - (* nd_llecnac becomes RVar;;RCanL *) - eapply nd_comp; - [ idtac | eapply nd_rule; apply (org_fc _ _ [(_,_)] [(_,_)] (RArrange _ _ _ _ _ (RCanL _))) ]. - apply (snd_initial(SequentND:=pl_cnd(ProgrammingLanguage:=(SystemFCa Γ Δ)))). - apply Flat_RArrange. - - (* nd_rlecnac becomes RVar;;RCanR *) - eapply nd_comp; - [ idtac | eapply nd_rule; apply (org_fc _ _ [(_,_)] [(_,_)] (RArrange _ _ _ _ _ (RCanR _))) ]. 
- apply (snd_initial(SequentND:=pl_cnd(ProgrammingLanguage:=(SystemFCa Γ Δ)))). - apply Flat_RArrange. - - (* nd_assoc becomes RVar;;RAssoc *) - eapply nd_comp; - [ idtac | eapply nd_rule; apply (org_fc _ _ [(_,_)] [(_,_)] (RArrange _ _ _ _ _ (RAssoc _ _ _))) ]. - apply (snd_initial(SequentND:=pl_cnd(ProgrammingLanguage:=(SystemFCa Γ Δ)))). - apply Flat_RArrange. - - (* nd_cossa becomes RVar;;RCossa *) - eapply nd_comp; - [ idtac | eapply nd_rule; apply (org_fc _ _ [(_,_)] [(_,_)] (RArrange _ _ _ _ _ (RCossa _ _ _))) ]. - apply (snd_initial(SequentND:=pl_cnd(ProgrammingLanguage:=(SystemFCa Γ Δ)))). - apply Flat_RArrange. - - destruct r as [r rp]. - refine (match rp as R in @Rule_PCF _ _ _ H C _ with - | PCF_RArrange h c r q => let case_RURule := tt in _ - | PCF_RLit lit => let case_RLit := tt in _ - | PCF_RNote Σ τ n => let case_RNote := tt in _ - | PCF_RVar σ => let case_RVar := tt in _ - | PCF_RLam Σ tx te => let case_RLam := tt in _ - | PCF_RApp Σ tx te p => let case_RApp := tt in _ - | PCF_RLet Σ σ₁ σ₂ p => let case_RLet := tt in _ - | PCF_RJoin b c d e => let case_RJoin := tt in _ - | PCF_RVoid => let case_RVoid := tt in _ - (*| PCF_RCase T κlen κ θ l x => let case_RCase := tt in _*) - (*| PCF_RLetRec Σ₁ τ₁ τ₂ lev => let case_RLetRec := tt in _*) - end); simpl in *. - clear rp. - clear r h c. - rename r0 into r; rename h0 into h; rename c0 into c. - - destruct case_RURule. - refine (match q with - | RLeft a b c r => let case_RLeft := tt in _ - | RRight a b c r => let case_RRight := tt in _ - | RCanL b => let case_RCanL := tt in _ - | RCanR b => let case_RCanR := tt in _ - | RuCanL b => let case_RuCanL := tt in _ - | RuCanR b => let case_RuCanR := tt in _ - | RAssoc b c d => let case_RAssoc := tt in _ - | RCossa b c d => let case_RCossa := tt in _ - | RExch b c => let case_RExch := tt in _ - | RWeak b => let case_RWeak := tt in _ - | RCont b => let case_RCont := tt in _ - | RComp a b c f g => let case_RComp := tt in _ - end). - - destruct case_RCanL. - (* ga_cancell *) - admit. - - destruct case_RCanR. - (* ga_cancelr *) - admit. - - destruct case_RuCanL. - (* ga_uncancell *) - admit. - - destruct case_RuCanR. - (* ga_uncancelr *) - admit. - - destruct case_RAssoc. - (* ga_assoc *) - admit. - - destruct case_RCossa. - (* ga_unassoc *) - admit. - - destruct case_RExch. - (* ga_swap *) - admit. - - destruct case_RWeak. - (* ga_drop *) - admit. - - destruct case_RCont. - (* ga_copy *) - admit. - - destruct case_RLeft. - (* ga_second *) - admit. - - destruct case_RRight. - (* ga_first *) - admit. - - destruct case_RComp. - (* ga_comp *) - admit. - - destruct case_RLit. - (* ga_literal *) - admit. - - (* hey cool, I figured out how to pass CoreNote's through... *) - destruct case_RNote. - eapply nd_comp. - eapply nd_rule. - eapply (org_fc _ _ [] [(_,_)] (RVar _ _ _ _)) . auto. - apply Flat_RVar. - apply nd_rule. - apply (org_fc _ _ [(_,_)] [(_,_)] (RNote _ _ _ _ _ n)). auto. - apply Flat_RNote. - - destruct case_RVar. - (* ga_id *) - admit. - - destruct case_RLam. - (* ga_curry, but try to avoid this someday in the future if the argument type isn't a function *) - admit. - - destruct case_RApp. - (* ga_apply *) - admit. - - destruct case_RLet. - (* ga_comp! perhaps this means the ga_curry avoidance can be done by turning lambdas into lets? *) - admit. - - destruct case_RVoid. - (* ga_id u *) - admit. - - destruct case_RJoin. - (* ga_first+ga_second; technically this assumes a specific evaluation order, which is bad *) - admit. - - Defined. 
- - Instance FlatteningFunctor {Γ}{Δ}{ec} : Functor (JudgmentsL (PCF Γ Δ ec)) (TypesL (SystemFCa Γ Δ)) (obact) := - { fmor := FlatteningFunctor_fmor }. - admit. - admit. - admit. - Defined. - - (* - Definition ReificationFunctor Γ Δ : Functor (JudgmentsL _ _ (PCF n Γ Δ)) SystemFCa' (mapOptionTree brakifyJudg). - refine {| fmor := ReificationFunctor_fmor Γ Δ |}; unfold hom; unfold ob; simpl ; intros. - unfold ReificationFunctor_fmor; simpl. - admit. - unfold ReificationFunctor_fmor; simpl. - admit. - unfold ReificationFunctor_fmor; simpl. - admit. - Defined. - - Definition PCF_SMME (n:nat)(Γ:TypeEnv)(Δ:CoercionEnv Γ) : ProgrammingLanguageSMME. - refine {| plsmme_pl := PCF n Γ Δ |}. - admit. - Defined. - - Definition SystemFCa_SMME (n:nat)(Γ:TypeEnv)(Δ:CoercionEnv Γ) : ProgrammingLanguageSMME. - refine {| plsmme_pl := SystemFCa n Γ Δ |}. - admit. - Defined. - - Definition ReificationFunctorMonoidal n : MonoidalFunctor (JudgmentsN n) (JudgmentsN (S n)) (ReificationFunctor n). - admit. - Defined. - - (* 5.1.4 *) - Definition PCF_SystemFCa_two_level n Γ Δ : TwoLevelLanguage (PCF_SMME n Γ Δ) (SystemFCa_SMME (S n) Γ Δ). - admit. - (* ... and the retraction exists *) - Defined. - *) - (* Any particular proof in HaskProof is only finitely large, so it uses only finitely many levels of nesting, so - * it falls within (SystemFCa n) for some n. This function calculates that "n" and performs the translation *) - (* - Definition HaskProof_to_SystemFCa : - forall h c (pf:ND Rule h c), - { n:nat & h ~~{JudgmentsL (SystemFCa_SMME n)}~~> c }. - *) - (* for every n we have a functor from the category of (n+1)-bounded proofs to the category of n-bounded proofs *) - -End HaskProofFlattener. - diff --git a/src/HaskProofToLatex.v b/src/HaskProofToLatex.v index 4773eff..d35a870 100644 --- a/src/HaskProofToLatex.v +++ b/src/HaskProofToLatex.v @@ -11,7 +11,8 @@ Require Import Coq.Lists.List. Require Import HaskKinds. Require Import HaskWeakVars. Require Import HaskWeakTypes. -Require Import HaskLiteralsAndTyCons. +Require Import HaskLiterals. +Require Import HaskTyCons. Require Import HaskStrongTypes. Require Import HaskStrong. Require Import HaskProof. @@ -101,7 +102,7 @@ Fixpoint typeToLatexMath (needparens:bool){κ}(t:RawHaskType (fun _ => LatexMath ; let body := t1'+++(rawLatexMath " ")+++t2' in return (if needparens then (rawLatexMath "(")+++body+++(rawLatexMath ")") else body) end - | TyFunApp tfc lt => bind rest = typeListToRawLatexMath false lt + | TyFunApp tfc _ _ lt => bind rest = typeListToRawLatexMath false lt ; return (rawLatexMath "{\text{\tt{")+++(toLatexMath (toString tfc))+++(rawLatexMath "}}}")+++ (rawLatexMath "_{")+++(rawLatexMath (toString (length (fst (tyFunKind tfc)))))+++ (rawLatexMath "}")+++ @@ -126,7 +127,7 @@ Definition ltypeToLatexMath {Γ:TypeEnv}{κ}(t:LeveledHaskType Γ κ) : VarNameS | nil => t'' | lv => (rawLatexMath " ")+++t''+++(rawLatexMath " @ ")+++ (fold_left (fun x y => x+++(rawLatexMath ":")+++y) - (map (fun l:HaskTyVar Γ ★ => l (fun _ => LatexMath) ite) lv) (rawLatexMath "")) + (map (fun l:HaskTyVar Γ _ => l (fun _ => LatexMath) ite) lv) (rawLatexMath "")) end end); try apply ConcatenableLatexMath. try apply VarNameMonad. diff --git a/src/HaskProofToStrong.v b/src/HaskProofToStrong.v index 06f97a1..19f2f62 100644 --- a/src/HaskProofToStrong.v +++ b/src/HaskProofToStrong.v @@ -569,7 +569,6 @@ Section HaskProofToStrong. destruct case_RGlobal. apply ILeaf; simpl; intros; refine (return ILeaf _ _). apply EGlobal. - apply wev. destruct case_RLam. apply ILeaf. 
@@ -633,7 +632,7 @@ Section HaskProofToStrong. apply ileaf in q1'. apply ileaf in q2'. simpl in *. - apply (EApp _ _ _ _ _ _ q1' q2'). + apply (EApp _ _ _ _ _ _ q2' q1'). destruct case_RLet. apply ILeaf. @@ -715,6 +714,7 @@ Section HaskProofToStrong. inversion X; subst; clear X. apply (@ELetRec _ _ _ _ _ _ _ varstypes). + auto. apply (@letrec_helper Γ Δ t varstypes). rewrite <- pf2 in X1. rewrite mapOptionTree_compose. @@ -758,15 +758,12 @@ Section HaskProofToStrong. apply H2. Defined. - Definition closed2expr : forall c (pn:@ClosedSIND _ Rule c), ITree _ judg2exprType c. - refine (( - fix closed2expr' j (pn:@ClosedSIND _ Rule j) {struct pn} : ITree _ judg2exprType j := - match pn in @ClosedSIND _ _ J return ITree _ judg2exprType J with - | cnd_weak => let case_nil := tt in INone _ _ - | cnd_rule h c cnd' r => let case_rule := tt in rule2expr _ _ r (closed2expr' _ cnd') - | cnd_branch _ _ c1 c2 => let case_branch := tt in IBranch _ _ (closed2expr' _ c1) (closed2expr' _ c2) - end)); clear closed2expr'; intros; subst. - Defined. + Fixpoint closed2expr h j (pn:@SIND _ Rule h j) {struct pn} : ITree _ judg2exprType h -> ITree _ judg2exprType j := + match pn in @SIND _ _ H J return ITree _ judg2exprType H -> ITree _ judg2exprType J with + | scnd_weak _ => let case_nil := tt in fun _ => INone _ _ + | scnd_comp x h c cnd' r => let case_rule := tt in fun q => rule2expr _ _ r (closed2expr _ _ cnd' q) + | scnd_branch _ _ _ c1 c2 => let case_branch := tt in fun q => IBranch _ _ (closed2expr _ _ c1 q) (closed2expr _ _ c2 q) + end. Lemma manyFresh : forall Γ Σ (ξ0:VV -> LeveledHaskType Γ ★), FreshM { vars : _ & { ξ : VV -> LeveledHaskType Γ ★ & Σ = mapOptionTree ξ vars } }. @@ -804,7 +801,7 @@ Section HaskProofToStrong. {zz:ToString VV} : ND Rule [] [Γ > Δ > Σ |- [τ]] -> FreshM (???{ ξ : _ & Expr Γ Δ ξ τ}). intro pf. - set (closedFromSIND _ _ (mkSIND systemfc_all_rules_one_conclusion _ _ _ pf (scnd_weak [])) cnd_weak) as cnd. + set (mkSIND systemfc_all_rules_one_conclusion _ _ _ pf (scnd_weak [])) as cnd. apply closed2expr in cnd. apply ileaf in cnd. simpl in *. @@ -819,6 +816,7 @@ Section HaskProofToStrong. refine (return OK _). exists ξ. apply (ileaf it). + apply INone. Defined. End HaskProofToStrong. diff --git a/src/HaskSkolemizer.v b/src/HaskSkolemizer.v new file mode 100644 index 0000000..2c1c9d2 --- /dev/null +++ b/src/HaskSkolemizer.v @@ -0,0 +1,425 @@ +(*********************************************************************************************************************************) +(* HaskSkolemizer: *) +(* *) +(* Skolemizes the portion of a proof which uses judgments at level >0 *) +(* *) +(*********************************************************************************************************************************) + +Generalizable All Variables. +Require Import Preamble. +Require Import General. +Require Import NaturalDeduction. +Require Import Coq.Strings.String. +Require Import Coq.Lists.List. + +Require Import HaskKinds. +Require Import HaskCoreTypes. +Require Import HaskCoreVars. +Require Import HaskWeakTypes. +Require Import HaskWeakVars. +Require Import HaskLiterals. +Require Import HaskTyCons. +Require Import HaskStrongTypes. +Require Import HaskProof. +Require Import NaturalDeduction. + +Require Import HaskStrongTypes. +Require Import HaskStrong. +Require Import HaskProof. +Require Import HaskStrongToProof. +Require Import HaskProofToStrong. +Require Import HaskWeakToStrong. + +Open Scope nd_scope. +Set Printing Width 130. + +Section HaskSkolemizer. 
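  (* Overview: skolemize_judgment (below) rewrites any judgment whose succedent sits at a
     non-nil level (ec::l) so that the code-level argument types of each succedent type are
     moved into the antecedent (take_arg_types_as_tree) and only the result type is kept on
     the right (drop_arg_types_as_tree); the SBrak/SEsc rules then mediate between this
     skolemized form and the original <[ec |- t]> judgment at level l. *)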
+ +(* + Fixpoint debruijn2phoas {κ} (exp: RawHaskType (fun _ => nat) κ) : HaskType TV κ := + match exp with + | TVar _ x => x + | TAll _ y => TAll _ (fun v => debruijn2phoas (y (TVar v))) + | TApp _ _ x y => TApp (debruijn2phoas x) (debruijn2phoas y) + | TCon tc => TCon tc + | TCoerc _ t1 t2 t => TCoerc (debruijn2phoas t1) (debruijn2phoas t2) (debruijn2phoas t) + | TArrow => TArrow + | TCode v e => TCode (debruijn2phoas v) (debruijn2phoas e) + | TyFunApp tfc kl k lt => TyFunApp tfc kl k (debruijn2phoasyFunApp _ lt) + end + with debruijn2phoasyFunApp (lk:list Kind)(exp:@RawHaskTypeList (fun _ => nat) lk) : @HaskTypeList TV lk := + match exp in @RawHaskTypeList _ LK return @RawHaskTypeList TV LK with + | TyFunApp_nil => TyFunApp_nil + | TyFunApp_cons κ kl t rest => TyFunApp_cons _ _ (debruijn2phoas t) (debruijn2phoasyFunApp _ rest) + end. +*) + Definition isNotBrakOrEsc {h}{c} (r:Rule h c) : Prop := + match r with + | RBrak _ _ _ _ _ _ => False + | REsc _ _ _ _ _ _ => False + | _ => True + end. + + Fixpoint mkArrows {Γ}(lt:list (HaskType Γ ★))(t:HaskType Γ ★) : HaskType Γ ★ := + match lt with + | nil => t + | a::b => mkArrows b (a ---> t) + end. + + Fixpoint unleaves_ {Γ}(t:Tree ??(LeveledHaskType Γ ★))(l:list (HaskType Γ ★)) lev : Tree ??(LeveledHaskType Γ ★) := + match l with + | nil => t + | a::b => unleaves_ (t,,[a @@ lev]) b lev + end. + + (* rules of skolemized proofs *) + Definition getΓ (j:Judg) := match j with Γ > _ > _ |- _ => Γ end. + Definition getSuc (j:Judg) : Tree ??(LeveledHaskType (getΓ j) ★) := + match j as J return Tree ??(LeveledHaskType (getΓ J) ★) with Γ > _ > _ |- s => s end. + Fixpoint getjlev {Γ}(tt:Tree ??(LeveledHaskType Γ ★)) : HaskLevel Γ := + match tt with + | T_Leaf None => nil + | T_Leaf (Some (_ @@ lev)) => lev + | T_Branch b1 b2 => + match getjlev b1 with + | nil => getjlev b2 + | lev => lev + end + end. + + Fixpoint take_trustme {Γ} + (n:nat) + (l:forall TV, InstantiatedTypeEnv TV Γ -> list (RawHaskType TV ★)) + : list (HaskType Γ ★) := + + match n with + | 0 => nil + | S n' => (fun TV ite => match l TV ite with + | nil => Prelude_error "impossible" + | a::b => a + end) + :: + take_trustme n' (fun TV ite => match l TV ite with + | nil => Prelude_error "impossible" + | a::b => b + end) + end. + + Definition take_arg_types_as_tree {Γ}(ht:HaskType Γ ★) : Tree ??(HaskType Γ ★ ) := + unleaves + (take_trustme + (count_arg_types (ht _ (ite_unit _))) + (fun TV ite => take_arg_types (ht TV ite))). + + Definition drop_arg_types_as_tree {Γ} (ht:HaskType Γ ★) : HaskType Γ ★ := + fun TV ite => drop_arg_types (ht TV ite). + + Implicit Arguments take_arg_types_as_tree [[Γ]]. + Implicit Arguments drop_arg_types_as_tree [[Γ]]. + + Lemma take_works : forall {Γ}(t1 t2:HaskType Γ ★), + take_arg_types_as_tree (t1 ---> t2) = [t1],,(take_arg_types_as_tree t2). + intros. + unfold take_arg_types_as_tree at 1. + simpl. + admit. + Qed. + + Lemma drop_works : forall {Γ}(t1 t2:HaskType Γ ★), + drop_arg_types_as_tree (t1 ---> t2) = (drop_arg_types_as_tree t2). + intros. + unfold drop_arg_types_as_tree. + simpl. + reflexivity. + Qed. 
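  (* A standalone first-order analogue of the two lemmas above; the names ToyTy,
     toy_take_args and toy_drop_args are invented for illustration only (the real versions
     work on PHOAS types, which is why count_arg_types/take_trustme are needed there).
     An arrow type a ---> b ---> c decomposes into its argument list and its result, and the
     take_works/drop_works equations hold by computation: *)
  (*
  Require Import List.

  Inductive ToyTy : Type :=
  | TyBase  : nat -> ToyTy
  | TyArrow : ToyTy -> ToyTy -> ToyTy.

  Fixpoint toy_take_args (t:ToyTy) : list ToyTy :=
    match t with
    | TyArrow t1 t2 => t1 :: toy_take_args t2
    | _             => nil
    end.

  Fixpoint toy_drop_args (t:ToyTy) : ToyTy :=
    match t with
    | TyArrow _  t2 => toy_drop_args t2
    | TyBase n      => TyBase n
    end.

  Lemma toy_take_works : forall t1 t2, toy_take_args (TyArrow t1 t2) = t1 :: toy_take_args t2.
    intros; reflexivity. Qed.

  Lemma toy_drop_works : forall t1 t2, toy_drop_args (TyArrow t1 t2) = toy_drop_args t2.
    intros; reflexivity. Qed.

  Eval compute in toy_take_args (TyArrow (TyBase 0) (TyArrow (TyBase 1) (TyBase 2))).
    (* evaluates to TyBase 0 :: TyBase 1 :: nil *)
  Eval compute in toy_drop_args (TyArrow (TyBase 0) (TyArrow (TyBase 1) (TyBase 2))).
    (* evaluates to TyBase 2 *)
  *)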
+ + Inductive SRule : Tree ??Judg -> Tree ??Judg -> Type := +(* | SFlat : forall h c (r:Rule h c), isNotBrakOrEsc r -> SRule h c*) + | SFlat : forall h c, Rule h c -> SRule h c + | SBrak : forall Γ Δ t ec Σ l, + SRule + [Γ > Δ > Σ,, (take_arg_types_as_tree t @@@ (ec::l)) |- [ drop_arg_types_as_tree t @@ (ec::l) ]] + [Γ > Δ > Σ |- [<[ec |- t]> @@ l ]] + + | SEsc : forall Γ Δ t ec Σ l, + SRule + [Γ > Δ > Σ |- [<[ec |- t]> @@ l ]] + [Γ > Δ > Σ,, (take_arg_types_as_tree t @@@ (ec::l)) |- [ drop_arg_types_as_tree t @@ (ec::l) ]] + . + + Definition take_arg_types_as_tree' {Γ}(lt:LeveledHaskType Γ ★) := + match lt with t @@ l => take_arg_types_as_tree t @@@ l end. + + Definition drop_arg_types_as_tree' {Γ}(lt:LeveledHaskType Γ ★) := + match lt with t @@ l => drop_arg_types_as_tree t @@ l end. + + Definition skolemize_judgment (j:Judg) : Judg := + match j with + Γ > Δ > Σ₁ |- Σ₂ => + match getjlev Σ₂ with + | nil => j + | lev => Γ > Δ > Σ₁,,(mapOptionTreeAndFlatten take_arg_types_as_tree' Σ₂) |- mapOptionTree drop_arg_types_as_tree' Σ₂ + end + end. + + Definition check_hof : forall {Γ}(t:HaskType Γ ★), + sumbool + True + (take_arg_types_as_tree t = [] /\ drop_arg_types_as_tree t = t). + intros. + destruct (eqd_dec (take_arg_types_as_tree t) []); + destruct (eqd_dec (drop_arg_types_as_tree t) t). + right; auto. + left; auto. + left; auto. + left; auto. + Defined. + + Opaque take_arg_types_as_tree. + Definition skolemize_proof : + forall {h}{c}, + ND Rule h c -> + ND SRule (mapOptionTree skolemize_judgment h) (mapOptionTree skolemize_judgment c). + intros. + eapply nd_map'; [ idtac | apply X ]. + clear h c X. + intros. + + refine (match X as R in Rule H C with + | RArrange Γ Δ a b x d => let case_RArrange := tt in _ + | RNote Γ Δ Σ τ l n => let case_RNote := tt in _ + | RLit Γ Δ l _ => let case_RLit := tt in _ + | RVar Γ Δ σ lev => let case_RVar := tt in _ + | RGlobal Γ Δ σ l wev => let case_RGlobal := tt in _ + | RLam Γ Δ Σ tx te lev => let case_RLam := tt in _ + | RCast Γ Δ Σ σ τ lev γ => let case_RCast := tt in _ + | RAbsT Γ Δ Σ κ σ lev => let case_RAbsT := tt in _ + | RAppT Γ Δ Σ κ σ τ lev => let case_RAppT := tt in _ + | RAppCo Γ Δ Σ κ σ₁ σ₂ γ σ lev => let case_RAppCo := tt in _ + | RAbsCo Γ Δ Σ κ σ σ₁ σ₂ lev => let case_RAbsCo := tt in _ + | RApp Γ Δ Σ₁ Σ₂ tx te lev => let case_RApp := tt in _ + | RLet Γ Δ Σ₁ Σ₂ σ₁ σ₂ lev => let case_RLet := tt in _ + | RJoin Γ p lri m x q => let case_RJoin := tt in _ + | RVoid _ _ => let case_RVoid := tt in _ + | RBrak Γ Δ t ec succ lev => let case_RBrak := tt in _ + | REsc Γ Δ t ec succ lev => let case_REsc := tt in _ + | RCase Γ Δ lev tc Σ avars tbranches alts => let case_RCase := tt in _ + | RLetRec Γ Δ lri x y t => let case_RLetRec := tt in _ + end); clear X h c. + + destruct case_RArrange. + simpl. + destruct (getjlev x). + apply nd_rule. + apply SFlat. + apply RArrange. + apply d. + apply nd_rule. + apply SFlat. + apply RArrange. + apply RRight. + apply d. + + destruct case_RBrak. + simpl. + destruct lev; [ idtac | apply (Prelude_error "Brak with nesting depth >1") ]. + apply nd_rule. + apply SBrak. + + destruct case_REsc. + simpl. + destruct lev; [ idtac | apply (Prelude_error "Esc with nesting depth >1") ]. + apply nd_rule. + apply SEsc. + + destruct case_RNote. + apply nd_rule. + apply SFlat. + simpl. + destruct l. + apply RNote. + apply n. + apply RNote. + apply n. + + destruct case_RLit. + simpl. + destruct l0. + apply nd_rule. + apply SFlat. + apply RLit. + set (check_hof (@literalType l Γ)) as hof. 
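    (* check_hof decides whether the type has no code-level arguments (take = [] and
       drop = t); at depth>0, literals, variables and globals with higher-order types are
       rejected via Prelude_error, and otherwise the skolemized judgment differs from the
       unskolemized one only by an empty leaf, fixed up with RuCanL/RuCanR below *)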
+ destruct hof; [ apply (Prelude_error "attempt to use a literal with higher-order type at depth>0") | idtac ]. + destruct a. + rewrite H. + rewrite H0. + simpl. + eapply nd_comp; [ idtac | eapply nd_rule; eapply SFlat; eapply RArrange; apply RuCanL ]. + apply nd_rule. + apply SFlat. + apply RLit. + + destruct case_RVar. + simpl. + destruct lev. + apply nd_rule; apply SFlat; apply RVar. + set (check_hof σ) as hof. + destruct hof; [ apply (Prelude_error "attempt to use a variable with higher-order type at depth>0") | idtac ]. + destruct a. + rewrite H. + rewrite H0. + simpl. + eapply nd_comp; [ idtac | eapply nd_rule; apply SFlat; eapply RArrange; apply RuCanR ]. + apply nd_rule. + apply SFlat. + apply RVar. + + destruct case_RGlobal. + simpl. + destruct σ. + apply nd_rule; apply SFlat; apply RGlobal. + set (check_hof (l wev)) as hof. + destruct hof; [ apply (Prelude_error "attempt to use a global with higher-order type at depth>0") | idtac ]. + destruct a. + rewrite H. + rewrite H0. + simpl. + eapply nd_comp; [ idtac | eapply nd_rule; apply SFlat; eapply RArrange; apply RuCanR ]. + apply nd_rule. + apply SFlat. + apply RGlobal. + + destruct case_RLam. + destruct lev. + apply nd_rule. + apply SFlat. + simpl. + apply RLam. + simpl. + rewrite take_works. + rewrite drop_works. + apply nd_rule. + apply SFlat. + apply RArrange. + apply RCossa. + + destruct case_RCast. + simpl. + destruct lev. + apply nd_rule. + apply SFlat. + apply RCast. + apply γ. + apply (Prelude_error "found RCast at level >0"). + + destruct case_RJoin. + simpl. + destruct (getjlev x). + destruct (getjlev q). + apply nd_rule. + apply SFlat. + apply RJoin. + apply (Prelude_error "found RJoin at level >0"). + apply (Prelude_error "found RJoin at level >0"). + + destruct case_RApp. + simpl. + destruct lev. + apply nd_rule. + apply SFlat. + apply RApp. + rewrite take_works. + rewrite drop_works. + set (check_hof tx) as hof_tx. + destruct hof_tx; [ apply (Prelude_error "attempt tp apply a higher-order function at depth>0") | idtac ]. + destruct a. + rewrite H. + rewrite H0. + simpl. + set (@RLet Γ Δ (Σ₂,,take_arg_types_as_tree te @@@ (h :: lev)) Σ₁ (drop_arg_types_as_tree te) tx (h::lev)) as q. + eapply nd_comp; [ idtac | eapply nd_rule; apply SFlat; eapply RArrange; apply RAssoc ]. + eapply nd_comp; [ idtac | eapply nd_rule; apply SFlat; eapply RArrange; apply RExch ]. + eapply nd_comp; [ idtac | eapply nd_rule; apply SFlat; eapply q ]. + clear q. + apply nd_prod. + apply nd_rule. + apply SFlat. + apply RArrange. + apply RCanR. + eapply nd_comp; [ idtac | eapply nd_rule; apply SFlat; eapply RArrange; apply RAssoc ]. + apply nd_rule; apply SFlat; apply RArrange; apply RLeft; eapply RExch. + + destruct case_RLet. + simpl. + destruct lev. + apply nd_rule. + apply SFlat. + apply RLet. + set (check_hof σ₂) as hof_tx. + destruct hof_tx; [ apply (Prelude_error "attempt to let-bind a higher-order function at depth>0") | idtac ]. + destruct a. + rewrite H. + rewrite H0. + set (@RLet Γ Δ (Σ₁,,(take_arg_types_as_tree σ₁ @@@ (h::lev))) Σ₂ (drop_arg_types_as_tree σ₁) σ₂ (h::lev)) as q. + eapply nd_comp; [ idtac | eapply nd_rule; apply SFlat; eapply RArrange; apply RAssoc ]. + eapply nd_comp; [ idtac | eapply nd_rule; apply SFlat; eapply RArrange; eapply RLeft; apply RExch ]. + eapply nd_comp; [ idtac | eapply nd_rule; apply SFlat; eapply RArrange; apply RCossa ]. + eapply nd_comp; [ idtac | eapply nd_rule; apply SFlat; eapply q ]. + clear q. + apply nd_prod. + apply nd_rule. + apply SFlat. + apply RArrange. + apply RCanR. 
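    (* still case_RLet at depth>0: as in case_RApp above, the extra take_arg_types_as_tree
       antecedent is shuffled into position with RAssoc/RCossa/RExch so that the ordinary
       host-level RLet can be reused on the skolemized judgments *)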
+ eapply nd_comp; [ eapply nd_rule; apply SFlat; eapply RArrange; apply RCossa | idtac ]. + eapply nd_comp; [ idtac | eapply nd_rule; apply SFlat; eapply RArrange; apply RAssoc ]. + apply nd_rule. + apply SFlat. + apply RArrange. + apply RLeft. + eapply RExch. + + destruct case_RVoid. + simpl. + apply nd_rule. + apply SFlat. + apply RVoid. + + destruct case_RAppT. + simpl. + destruct lev; [ apply nd_rule; apply SFlat; apply RAppT | idtac ]. + apply (Prelude_error "RAppT at depth>0"). + + destruct case_RAbsT. + simpl. + destruct lev; simpl; [ apply nd_rule; apply SFlat; apply (@RAbsT _ _ _ _ _ nil) | idtac ]. + apply (Prelude_error "RAbsT at depth>0"). + + destruct case_RAppCo. + simpl. + destruct lev; [ apply nd_rule; apply SFlat; apply RAppCo | idtac ]. + apply γ. + apply (Prelude_error "RAppCo at depth>0"). + + destruct case_RAbsCo. + simpl. + destruct lev; [ apply nd_rule; apply SFlat; apply RAbsCo | idtac ]. + apply (Prelude_error "RAbsCo at depth>0"). + + destruct case_RLetRec. + simpl. + destruct t. + destruct (getjlev (y @@@ nil)). + apply nd_rule. + apply SFlat. + apply (@RLetRec Γ Δ lri x y nil). + apply (Prelude_error "RLetRec at depth>0"). + apply (Prelude_error "RLetRec at depth>0"). + + destruct case_RCase. + simpl. + apply (Prelude_error "CASE: BIG FIXME"). + Defined. + Transparent take_arg_types_as_tree. + +End HaskSkolemizer. diff --git a/src/HaskStrong.v b/src/HaskStrong.v index c5f46dc..d52acb9 100644 --- a/src/HaskStrong.v +++ b/src/HaskStrong.v @@ -9,7 +9,8 @@ Require Import Coq.Strings.String. Require Import Coq.Lists.List. Require Import HaskKinds. Require Import HaskCoreTypes. -Require Import HaskLiteralsAndTyCons. +Require Import HaskLiterals. +Require Import HaskTyCons. Require Import HaskStrongTypes. Require Import HaskWeakVars. Require Import HaskCoreVars. @@ -31,7 +32,7 @@ Section HaskStrong. Inductive Expr : forall Γ (Δ:CoercionEnv Γ), (VV -> LeveledHaskType Γ ★) -> LeveledHaskType Γ ★ -> Type := (* an "EGlobal" is any variable which is free in the expression which was passed to -fcoqpass (ie bound outside it) *) - | EGlobal: ∀ Γ Δ ξ t, WeakExprVar -> Expr Γ Δ ξ t + | EGlobal: forall Γ Δ ξ (g:Global Γ) v lev, Expr Γ Δ ξ (g v @@ lev) | EVar : ∀ Γ Δ ξ ev, Expr Γ Δ ξ (ξ ev) | ELit : ∀ Γ Δ ξ lit l, Expr Γ Δ ξ (literalType lit@@l) @@ -61,6 +62,7 @@ Section HaskStrong. Expr Γ Δ ξ (tbranches @@ l) | ELetRec : ∀ Γ Δ ξ l τ vars, + distinct (leaves (mapOptionTree (@fst _ _) vars)) -> let ξ' := update_ξ ξ l (leaves vars) in ELetRecBindings Γ Δ ξ' l vars -> Expr Γ Δ ξ' (τ@@l) -> @@ -78,7 +80,7 @@ Section HaskStrong. Fixpoint exprToString {Γ}{Δ}{ξ}{τ}(exp:Expr Γ Δ ξ τ) : string := match exp with | EVar Γ' _ ξ' ev => "var."+++ toString ev - | EGlobal Γ' _ ξ' t wev => "global." +++ toString (wev:CoreVar) + | EGlobal Γ' _ ξ' g v _ => "global." +++ toString (g:CoreVar) | ELam Γ' _ _ tv _ _ cv e => "\("+++ toString cv +++":t) -> "+++ exprToString e | ELet Γ' _ _ t _ _ ev e1 e2 => "let "+++toString ev+++" = "+++exprToString e1+++" in "+++exprToString e2 | ELit _ _ _ lit _ => "lit."+++toString lit @@ -91,8 +93,8 @@ Section HaskStrong. 
| ECast Γ Δ ξ t1 t2 γ l e => "cast ("+++exprToString e+++"):t2" | ETyLam _ _ _ k _ _ e => "\@_ ->"+++ exprToString e | ECoLam Γ Δ κ σ σ₁ σ₂ ξ l e => "\~_ ->"+++ exprToString e - | ECase Γ Δ ξ l tc tbranches atypes escrut alts => "case " +++ exprToString escrut +++ " of FIXME" - | ELetRec _ _ _ _ _ vars elrb e => "letrec FIXME in " +++ exprToString e + | ECase Γ Δ ξ l tc branches atypes escrut alts => "case " +++ exprToString escrut +++ " of FIXME" + | ELetRec _ _ _ _ _ vars vu elrb e => "letrec FIXME in " +++ exprToString e end. Instance ExprToString Γ Δ ξ τ : ToString (Expr Γ Δ ξ τ) := { toString := exprToString }. diff --git a/src/HaskStrongToProof.v b/src/HaskStrongToProof.v index c1e54aa..9c3b041 100644 --- a/src/HaskStrongToProof.v +++ b/src/HaskStrongToProof.v @@ -121,31 +121,83 @@ Lemma strip_lemma a x t : stripOutVars (a::x) t = stripOutVars (a::nil) (stripOu reflexivity. Qed. -Lemma strip_twice_lemma x y t : stripOutVars x (stripOutVars y t) = stripOutVars (app y x) t. -(* - induction x. - simpl. +Lemma strip_nil_lemma t : stripOutVars nil t = t. + induction t; simpl. + unfold stripOutVars. + destruct a; reflexivity. + rewrite <- IHt1 at 2. + rewrite <- IHt2 at 2. + reflexivity. + Qed. + +Lemma strip_swap0_lemma : forall a a0 y t, + stripOutVars (a :: a0 :: y) t = stripOutVars (a0 :: a :: y) t. + intros. unfold stripOutVars. - simpl. - rewrite mapOptionTree'_compose. induction t. - destruct a; try reflexivity. - simpl. - destruct (dropVar y v); reflexivity. - simpl. - rewrite IHt1. - rewrite IHt2. - reflexivity. - rewrite strip_lemma. - rewrite IHx. - rewrite <- strip_lemma. - rewrite app_comm_cons. - reflexivity. -*) - admit. + destruct a1; simpl; [ idtac | reflexivity ]. + destruct (eqd_dec v a); subst. + destruct (eqd_dec a a0); subst. + reflexivity. + reflexivity. + destruct (eqd_dec v a0); subst. + reflexivity. + reflexivity. + simpl in *. + rewrite IHt1. + rewrite IHt2. + reflexivity. + Qed. + +Lemma strip_swap1_lemma : forall a y t, + stripOutVars (a :: nil) (stripOutVars y t) = + stripOutVars y (stripOutVars (a :: nil) t). + intros. + induction y. + rewrite strip_nil_lemma. + rewrite strip_nil_lemma. + reflexivity. + rewrite (strip_lemma a0 y (stripOutVars (a::nil) t)). + rewrite <- IHy. + clear IHy. + rewrite <- (strip_lemma a y t). + rewrite <- strip_lemma. + rewrite <- strip_lemma. + apply strip_swap0_lemma. + Qed. + +Lemma strip_swap_lemma : forall x y t, stripOutVars x (stripOutVars y t) = stripOutVars y (stripOutVars x t). + intros; induction t. + destruct a; simpl. + + induction x. + rewrite strip_nil_lemma. + rewrite strip_nil_lemma. + reflexivity. + rewrite strip_lemma. + rewrite (strip_lemma a x [v]). + rewrite IHx. + clear IHx. + apply strip_swap1_lemma. + reflexivity. + unfold stripOutVars in *. + simpl. + rewrite IHt1. + rewrite IHt2. + reflexivity. Qed. -Lemma strip_distinct a y : (not (In a (leaves y))) -> stripOutVars (a :: nil) y = y. +Lemma strip_twice_lemma x y t : stripOutVars x (stripOutVars y t) = stripOutVars (app x y) t. + induction x; simpl. + apply strip_nil_lemma. + rewrite strip_lemma. + rewrite IHx. + clear IHx. + rewrite <- strip_lemma. + reflexivity. + Qed. + +Lemma notin_strip_inert a y : (not (In a (leaves y))) -> stripOutVars (a :: nil) y = y. intros. induction y. destruct a0; try reflexivity. @@ -232,7 +284,7 @@ Lemma distinct3 {T}(a b c:list T) : distinct (app (app a b) c) -> distinct (app auto. Qed. -Lemma strip_distinct' y : forall x, distinct (app x (leaves y)) -> stripOutVars x y = y. 
+Lemma notin_strip_inert' y : forall x, distinct (app x (leaves y)) -> stripOutVars x y = y. induction x; intros. simpl in H. unfold stripOutVars. @@ -250,7 +302,7 @@ Lemma strip_distinct' y : forall x, distinct (app x (leaves y)) -> stripOutVars set (IHx H3) as qq. rewrite strip_lemma. rewrite IHx. - apply strip_distinct. + apply notin_strip_inert. unfold not; intros. apply H2. apply In_both'. @@ -258,53 +310,219 @@ Lemma strip_distinct' y : forall x, distinct (app x (leaves y)) -> stripOutVars auto. Qed. +Lemma dropvar_lemma v v' y : dropVar y v = Some v' -> v=v'. + intros. + induction y; auto. + simpl in H. + inversion H. + auto. + apply IHy. + simpl in H. + destruct (eqd_dec v a). + inversion H. + auto. + Qed. + Lemma updating_stripped_tree_is_inert' {Γ} lev (ξ:VV -> LeveledHaskType Γ ★) lv tree2 : mapOptionTree (update_ξ ξ lev lv) (stripOutVars (map (@fst _ _) lv) tree2) = mapOptionTree ξ (stripOutVars (map (@fst _ _) lv) tree2). + induction tree2. - destruct a. - simpl. - induction lv. - reflexivity. - simpl. - destruct a. - simpl. - set (eqd_dec v v0) as q. - destruct q. - auto. - set (dropVar (map (@fst _ _) lv) v) as b in *. - destruct b. - inversion IHlv. - admit. - auto. - reflexivity. + destruct a; [ idtac | reflexivity ]; simpl. + induction lv; [ reflexivity | idtac ]; simpl. + destruct a; simpl. + set (eqd_dec v v0) as q; destruct q; auto. + set (dropVar (map (@fst _ _) lv) v) as b in *. + assert (dropVar (map (@fst _ _) lv) v=b). reflexivity. + destruct b; [ idtac | reflexivity ]. + apply dropvar_lemma in H. + subst. + inversion IHlv. + rewrite H0. + clear H0 IHlv. + destruct (eqd_dec v0 v1). + subst. + assert False. apply n. intros; auto. inversion H. + reflexivity. + unfold stripOutVars in *. + simpl. + rewrite <- IHtree2_1. + rewrite <- IHtree2_2. + reflexivity. + Qed. + +Lemma distinct_bogus : forall {T}a0 (a:list T), distinct (a0::(app a (a0::nil))) -> False. + intros; induction a; auto. + simpl in H. + inversion H; subst. + apply H2; auto. + unfold In; simpl. + left; auto. + apply IHa. + clear IHa. + rewrite <- app_comm_cons in H. + inversion H; subst. + inversion H3; subst. + apply distinct_cons; auto. + intros. + apply H2. simpl. - unfold stripOutVars in *. - rewrite <- IHtree2_1. - rewrite <- IHtree2_2. - reflexivity. + right; auto. Qed. -Lemma update_ξ_lemma `{EQD_VV:EqDecidable VV} : forall Γ ξ (lev:HaskLevel Γ)(varstypes:Tree ??(VV*_)), - distinct (map (@fst _ _) (leaves varstypes)) -> - mapOptionTree (update_ξ ξ lev (leaves varstypes)) (mapOptionTree (@fst _ _) varstypes) = - mapOptionTree (fun t => t@@ lev) (mapOptionTree (@snd _ _) varstypes). - admit. +Lemma distinct_swap' : forall {T}a (b:list T), distinct (app b (a::nil)) -> distinct (a::b). + intros. + apply distinct_cons. + induction b; intros; simpl; auto. + rewrite <- app_comm_cons in H. + inversion H; subst. + set (IHb H4) as H4'. + apply H4'. + inversion H0; subst; auto. + apply distinct_bogus in H; inversion H. + induction b; intros; simpl; auto. + apply distinct_nil. + apply distinct_app in H. + destruct H; auto. Qed. +Lemma in_both : forall {T}(a b:list T) x, In x (app a b) -> In x a \/ In x b. + induction a; intros; simpl; auto. + rewrite <- app_comm_cons in H. + inversion H. + subst. + left; left; auto. + set (IHa _ _ H0) as H'. + destruct H'. + left; right; auto. + right; auto. + Qed. +Lemma distinct_lemma : forall {T} (a b:list T) a0, distinct (app a (a0 :: b)) -> distinct (app a b). + intros. + induction a; simpl; auto. + simpl in H. + inversion H; auto. + assert (distinct (app a1 b)) as Q. 
+ intros. + apply IHa. + clear IHa. + rewrite <- app_comm_cons in H. + inversion H; subst; auto. + apply distinct_cons; [ idtac | apply Q ]. + intros. + apply in_both in H0. + destruct H0. + rewrite <- app_comm_cons in H. + inversion H; subst; auto. + apply H3. + apply In_both; auto. + rewrite <- app_comm_cons in H. + inversion H; subst; auto. + apply H3. + apply In_both'; auto. + simpl. + right; auto. + Qed. + +Lemma nil_app : forall {T}(a:list T) x, x::a = (app (x::nil) a). + induction a; intros; simpl; auto. + Qed. + +Lemma distinct_swap : forall {T}(a b:list T), distinct (app a b) -> distinct (app b a). + intros. + induction b. + rewrite <- app_nil_end in H; auto. + rewrite <- app_comm_cons. + set (distinct_lemma _ _ _ H) as H'. + set (IHb H') as H''. + apply distinct_cons; [ idtac | apply H'' ]. + intros. + apply in_both in H0. + clear H'' H'. + destruct H0. + apply distinct_app in H. + destruct H. + inversion H1; auto. + clear IHb. + rewrite nil_app in H. + rewrite ass_app in H. + apply distinct_app in H. + destruct H; auto. + apply distinct_swap' in H. + inversion H; auto. + Qed. +Lemma update_ξ_lemma' `{EQD_VV:EqDecidable VV} Γ ξ (lev:HaskLevel Γ)(varstypes:Tree ??(VV*_)) : + forall v1 v2, + distinct (map (@fst _ _) (leaves (v1,,(varstypes,,v2)))) -> + mapOptionTree (update_ξ ξ lev (leaves (v1,,(varstypes,,v2)))) (mapOptionTree (@fst _ _) varstypes) = + mapOptionTree (fun t => t@@ lev) (mapOptionTree (@snd _ _) varstypes). + induction varstypes; intros. + destruct a; simpl; [ idtac | reflexivity ]. + destruct p. + simpl. + simpl in H. + induction (leaves v1). + simpl; auto. + destruct (eqd_dec v v); auto. + assert False. apply n. auto. inversion H0. + simpl. + destruct a. + destruct (eqd_dec v0 v); subst; auto. + simpl in H. + rewrite map_app in H. + simpl in H. + rewrite nil_app in H. + apply distinct_swap in H. + rewrite app_ass in H. + apply distinct_app in H. + destruct H. + apply distinct_swap in H0. + simpl in H0. + inversion H0; subst. + assert False. + apply H3. + simpl; left; auto. + inversion H1. + apply IHl. + simpl in H. + inversion H; auto. + set (IHvarstypes1 v1 (varstypes2,,v2)) as i1. + set (IHvarstypes2 (v1,,varstypes1) v2) as i2. + simpl in *. + rewrite <- i1. + rewrite <- i2. + rewrite ass_app. + rewrite ass_app. + rewrite ass_app. + rewrite ass_app. + reflexivity. + clear i1 i2 IHvarstypes1 IHvarstypes2. + repeat rewrite ass_app in *; auto. + clear i1 i2 IHvarstypes1 IHvarstypes2. + repeat rewrite ass_app in *; auto. + Qed. +Lemma update_ξ_lemma `{EQD_VV:EqDecidable VV} Γ ξ (lev:HaskLevel Γ)(varstypes:Tree ??(VV*_)) : + distinct (map (@fst _ _) (leaves varstypes)) -> + mapOptionTree (update_ξ ξ lev (leaves varstypes)) (mapOptionTree (@fst _ _) varstypes) = + mapOptionTree (fun t => t@@ lev) (mapOptionTree (@snd _ _) varstypes). + set (update_ξ_lemma' Γ ξ lev varstypes [] []) as q. + simpl in q. + rewrite <- app_nil_end in q. + apply q. + Qed. 
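The update_ξ_lemma above, previously admitted, is now proved outright from the new distinctness lemmas. As a sanity check on what it asserts, here is a minimal sketch of the corresponding lookup property over plain association lists of nats, assuming only the standard library; update is a hypothetical stand-in for update_ξ and its Tree-shaped binding lists.

Require Import Coq.Lists.List.
Require Import Coq.Arith.Peano_dec.

Fixpoint update (bs:list (nat*nat)) (xi:nat -> nat) (v:nat) : nat :=
  match bs with
  | nil            => xi v
  | (v',t) :: rest => if eq_nat_dec v v' then t else update rest xi v
  end.

(* a variable bound by the update is looked up in the new bindings, not in xi *)
Lemma update_hit : forall (bs:list (nat*nat)) (xi:nat -> nat) (v t:nat),
  update ((v,t)::bs) xi v = t.
  intros; simpl.
  destruct (eq_nat_dec v v) as [ e | neq ]; [ reflexivity | ].
  destruct (neq (refl_equal v)).
  Qed.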
Fixpoint expr2antecedent {Γ'}{Δ'}{ξ'}{τ'}(exp:Expr Γ' Δ' ξ' τ') : Tree ??VV := match exp as E in Expr Γ Δ ξ τ with - | EGlobal Γ Δ ξ _ _ => [] + | EGlobal Γ Δ ξ _ _ _ => [] | EVar Γ Δ ξ ev => [ev] | ELit Γ Δ ξ lit lev => [] - | EApp Γ Δ ξ t1 t2 lev e1 e2 => (expr2antecedent e1),,(expr2antecedent e2) + | EApp Γ Δ ξ t1 t2 lev e1 e2 => (expr2antecedent e2),,(expr2antecedent e1) | ELam Γ Δ ξ t1 t2 lev v e => stripOutVars (v::nil) (expr2antecedent e) | ELet Γ Δ ξ tv t lev v ev ebody => ((stripOutVars (v::nil) (expr2antecedent ebody)),,(expr2antecedent ev)) | EEsc Γ Δ ξ ec t lev e => expr2antecedent e @@ -315,7 +533,7 @@ Fixpoint expr2antecedent {Γ'}{Δ'}{ξ'}{τ'}(exp:Expr Γ' Δ' ξ' τ') : Tree ? | ECoLam Γ Δ κ σ σ₁ σ₂ ξ l e => expr2antecedent e | ECoApp Γ Δ κ γ σ₁ σ₂ σ ξ l e => expr2antecedent e | ETyApp Γ Δ κ σ τ ξ l e => expr2antecedent e - | ELetRec Γ Δ ξ l τ vars branches body => + | ELetRec Γ Δ ξ l τ vars _ branches body => let branch_context := eLetRecContext branches in let all_contexts := (expr2antecedent body),,branch_context in stripOutVars (leaves (mapOptionTree (@fst _ _ ) vars)) all_contexts @@ -499,10 +717,6 @@ Definition arrangeContextAndWeaken refine (RLeft _ (RWeak _)). Defined. -Lemma cheat : forall {T}(a b:list T), distinct (app a b) -> distinct (app b a). - admit. - Qed. - Definition arrangeContextAndWeaken'' (Γ:TypeEnv)(Δ:CoercionEnv Γ) v (* variable to be pivoted, if found *) @@ -547,11 +761,11 @@ Definition arrangeContextAndWeaken'' eapply RComp. apply qq. clear qq IHv2' IHv2 IHv1. + rewrite strip_swap_lemma. rewrite strip_twice_lemma. - - rewrite (strip_distinct' v1 (leaves v2)). + rewrite (notin_strip_inert' v1 (leaves v2)). apply RCossa. - apply cheat. + apply distinct_swap. auto. Defined. @@ -592,11 +806,10 @@ Lemma letRecSubproofsToND Γ Δ ξ lev tree branches : Defined. Lemma letRecSubproofsToND' Γ Δ ξ lev τ tree : - forall branches body, - distinct (leaves (mapOptionTree (@fst _ _) tree)) -> + forall branches body (dist:distinct (leaves (mapOptionTree (@fst _ _) tree))), ND Rule [] [Γ > Δ > mapOptionTree (update_ξ ξ lev (leaves tree)) (expr2antecedent body) |- [τ @@ lev]] -> LetRecSubproofs Γ Δ (update_ξ ξ lev (leaves tree)) lev tree branches -> - ND Rule [] [Γ > Δ > mapOptionTree ξ (expr2antecedent (@ELetRec VV _ Γ Δ ξ lev τ tree branches body)) |- [τ @@ lev]]. + ND Rule [] [Γ > Δ > mapOptionTree ξ (expr2antecedent (@ELetRec VV _ Γ Δ ξ lev τ tree dist branches body)) |- [τ @@ lev]]. (* NOTE: how we interpret stuff here affects the order-of-side-effects *) intro branches. @@ -605,8 +818,11 @@ Lemma letRecSubproofsToND' Γ Δ ξ lev τ tree : intro pf. intro lrsp. - rewrite mapleaves in disti. - set (@update_ξ_lemma _ Γ ξ lev tree disti) as ξlemma. + assert (distinct (leaves (mapOptionTree (@fst _ _) tree))) as disti'. + apply disti. + rewrite mapleaves in disti'. + + set (@update_ξ_lemma _ Γ ξ lev tree disti') as ξlemma. rewrite <- mapOptionTree_compose in ξlemma. set ((update_ξ ξ lev (leaves tree))) as ξ' in *. @@ -620,7 +836,6 @@ Lemma letRecSubproofsToND' Γ Δ ξ lev τ tree : set (@arrangeContextAndWeaken'' Γ Δ pctx ξ' (expr2antecedent body,,eLetRecContext branches)) as q'. unfold passback in *; clear passback. unfold pctx in *; clear pctx. - rewrite <- mapleaves in disti. set (q' disti) as q''. unfold ξ' in *. 
@@ -714,7 +929,7 @@ Definition expr2proof : refine (fix expr2proof Γ' Δ' ξ' τ' (exp:Expr Γ' Δ' ξ' τ') {struct exp} : ND Rule [] [Γ' > Δ' > mapOptionTree ξ' (expr2antecedent exp) |- [τ']] := match exp as E in Expr Γ Δ ξ τ with - | EGlobal Γ Δ ξ t wev => let case_EGlobal := tt in _ + | EGlobal Γ Δ ξ g v lev => let case_EGlobal := tt in _ | EVar Γ Δ ξ ev => let case_EVar := tt in _ | ELit Γ Δ ξ lit lev => let case_ELit := tt in _ | EApp Γ Δ ξ t1 t2 lev e1 e2 => let case_EApp := tt in @@ -722,7 +937,7 @@ Definition expr2proof : | ELam Γ Δ ξ t1 t2 lev v e => let case_ELam := tt in (fun e' => _) (expr2proof _ _ _ _ e) | ELet Γ Δ ξ tv t v lev ev ebody => let case_ELet := tt in (fun pf_let pf_body => _) (expr2proof _ _ _ _ ev) (expr2proof _ _ _ _ ebody) - | ELetRec Γ Δ ξ lev t tree branches ebody => + | ELetRec Γ Δ ξ lev t tree disti branches ebody => let ξ' := update_ξ ξ lev (leaves tree) in let case_ELetRec := tt in (fun e' subproofs => _) (expr2proof _ _ _ _ ebody) ((fix subproofs Γ'' Δ'' ξ'' lev'' (tree':Tree ??(VV * HaskType Γ'' ★)) @@ -768,8 +983,7 @@ Definition expr2proof : destruct case_EGlobal. apply nd_rule. simpl. - destruct t as [t lev]. - apply (RGlobal _ _ _ _ wev). + apply (RGlobal _ _ _ g). destruct case_EVar. apply nd_rule. @@ -783,11 +997,11 @@ Definition expr2proof : destruct case_EApp. unfold mapOptionTree; simpl; fold (mapOptionTree ξ). - eapply nd_comp; [ idtac | eapply nd_rule; apply RApp ]. + eapply nd_comp; [ idtac + | eapply nd_rule; + apply (@RApp _ _ _ _ t2 t1) ]. eapply nd_comp; [ apply nd_llecnac | idtac ]. apply nd_prod; auto. - apply e1'. - apply e2'. destruct case_ELam; intros. unfold mapOptionTree; simpl; fold (mapOptionTree ξ). @@ -929,7 +1143,6 @@ Definition expr2proof : unfold ξ'1 in *. clear ξ'1. apply letRecSubproofsToND'. - admit. apply e'. apply subproofs. diff --git a/src/HaskStrongToWeak.v b/src/HaskStrongToWeak.v index e956dd6..25ecd7b 100644 --- a/src/HaskStrongToWeak.v +++ b/src/HaskStrongToWeak.v @@ -10,7 +10,8 @@ Require Import Coq.Strings.String. Require Import Coq.Lists.List. Require Import Coq.Init.Specif. Require Import HaskKinds. -Require Import HaskLiteralsAndTyCons. +Require Import HaskLiterals. +Require Import HaskTyCons. Require Import HaskWeakTypes. Require Import HaskWeakVars. Require Import HaskWeak. @@ -61,7 +62,7 @@ Section HaskStrongToWeak. | WTyVarTy ec => return WCodeTy ec t2' | _ => failM "impossible" end - | TyFunApp tfc tls => bind tls' = rawHaskTypeListToWeakType tls + | TyFunApp tfc _ _ tls => bind tls' = rawHaskTypeListToWeakType tls ; return WTyFunApp tfc tls' end with rawHaskTypeListToWeakType {κ}(rht:RawHaskTypeList κ) : UniqM (list WeakType) := @@ -111,7 +112,8 @@ Section HaskStrongToWeak. -> UniqM WeakExpr := match exp as E in @Expr _ _ G D X L return InstantiatedTypeEnv (fun _ => WeakTypeVar) G -> UniqM WeakExpr with | EVar Γ' _ ξ' ev => fun ite => match χ ev with OK v => return WEVar v | Error s => failM s end - | EGlobal Γ' _ ξ' t wev => fun ite => return WEVar wev + | EGlobal Γ' _ ξ' g v lev => fun ite => bind tv' = mapM (ilist_to_list (ilmap (fun κ x => typeToWeakType x ite) v)) + ; return (fold_left (fun x y => WETyApp x y) tv' (WEVar g)) | ELam Γ' _ _ tv _ _ cv e => fun ite => bind tv' = typeToWeakType tv ite ; bind ev' = mkWeakExprVar tv' ; bind e' = exprToWeakExpr (update_χ χ cv ev') e ite @@ -181,7 +183,7 @@ Section HaskStrongToWeak. 
(fun _ => UniqM WeakType) _ (fun _ t => typeToWeakType t ite) atypes)) ; return WECase vscrut' escrut' tbranches' tc tys branches' - | ELetRec _ _ _ _ _ vars elrb e => fun ite => bind vars' = seqM (map (fun vt:VV * HaskType _ ★ + | ELetRec _ _ _ _ _ vars disti elrb e => fun ite => bind vars' = seqM (map (fun vt:VV * HaskType _ ★ => bind tleaf = typeToWeakType (snd vt) ite ; bind v' = mkWeakExprVar tleaf ; return ((fst vt),v')) diff --git a/src/HaskStrongTypes.v b/src/HaskStrongTypes.v index 224d70b..764e95f 100644 --- a/src/HaskStrongTypes.v +++ b/src/HaskStrongTypes.v @@ -8,7 +8,8 @@ Require Import Coq.Strings.String. Require Import Coq.Lists.List. Require Import General. Require Import HaskKinds. -Require Import HaskLiteralsAndTyCons. +Require Import HaskLiterals. +Require Import HaskTyCons. Require Import HaskCoreTypes. Require Import HaskCoreVars. Require Import HaskWeakTypes. @@ -68,8 +69,8 @@ Section Raw. | TCoerc : ∀ κ, RawHaskType κ -> RawHaskType κ -> RawHaskType ★ -> RawHaskType ★ (* (+>) *) | TApp : ∀ κ₁ κ₂, RawHaskType (κ₂⇛κ₁) -> RawHaskType κ₂ -> RawHaskType κ₁ (* φ φ *) | TAll : ∀ κ, (TV κ -> RawHaskType ★) -> RawHaskType ★ (* ∀a:κ.φ *) - | TCode : RawHaskType ★ -> RawHaskType ★ -> RawHaskType ★ (* from λ^α *) - | TyFunApp : ∀ tf, RawHaskTypeList (fst (tyFunKind tf)) -> RawHaskType (snd (tyFunKind tf)) (* S_n *) + | TCode : RawHaskType ECKind -> RawHaskType ★ -> RawHaskType ★ (* from λ^α *) + | TyFunApp : forall (tf:TyFun) kl k, RawHaskTypeList kl -> RawHaskType k (* S_n *) with RawHaskTypeList : list Kind -> Type := | TyFunApp_nil : RawHaskTypeList nil | TyFunApp_cons : ∀ κ kl, RawHaskType κ -> RawHaskTypeList kl -> RawHaskTypeList (κ::kl). @@ -131,7 +132,7 @@ Definition InstantiatedCoercionEnv (TV:Kind->Type) CV (Γ:TypeEnv)(Δ:Coer (* A (HaskXX Γ) is an XX which is valid in environments of shape Γ; they are always PHOAS-uninstantiated *) Definition HaskTyVar (Γ:TypeEnv) κ := forall TV (env:@InstantiatedTypeEnv TV Γ), TV κ. Definition HaskCoVar Γ Δ := forall TV CV (env:@InstantiatedTypeEnv TV Γ)(cenv:@InstantiatedCoercionEnv TV CV Γ Δ), CV. -Definition HaskLevel (Γ:TypeEnv) := list (HaskTyVar Γ ★). +Definition HaskLevel (Γ:TypeEnv) := list (HaskTyVar Γ ECKind). Definition HaskType (Γ:TypeEnv) κ := ∀ TV, @InstantiatedTypeEnv TV Γ -> RawHaskType TV κ. Definition haskTyVarToType {Γ}{κ}(htv:HaskTyVar Γ κ) : HaskType Γ κ := fun TV ite => TVar (htv TV ite). @@ -159,7 +160,7 @@ Definition HaskTAll {Γ}(κ:Kind)(σ:forall TV (env:@InstantiatedTypeEnv TV Γ), Definition HaskTApp {Γ}{κ}(σ:forall TV (env:@InstantiatedTypeEnv TV Γ), TV κ -> RawHaskType TV ★) (cv:HaskTyVar Γ κ) : HaskType Γ ★ := fun TV env => σ TV env (cv TV env). -Definition HaskBrak {Γ}(v:HaskTyVar Γ ★)(t:HaskType Γ ★) : HaskType Γ ★:= +Definition HaskBrak {Γ}(v:HaskTyVar Γ ECKind)(t:HaskType Γ ★) : HaskType Γ ★:= fun TV env => @TCode TV (TVar (v TV env)) (t TV env). Definition HaskTCon {Γ}(tc:TyCon) : HaskType Γ (fold_right KindArrow ★ (tyConKind tc)) := fun TV ite => TCon tc. @@ -168,27 +169,31 @@ Definition HaskAppT {Γ}{κ₁}{κ₂}(t1:HaskType Γ (κ₂⇛κ₁))(t2:HaskTy Definition mkHaskCoercionKind {Γ}{κ}(t1:HaskType Γ κ)(t2:HaskType Γ κ) : HaskCoercionKind Γ := fun TV ite => mkRawCoercionKind _ (t1 TV ite) (t2 TV ite). -(* PHOAS substitution on types *) -Definition substT {Γ}{κ₁}{κ₂}(exp:forall TV (env:@InstantiatedTypeEnv TV Γ), TV κ₁ -> RawHaskType TV κ₂)(v:@HaskType Γ κ₁) - : @HaskType Γ κ₂ := -fun TV env => - (fix flattenT {κ} (exp: RawHaskType (fun k => RawHaskType TV k) κ) : RawHaskType TV κ := +Section Flatten. 
+ Context {TV:Kind -> Type }. +Fixpoint flattenT {κ} (exp: RawHaskType (fun k => RawHaskType TV k) κ) : RawHaskType TV κ := match exp with | TVar _ x => x - | TAll _ y => TAll _ (fun v => flattenT _ (y (TVar v))) - | TApp _ _ x y => TApp (flattenT _ x) (flattenT _ y) + | TAll _ y => TAll _ (fun v => flattenT (y (TVar v))) + | TApp _ _ x y => TApp (flattenT x) (flattenT y) | TCon tc => TCon tc - | TCoerc _ t1 t2 t => TCoerc (flattenT _ t1) (flattenT _ t2) (flattenT _ t) + | TCoerc _ t1 t2 t => TCoerc (flattenT t1) (flattenT t2) (flattenT t) | TArrow => TArrow - | TCode v e => TCode (flattenT _ v) (flattenT _ e) - | TyFunApp tfc lt => TyFunApp tfc (flattenTyFunApp _ lt) + | TCode v e => TCode (flattenT v) (flattenT e) + | TyFunApp tfc kl k lt => TyFunApp tfc kl k (flattenTyFunApp _ lt) end with flattenTyFunApp (lk:list Kind)(exp:@RawHaskTypeList (fun k => RawHaskType TV k) lk) : @RawHaskTypeList TV lk := match exp in @RawHaskTypeList _ LK return @RawHaskTypeList TV LK with | TyFunApp_nil => TyFunApp_nil - | TyFunApp_cons κ kl t rest => TyFunApp_cons _ _ (flattenT _ t) (flattenTyFunApp _ rest) - end - for flattenT) _ (exp (fun k => RawHaskType TV k) (ilmap (fun κ tv => TVar tv) env) (v TV env)). + | TyFunApp_cons κ kl t rest => TyFunApp_cons _ _ (flattenT t) (flattenTyFunApp _ rest) + end. +End Flatten. + +(* PHOAS substitution on types *) +Definition substT {Γ}{κ₁}{κ₂}(exp:forall TV (env:@InstantiatedTypeEnv TV Γ), TV κ₁ -> RawHaskType TV κ₂)(v:@HaskType Γ κ₁) + : @HaskType Γ κ₂ := + fun TV env => + flattenT (exp (fun k => RawHaskType TV k) (ilmap (fun κ tv => TVar tv) env) (v TV env)). Notation "t @@ l" := (@mkLeveledHaskType _ _ t l) (at level 20). Notation "t @@@ l" := (mapOptionTree (fun t' => t' @@ l) t) (at level 20). @@ -197,7 +202,117 @@ Notation "'<[' a '|-' t ']>'" := (@HaskBrak _ a t). Definition unlev {Γ}{κ}(lht:LeveledHaskType Γ κ) := match lht with t@@l => t end. +Structure Global Γ := +{ glob_wv : WeakExprVar +; glob_kinds : list Kind +; glob_tf : IList _ (fun κ => HaskType Γ κ) glob_kinds -> HaskType Γ ★ +}. +Coercion glob_tf : Global >-> Funclass. +Coercion glob_wv : Global >-> WeakExprVar. + +(* From (t1->(t2->(t3-> ... t))), return t1::t2::t3::...nil *) +(* this is a billion times uglier than it needs to be as a result of how primitive Coq's termiation checker is *) +Fixpoint take_arg_types {TV}{κ}(exp: RawHaskType TV κ) {struct exp} : list (RawHaskType TV κ) := + match exp as E in RawHaskType _ K return list (RawHaskType _ K) with + | TApp κ₁ κ₂ x y => + (match κ₁ as K1 return RawHaskType TV (κ₂ ⇛ K1) -> list (RawHaskType TV κ₂) -> list (RawHaskType _ K1) with + | KindStar => + match κ₂ as K2 return RawHaskType TV (K2 ⇛ KindStar) -> list (RawHaskType TV K2) -> list (RawHaskType _ KindStar) with + | KindStar => fun x' => + match x' return list (RawHaskType TV KindStar) -> list (RawHaskType _ KindStar) with + | TApp κ₁'' κ₂'' w'' x'' => + match κ₂'' as K2'' return RawHaskType TV K2'' -> list (RawHaskType TV KindStar) -> + list (RawHaskType _ KindStar) with + | KindStar => + match w'' with + | TArrow => fun a b => a::b + | _ => fun _ _ => nil + end + | _ => fun _ _ => nil + end x'' + | _ => fun _ => nil + end + | _ => fun _ _ => nil + end + | _ => fun _ _ => nil + end) x (take_arg_types y) + | _ => nil + end. 
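The new Flatten section lets substT be written as ordinary PHOAS substitution: instantiate the type-variable parameter with types themselves, then flatten the resulting type-of-types. A minimal sketch of that trick on an unindexed toy syntax; the names here are illustrative only, not part of the development.

Inductive T (V:Type) : Type :=
  | TyVar : V -> T V
  | TyArr : T V -> T V -> T V.

Fixpoint flatten (V:Type) (t:T (T V)) : T V :=
  match t with
  | TyVar x   => x
  | TyArr a b => TyArr V (flatten V a) (flatten V b)
  end.

(* a term with one free variable is polymorphic in the variable type;
   substitution instantiates that type with T V and flattens the result *)
Definition subst_sketch (V:Type) (body:forall W:Type, W -> T W) (arg:T V) : T V :=
  flatten V (body (T V) arg).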
+Fixpoint count_arg_types {TV}{κ}(exp: RawHaskType TV κ) {struct exp} : nat := + match exp as E in RawHaskType _ K return nat with + | TApp κ₁ κ₂ x y => + (match κ₁ as K1 return RawHaskType TV (κ₂ ⇛ K1) -> nat -> nat with + | KindStar => + match κ₂ as K2 return RawHaskType TV (K2 ⇛ KindStar) -> nat -> nat with + | KindStar => fun x' => + match x' return nat -> nat with + | TApp κ₁'' κ₂'' w'' x'' => + match κ₂'' as K2'' return RawHaskType TV K2'' -> nat -> nat with + | KindStar => + match w'' with + | TArrow => fun a b => S b + | _ => fun _ _ => 0 + end + | _ => fun _ _ => 0 + end x'' + | _ => fun _ => 0 + end + | _ => fun _ _ => 0 + end + | _ => fun _ _ => 0 + end) x (count_arg_types y) + | _ => 0 + end. + + Definition ite_unit : ∀ Γ, InstantiatedTypeEnv (fun _ => unit) Γ. + intros. + induction Γ. + apply INil. + apply ICons; auto. + apply tt. + Defined. + +Definition take_arg_type {Γ}{κ}(ht:HaskType Γ κ) : (gt (count_arg_types (ht _ (ite_unit _))) 0) -> HaskType Γ κ := + fun pf => + fun TV ite => + match take_arg_types (ht TV ite) with + | nil => Prelude_error "impossible" + | x::y => x + end. + +(* From (t1->(t2->(t3-> ... t))), return t *) +(* this is a billion times uglier than it needs to be as a result of how primitive Coq's termiation checker is *) +Fixpoint drop_arg_types {TV}{κ}(exp: RawHaskType TV κ) : RawHaskType TV κ := + match exp as E in RawHaskType _ K return RawHaskType _ K with + | TApp κ₁ κ₂ x y => + let q := + (match κ₁ as K1 return RawHaskType TV (κ₂ ⇛ K1) -> (RawHaskType TV κ₂) -> ??(RawHaskType _ K1) with + | KindStar => + match κ₂ as K2 return RawHaskType TV (K2 ⇛ KindStar) -> (RawHaskType TV K2) -> ??(RawHaskType _ KindStar) with + | KindStar => fun x' => + match x' return (RawHaskType TV KindStar) -> ??(RawHaskType _ KindStar) with + | TApp κ₁'' κ₂'' w'' x'' => + match κ₂'' as K2'' return RawHaskType TV K2'' -> (RawHaskType TV KindStar) -> ??(RawHaskType _ KindStar) with + | KindStar => + match w'' with + | TArrow => fun _ b => Some b + | _ => fun _ b => None + end + | _ => fun _ b => None + end x'' + | _ => fun _ => None + end + | _ => fun _ _ => None + end + | _ => fun _ _ => None + end) x (drop_arg_types y) + in match q with + | None => TApp x y + | Some y => y + end + | b => b + end. 
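take_arg_types, count_arg_types and drop_arg_types all walk the spine of an arrow type; the nested dependent matches come from the kind index on RawHaskType and, as the comments above note, from Coq's termination checker. On a toy unindexed syntax the same operations collapse to the obvious recursions, sketched here with illustrative names (count_arg_types is then just the length of the first list):

Inductive Ty : Type :=
  | TyUnit : Ty
  | TyFunT : Ty -> Ty -> Ty.       (* t1 -> t2 *)

Fixpoint arg_types (t:Ty) : list Ty :=
  match t with
  | TyFunT a b => a :: arg_types b
  | _          => nil
  end.

Fixpoint result_type (t:Ty) : Ty :=
  match t with
  | TyFunT _ b => result_type b
  | other      => other
  end.

(* from t1 -> (t2 -> t) we recover the argument list and the final result *)
Example arg_types_ex :
  arg_types (TyFunT TyUnit (TyFunT (TyFunT TyUnit TyUnit) TyUnit))
    = TyUnit :: TyFunT TyUnit TyUnit :: nil.
  reflexivity. Qed.

Example result_type_ex :
  result_type (TyFunT TyUnit (TyFunT (TyFunT TyUnit TyUnit) TyUnit)) = TyUnit.
  reflexivity. Qed.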
@@ -461,7 +576,7 @@ match t1 with | TArrow => match t2 with TArrow => true | _ => false end | TCode ec t => match t2 with TCode ec' t' => if compareT n ec ec' then compareT n t t' else false | _ => false end | TCoerc _ t1 t2 t => match t2 with TCoerc _ t1' t2' t' => compareT n t1 t1' && compareT n t2 t2' && compareT n t t' | _ =>false end -| TyFunApp tfc lt => match t2 with TyFunApp tfc' lt' => eqd_dec tfc tfc' && compareTL n lt lt' | _ => false end +| TyFunApp tfc kl k lt => match t2 with TyFunApp tfc' kl' k' lt' => eqd_dec tfc tfc' && compareTL n lt lt' | _ => false end end with compareTL (n:nat){κ₁}(t1:@RawHaskTypeList (fun _ => nat) κ₁){κ₂}(t2:@RawHaskTypeList (fun _ => nat) κ₂) : bool := match t1 with @@ -556,7 +671,7 @@ Fixpoint typeToString' (needparens:bool)(n:nat){κ}(t:RawHaskType (fun _ => nat) in "(forall "+++ alpha +++ ":"+++ toString k +++")"+++ typeToString' false (S n) (f n) | TCode ec t => "<["+++(typeToString' true n t)+++"]>@"+++(typeToString' false n ec) - | TyFunApp tfc lt => toString tfc+++ "_" +++ toString n+++" ["+++ + | TyFunApp tfc kl k lt => toString tfc+++ "_" +++ toString n+++" ["+++ (fold_left (fun x y => " \ "+++x+++y) (typeList2string false n lt) "")+++"]" end with typeList2string (needparens:bool)(n:nat){κ}(t:RawHaskTypeList κ) {struct t} : list string := diff --git a/src/HaskTyCons.v b/src/HaskTyCons.v new file mode 100644 index 0000000..617cd59 --- /dev/null +++ b/src/HaskTyCons.v @@ -0,0 +1,32 @@ +(*********************************************************************************************************************************) +(* HaskTyCons: representation of type constructors, type functions, and data constructors *) +(*********************************************************************************************************************************) + +Generalizable All Variables. +Require Import Preamble. +Require Import General. +Require Import Coq.Strings.String. +Require Import HaskKinds. + +Variable CoreDataCon : Type. Extract Inlined Constant CoreDataCon => "DataCon.DataCon". + +(* once again, we pull the trick of having multiple Coq types map to a single Haskell type to provide stronger typing *) +Variable TyCon : Type. Extract Inlined Constant TyCon => "TyCon.TyCon". +Variable TyFun : Type. Extract Inlined Constant TyFun => "TyCon.TyCon". + +Variable CoreName : Type. Extract Inlined Constant CoreName => "Name.Name". +Variable Class_ : Type. Extract Inlined Constant Class_ => "Class.Class". +Variable CoreIPName : Type -> Type. Extract Constant CoreIPName "’a" => "BasicTypes.IPName". + Extraction Inline CoreIPName. + +Variable tyConToString : TyCon -> string. Extract Inlined Constant tyConToString => "outputableToString". +Variable tyFunToString : TyFun -> string. Extract Inlined Constant tyFunToString => "outputableToString". +Instance TyConToString : ToString TyCon := { toString := tyConToString }. +Instance TyFunToString : ToString TyFun := { toString := tyFunToString }. +Instance TyConToLatex : ToLatex TyCon := { toLatex := fun x => toLatex (toString x) }. +Instance TyFunToLatex : ToLatex TyCon := { toLatex := fun x => toLatex (toString x) }. + +Variable ModalBoxTyCon : TyCon. Extract Inlined Constant ModalBoxTyCon => "TysWiredIn.hetMetCodeTypeTyCon". +Variable PairTyCon : TyFun. Extract Inlined Constant PairTyCon => "TysWiredIn.pairTyCon". +Variable UnitTyCon : TyFun. Extract Inlined Constant UnitTyCon => "TysWiredIn.unitTyCon". +Variable ArrowTyCon : TyCon. Extract Constant ArrowTyCon => "Type.funTyCon". 
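The comment at the top of the new HaskTyCons.v mentions the trick of mapping several Coq types onto one Haskell type. A small sketch of the mechanism in isolation, with hypothetical Demo* names; the GHC identifier on the right-hand sides is the one already used above.

Variable DemoTyCon : Type.  Extract Inlined Constant DemoTyCon => "TyCon.TyCon".
Variable DemoTyFun : Type.  Extract Inlined Constant DemoTyFun => "TyCon.TyCon".

(* both extract to TyCon.TyCon, yet Coq keeps them apart: this definition
   accepts a DemoTyFun and will reject a DemoTyCon at type-checking time *)
Definition demoTyFunOnly (tf:DemoTyFun) : DemoTyFun := tf.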
diff --git a/src/HaskWeak.v b/src/HaskWeak.v index d5d66c0..85f5b24 100644 --- a/src/HaskWeak.v +++ b/src/HaskWeak.v @@ -7,7 +7,8 @@ Require Import Preamble. Require Import General. Require Import Coq.Lists.List. Require Import HaskKinds. -Require Import HaskLiteralsAndTyCons. +Require Import HaskLiterals. +Require Import HaskTyCons. Require Import HaskWeakVars. Require Import HaskWeakTypes. diff --git a/src/HaskWeakToCore.v b/src/HaskWeakToCore.v index 290d634..8ceb0b7 100644 --- a/src/HaskWeakToCore.v +++ b/src/HaskWeakToCore.v @@ -8,7 +8,8 @@ Require Import General. Require Import Coq.Strings.String. Require Import Coq.Lists.List. Require Import HaskKinds. -Require Import HaskLiteralsAndTyCons. +Require Import HaskLiterals. +Require Import HaskTyCons. Require Import HaskCoreVars. Require Import HaskCoreTypes. Require Import HaskCore. @@ -71,7 +72,7 @@ Fixpoint weakExprToCoreExpr (me:WeakExpr) : @CoreExpr CoreVar := | WENote n e => CoreENote n (weakExprToCoreExpr e ) | WELam (weakExprVar ev _ ) e => CoreELam ev (weakExprToCoreExpr e ) | WETyLam (weakTypeVar tv _ ) e => CoreELam tv (weakExprToCoreExpr e ) - | WECoLam (weakCoerVar cv _ _ _) e => CoreELam cv (weakExprToCoreExpr e ) + | WECoLam (weakCoerVar cv _ _) e => CoreELam cv (weakExprToCoreExpr e ) | WECast e co => CoreECast (weakExprToCoreExpr e ) (weakCoercionToCoreCoercion co) | WEBrak v (weakTypeVar ec _) e t => fold_left CoreEApp ((CoreEType (TyVarTy ec)):: diff --git a/src/HaskWeakToStrong.v b/src/HaskWeakToStrong.v index 1b34865..7dd93ad 100644 --- a/src/HaskWeakToStrong.v +++ b/src/HaskWeakToStrong.v @@ -10,7 +10,8 @@ Require Import Coq.Strings.String. Require Import Coq.Lists.List. Require Import Coq.Init.Specif. Require Import HaskKinds. -Require Import HaskLiteralsAndTyCons. +Require Import HaskLiterals. +Require Import HaskTyCons. Require Import HaskWeakTypes. Require Import HaskWeakVars. Require Import HaskWeak. @@ -18,6 +19,7 @@ Require Import HaskWeakToCore. Require Import HaskStrongTypes. Require Import HaskStrong. Require Import HaskCoreVars. +Require Import HaskCoreTypes. Open Scope string_scope. Definition TyVarResolver Γ := forall wt:WeakTypeVar, ???(HaskTyVar Γ wt). 
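TyVarResolver and the weakTypeToType / weakExprToStrongExpr cases in the hunks below are written in the ??? error monad; its OK and Error constructors and the >>= chains appear throughout this file. A self-contained sketch of that pattern, using hypothetical names so as not to collide with the real definitions:

Require Import Coq.Strings.String.

Inductive Err (A:Type) : Type :=
  | Ok  : A -> Err A
  | Bad : string -> Err A.

(* failure short-circuits the rest of the computation, as >>= does below *)
Definition bindErr (A B:Type) (x:Err A) (f:A -> Err B) : Err B :=
  match x with
  | Ok a    => f a
  | Bad msg => Bad B msg
  end.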
@@ -124,7 +126,8 @@ Definition weakTypeToType : forall {Γ:TypeEnv}(φ:TyVarResolver Γ)(t:WeakType) | WAppTy t1 t2 => let case_WAppTy := tt in weakTypeToType _ φ t1 >>= fun t1' => weakTypeToType _ φ t2 >>= fun t2' => _ | WTyVarTy v => let case_WTyVarTy := tt in φ v >>= fun v' => _ | WForAllTy wtv t => let case_WForAllTy := tt in weakTypeToType _ (upφ wtv φ) t >>= fun t => _ - | WCodeTy ec tbody => let case_WCodeTy := tt in weakTypeToType _ φ tbody >>= fun tbody' => φ (@fixkind ★ ec) >>= fun ec' => _ + | WCodeTy ec tbody => let case_WCodeTy := tt in weakTypeToType _ φ tbody + >>= fun tbody' => φ (@fixkind ECKind ec) >>= fun ec' => _ | WCoFunTy t1 t2 t3 => let case_WCoFunTy := tt in weakTypeToType _ φ t1 >>= fun t1' => weakTypeToType _ φ t2 >>= fun t2' => @@ -139,7 +142,11 @@ Definition weakTypeToType : forall {Γ:TypeEnv}(φ:TyVarResolver Γ)(t:WeakType) end | tx::lt' => weakTypeToType Γ φ tx >>= fun t' => match lk as LK return ???(forall TV (ite:InstantiatedTypeEnv TV Γ), @RawHaskTypeList TV LK) with - | nil => Error "WTyFunApp applied to too many types" + | nil => Error ("WTyFunApp applied to too many types"(* +++ eol +++ + " tyCon= " +++ toString tc +++ eol +++ + " tyConKindArgs= " +++ toString (fst (tyFunKind tc)) +++ eol +++ + " tyConKindResult= " +++ toString (snd (tyFunKind tc)) +++ eol +++ + " types= " +++ toString lt +++ eol*)) | k::lk' => weakTypeListToTypeList lk' lt' >>= fun rhtl' => let case_weakTypeListToTypeList := tt in _ end @@ -163,7 +170,7 @@ Definition weakTypeToType : forall {Γ:TypeEnv}(φ:TyVarResolver Γ)(t:WeakType) try (matchThings k1'1 k2' "Kind mismatch in WAppTy: "; subst; apply OK; apply (haskTypeOfSomeKind (fun TV env => TApp (t1' TV env) (t2' TV env)))); apply (Error ("Kind mismatch in WAppTy: "+++err)). - + destruct case_weakTypeListToTypeList. apply (addErrorMessage "case_weakTypeListToTypeList"). destruct t' as [ k' t' ]. @@ -176,7 +183,7 @@ Definition weakTypeToType : forall {Γ:TypeEnv}(φ:TyVarResolver Γ)(t:WeakType) apply OK. eapply haskTypeOfSomeKind. unfold HaskType; intros. - apply TyFunApp. + apply (TyFunApp tc (fst (tyFunKind tc)) (snd (tyFunKind tc))). apply lt'. apply X. @@ -414,7 +421,7 @@ Definition castExpr (we:WeakExpr)(err_msg:string) {Γ} {Δ} {ξ} {τ} τ' (e:@Ex Defined. Definition coVarKind (wcv:WeakCoerVar) : Kind := - match wcv with weakCoerVar _ κ _ _ => κ end. + match wcv with weakCoerVar _ t _ => (kindOfCoreType (weakTypeToCoreType t)) end. Coercion coVarKind : WeakCoerVar >-> Kind. Definition weakTypeToTypeOfKind : forall {Γ:TypeEnv}(φ:TyVarResolver Γ)(t:WeakType)(κ:Kind), ???(HaskType Γ κ). @@ -510,7 +517,22 @@ Fixpoint doesWeakVarOccurAlts (wev:WeakExprVar) | T_Branch b1 b2 => doesWeakVarOccurAlts wev b1 || doesWeakVarOccurAlts wev b2 end. -(*Definition ensureCaseBindersAreNotUsed (we:WeakExpr) : UniqM WeakExpr := FIXME *) +Definition checkDistinct : + forall {V}(EQ:EqDecidable V)(lv:list V), ???(distinct lv). + intros. + set (distinct_decidable lv) as q. + destruct q. + exact (OK d). + exact (Error "checkDistinct failed"). + Defined. + +(* FIXME: check the kind of the type of the weakexprvar to support >0 *) +Definition mkGlobal Γ (τ:HaskType Γ ★) (wev:WeakExprVar) : Global Γ. + refine {| glob_kinds := nil |}. + apply wev. + intros. + apply τ. + Defined. 
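checkDistinct and mkGlobal package up the data demanded by the new constructors: a distinctness proof for ELetRec and a Global for EGlobal. checkDistinct follows a reusable shape, reflecting a decision procedure into the error monad so the proof itself can be threaded onward. A sketch of that shape, assuming only the OK / Error constructors and the ??? notation already used in this file; checkDec is a hypothetical name.

Definition checkDec (P:Prop) (dec:{P}+{~P}) (msg:string) : ???(P) :=
  match dec with
  | left  pf => OK pf
  | right _  => Error msg
  end.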
Definition weakExprToStrongExpr : forall (Γ:TypeEnv) @@ -537,7 +559,7 @@ Definition weakExprToStrongExpr : forall match we with | WEVar v => if ig v - then OK (EGlobal Γ Δ ξ (τ@@lev) v) + then OK ((EGlobal Γ Δ ξ (mkGlobal Γ τ v) INil lev) : Expr Γ Δ ξ (τ@@lev)) else castExpr we ("WEVar "+++toString (v:CoreVar)) (τ @@ lev) (EVar Γ Δ ξ v) | WELit lit => castExpr we ("WELit "+++toString lit) (τ @@ lev) (ELit Γ Δ ξ lit lev) @@ -614,7 +636,7 @@ Definition weakExprToStrongExpr : forall | _ => Error ("weakTypeToType: WECoApp body with type "+++toString te) end - | WECoLam cv e => let (_,_,t1,t2) := cv in + | WECoLam cv e => let (_,t1,t2) := cv in weakTypeOfWeakExpr e >>= fun te => weakTypeToTypeOfKind φ te ★ >>= fun te' => weakTypeToTypeOfKind φ t1 cv >>= fun t1' => @@ -644,8 +666,9 @@ Definition weakExprToStrongExpr : forall OK (ELR_branch Γ Δ ξ' lev _ _ b1' b2') end) rb in binds >>= fun binds' => + checkDistinct CoreVarEqDecidable (map (@fst _ _) (leaves (varsTypes rb φ))) >>= fun rb_distinct => weakExprToStrongExpr Γ Δ φ ψ ξ' ig' τ lev e >>= fun e' => - OK (ELetRec Γ Δ ξ lev τ _ binds' e') + OK (ELetRec Γ Δ ξ lev τ _ _ binds' e') | WECase vscrut escrut tbranches tc avars alts => weakTypeOfWeakExpr escrut >>= fun tscrut => @@ -700,6 +723,8 @@ Definition weakExprToStrongExpr : forall destruct (ξ c). simpl. apply e1. + rewrite mapleaves. + apply rb_distinct. destruct case_pf. set (distinct_decidable (vec2list exprvars')) as dec. diff --git a/src/HaskWeakTypes.v b/src/HaskWeakTypes.v index 5b73a41..9ec126e 100644 --- a/src/HaskWeakTypes.v +++ b/src/HaskWeakTypes.v @@ -8,7 +8,8 @@ Require Import General. Require Import Coq.Strings.String. Require Import Coq.Lists.List. Require Import HaskKinds. -Require Import HaskLiteralsAndTyCons. +Require Import HaskLiterals. +Require Import HaskTyCons. Require Import HaskCoreVars. (* a WeakTypeVar merely wraps a CoreVar and includes its Kind *) @@ -46,7 +47,7 @@ Instance WeakTypeVarEqDecidable : EqDecidable WeakTypeVar. Defined. (* a WeakCoerVar just wraps a CoreVar and tags it with the pair of types amongst which it coerces *) -Inductive WeakCoerVar := weakCoerVar : CoreVar -> Kind -> WeakType -> WeakType -> WeakCoerVar. +Inductive WeakCoerVar := weakCoerVar : CoreVar -> WeakType -> WeakType -> WeakCoerVar. Inductive WeakCoercion : Type := | WCoVar : WeakCoerVar -> WeakCoercion (* g *) @@ -65,7 +66,7 @@ Inductive WeakCoercion : Type := Fixpoint weakCoercionTypes (wc:WeakCoercion) : WeakType * WeakType := match wc with -| WCoVar (weakCoerVar _ _ t1 t2) => (t1,t2) +| WCoVar (weakCoerVar _ t1 t2) => (WFunTyCon,WFunTyCon) (* FIXME!!! *) | WCoType t => (WFunTyCon,WFunTyCon) (* FIXME!!! *) | WCoApp c1 c2 => (WFunTyCon,WFunTyCon) (* FIXME!!! *) | WCoAppT c t => (WFunTyCon,WFunTyCon) (* FIXME!!! *) diff --git a/src/HaskWeakVars.v b/src/HaskWeakVars.v index 5169046..3fb7a4b 100644 --- a/src/HaskWeakVars.v +++ b/src/HaskWeakVars.v @@ -8,7 +8,8 @@ Require Import Coq.Strings.String. Require Import Coq.Lists.List. Require Import General. Require Import HaskKinds. -Require Import HaskLiteralsAndTyCons. +Require Import HaskLiterals. +Require Import HaskTyCons. Require Import HaskCoreVars. Require Import HaskCoreTypes. Require Import HaskWeakTypes. 
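Earlier in this patch, the new EGlobal case of exprToWeakExpr builds its result by folding WETyApp over the global's type arguments. The toy example below, on a hypothetical expression syntax, just records the left-nested association that fold_left produces:

Require Import Coq.Lists.List.

Inductive Exp : Type :=
  | ExpVar : nat -> Exp
  | ExpApp : Exp -> Exp -> Exp.

Definition applyAll (head:Exp) (args:list Exp) : Exp :=
  fold_left ExpApp args head.

(* ((head arg1) arg2), matching the fold_left (fun x y => WETyApp x y) above *)
Example applyAll_ex :
  applyAll (ExpVar 0) (ExpVar 1 :: ExpVar 2 :: nil)
    = ExpApp (ExpApp (ExpVar 0) (ExpVar 1)) (ExpVar 2).
  reflexivity. Qed.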
@@ -31,9 +32,9 @@ Definition weakTypeVarToKind (tv:WeakTypeVar) : Kind := Definition weakVarToCoreVar (wv:WeakVar) : CoreVar := match wv with - | WExprVar (weakExprVar v _ ) => v - | WTypeVar (weakTypeVar v _ ) => v - | WCoerVar (weakCoerVar v _ _ _) => v + | WExprVar (weakExprVar v _ ) => v + | WTypeVar (weakTypeVar v _ ) => v + | WCoerVar (weakCoerVar v _ _) => v end. Coercion weakVarToCoreVar : WeakVar >-> CoreVar. @@ -48,10 +49,10 @@ Definition tyConTyVars (tc:CoreTyCon) := Opaque tyConTyVars. Definition tyConKind (tc:TyCon) : list Kind := map (fun (x:WeakTypeVar) => x:Kind) (tyConTyVars tc). -Variable rawTyFunKind : CoreTyCon -> Kind. Extract Inlined Constant rawTyFunKind => "(coreKindToKind . TyCon.tyConKind)". +Variable rawTyFunKind : CoreTyCon -> ((list Kind) * Kind). Extract Inlined Constant rawTyFunKind => "rawTyFunKind". Definition tyFunKind (tc:TyFun) : ((list Kind) * Kind) := - splitKind (rawTyFunKind tc). + rawTyFunKind tc. Instance WeakVarToString : ToString WeakVar := { toString := fun x => toString (weakVarToCoreVar x) }. diff --git a/src/NaturalDeduction.v b/src/NaturalDeduction.v index 56d74cd..3aeb7db 100644 --- a/src/NaturalDeduction.v +++ b/src/NaturalDeduction.v @@ -229,18 +229,7 @@ Section Natural_Deduction. Hint Constructors Structural. Hint Constructors BuiltFrom. Hint Constructors NDPredicateClosure. - - Hint Extern 1 => apply nd_structural_id0. - Hint Extern 1 => apply nd_structural_id1. - Hint Extern 1 => apply nd_structural_cancell. - Hint Extern 1 => apply nd_structural_cancelr. - Hint Extern 1 => apply nd_structural_llecnac. - Hint Extern 1 => apply nd_structural_rlecnac. - Hint Extern 1 => apply nd_structural_assoc. - Hint Extern 1 => apply nd_structural_cossa. - Hint Extern 1 => apply ndpc_p. - Hint Extern 1 => apply ndpc_prod. - Hint Extern 1 => apply ndpc_comp. + Hint Unfold StructuralND. Lemma nd_id_structural : forall sl, StructuralND (nd_id sl). intros. @@ -325,53 +314,6 @@ Section Natural_Deduction. inversion bogus. Defined. - (* a "ClosedSIND" is a proof with no open hypotheses and no multi-conclusion rules *) - Inductive ClosedSIND : Tree ??Judgment -> Type := - | cnd_weak : ClosedSIND [] - | cnd_rule : forall h c , ClosedSIND h -> Rule h c -> ClosedSIND c - | cnd_branch : forall c1 c2, ClosedSIND c1 -> ClosedSIND c2 -> ClosedSIND (c1,,c2) - . - - (* we can turn an SIND without hypotheses into a ClosedSIND *) - Definition closedFromSIND h c (pn2:SIND h c)(cnd:ClosedSIND h) : ClosedSIND c. - refine ((fix closedFromPnodes h c (pn2:SIND h c)(cnd:ClosedSIND h) {struct pn2} := - (match pn2 in SIND H C return H=h -> C=c -> _ with - | scnd_weak c => let case_weak := tt in _ - | scnd_comp ht ct c pn' rule => let case_comp := tt in let qq := closedFromPnodes _ _ pn' in _ - | scnd_branch ht c1 c2 pn' pn'' => let case_branch := tt in - let q1 := closedFromPnodes _ _ pn' in - let q2 := closedFromPnodes _ _ pn'' in _ - - end (refl_equal _) (refl_equal _))) h c pn2 cnd). - - destruct case_weak. - intros; subst. - apply cnd_weak. - - destruct case_comp. - intros. - clear pn2. - apply (cnd_rule ct). - apply qq. - subst. - apply cnd0. - apply rule. - - destruct case_branch. - intros. - apply cnd_branch. - apply q1. subst. apply cnd0. - apply q2. subst. apply cnd0. - Defined. 
- - (* undo the above *) - Fixpoint closedNDtoNormalND {c}(cnd:ClosedSIND c) : ND [] c := - match cnd in ClosedSIND C return ND [] C with - | cnd_weak => nd_id0 - | cnd_rule h c cndh rhc => closedNDtoNormalND cndh ;; nd_rule rhc - | cnd_branch c1 c2 cnd1 cnd2 => nd_llecnac ;; nd_prod (closedNDtoNormalND cnd1) (closedNDtoNormalND cnd2) - end. - (* Natural Deduction systems whose judgments happen to be pairs of the same type *) Section SequentND. Context {S:Type}. (* type of sequent components *) @@ -511,42 +453,6 @@ Coercion sndr_ndr : SequentND_Relation >-> ND_Relation. Coercion cndr_sndr : ContextND_Relation >-> SequentND_Relation. Implicit Arguments ND [ Judgment ]. -Hint Constructors Structural. -Hint Extern 1 => apply nd_id_structural. -Hint Extern 1 => apply ndr_builtfrom_structural. -Hint Extern 1 => apply nd_structural_id0. -Hint Extern 1 => apply nd_structural_id1. -Hint Extern 1 => apply nd_structural_cancell. -Hint Extern 1 => apply nd_structural_cancelr. -Hint Extern 1 => apply nd_structural_llecnac. -Hint Extern 1 => apply nd_structural_rlecnac. -Hint Extern 1 => apply nd_structural_assoc. -Hint Extern 1 => apply nd_structural_cossa. -Hint Extern 1 => apply ndpc_p. -Hint Extern 1 => apply ndpc_prod. -Hint Extern 1 => apply ndpc_comp. -Hint Extern 1 => apply builtfrom_refl. -Hint Extern 1 => apply builtfrom_prod1. -Hint Extern 1 => apply builtfrom_prod2. -Hint Extern 1 => apply builtfrom_comp1. -Hint Extern 1 => apply builtfrom_comp2. -Hint Extern 1 => apply builtfrom_P. - -Hint Extern 1 => apply snd_inert_initial. -Hint Extern 1 => apply snd_inert_cut. -Hint Extern 1 => apply snd_inert_structural. - -Hint Extern 1 => apply cnd_inert_initial. -Hint Extern 1 => apply cnd_inert_cut. -Hint Extern 1 => apply cnd_inert_structural. -Hint Extern 1 => apply cnd_inert_cnd_ant_assoc. -Hint Extern 1 => apply cnd_inert_cnd_ant_cossa. -Hint Extern 1 => apply cnd_inert_cnd_ant_cancell. -Hint Extern 1 => apply cnd_inert_cnd_ant_cancelr. -Hint Extern 1 => apply cnd_inert_cnd_ant_llecnac. -Hint Extern 1 => apply cnd_inert_cnd_ant_rlecnac. -Hint Extern 1 => apply cnd_inert_se_expand_left. -Hint Extern 1 => apply cnd_inert_se_expand_right. (* This first notation gets its own scope because it can be confusing when we're working with multiple different kinds * of proofs. When only one kind of proof is in use, it's quite helpful though. *) @@ -556,10 +462,39 @@ Notation "a ** b" := (nd_prod a b) : nd_scope. Notation "[# a #]" := (nd_rule a) : nd_scope. Notation "a === b" := (@ndr_eqv _ _ _ _ _ a b) : nd_scope. +Hint Constructors Structural. +Hint Constructors ND_Relation. +Hint Constructors BuiltFrom. +Hint Constructors NDPredicateClosure. +Hint Constructors ContextND_Inert. +Hint Constructors SequentND_Inert. +Hint Unfold StructuralND. + (* enable setoid rewriting *) Open Scope nd_scope. Open Scope pf_scope. +Hint Extern 2 (StructuralND (nd_id _)) => apply nd_id_structural. +Hint Extern 2 (NDPredicateClosure _ ( _ ;; _ ) ) => apply ndpc_comp. +Hint Extern 2 (NDPredicateClosure _ ( _ ** _ ) ) => apply ndpc_prod. +Hint Extern 2 (NDPredicateClosure (@Structural _ _) (nd_id _)) => apply nd_id_structural. +Hint Extern 2 (BuiltFrom _ _ ( _ ;; _ ) ) => apply builtfrom_comp1. +Hint Extern 2 (BuiltFrom _ _ ( _ ;; _ ) ) => apply builtfrom_comp2. +Hint Extern 2 (BuiltFrom _ _ ( _ ** _ ) ) => apply builtfrom_prod1. +Hint Extern 2 (BuiltFrom _ _ ( _ ** _ ) ) => apply builtfrom_prod2. + +(* Hint Constructors has failed me! *) +Hint Extern 2 (@Structural _ _ _ _ (@nd_id0 _ _)) => apply nd_structural_id0. 
+Hint Extern 2 (@Structural _ _ _ _ (@nd_id1 _ _ _)) => apply nd_structural_id1. +Hint Extern 2 (@Structural _ _ _ _ (@nd_cancell _ _ _)) => apply nd_structural_cancell. +Hint Extern 2 (@Structural _ _ _ _ (@nd_cancelr _ _ _)) => apply nd_structural_cancelr. +Hint Extern 2 (@Structural _ _ _ _ (@nd_llecnac _ _ _)) => apply nd_structural_llecnac. +Hint Extern 2 (@Structural _ _ _ _ (@nd_rlecnac _ _ _)) => apply nd_structural_rlecnac. +Hint Extern 2 (@Structural _ _ _ _ (@nd_assoc _ _ _ _ _)) => apply nd_structural_assoc. +Hint Extern 2 (@Structural _ _ _ _ (@nd_cossa _ _ _ _ _)) => apply nd_structural_cossa. + +Hint Extern 4 (NDPredicateClosure _ _) => apply ndpc_p. + Add Parametric Relation {jt rt ndr h c} : (h/⋯⋯/c) (@ndr_eqv jt rt ndr h c) reflexivity proved by (@Equivalence_Reflexive _ _ (ndr_eqv_equivalence h c)) symmetry proved by (@Equivalence_Symmetric _ _ (ndr_eqv_equivalence h c)) @@ -581,7 +516,8 @@ Section ND_Relation_Facts. (* useful *) Lemma ndr_comp_right_identity : forall h c (f:h/⋯⋯/c), ndr_eqv (f ;; nd_id c) f. - intros; apply (ndr_builtfrom_structural f); auto. + intros; apply (ndr_builtfrom_structural f). auto. + auto. Defined. (* useful *) @@ -589,6 +525,44 @@ Section ND_Relation_Facts. intros; apply (ndr_builtfrom_structural f); auto. Defined. + Ltac nd_prod_preserves_comp_ltac P EQV := + match goal with + [ |- context [ (?A ** ?B) ;; (?C ** ?D) ] ] => + set (@ndr_prod_preserves_comp _ _ EQV _ _ A _ _ B _ C _ D) as P + end. + + Lemma nd_swap A B C D (f:ND _ A B) (g:ND _ C D) : + (f ** nd_id C) ;; (nd_id B ** g) === + (nd_id A ** g) ;; (f ** nd_id D). + setoid_rewrite <- ndr_prod_preserves_comp. + setoid_rewrite ndr_comp_left_identity. + setoid_rewrite ndr_comp_right_identity. + reflexivity. + Qed. + + (* this tactical searches the environment; setoid_rewrite doesn't seem to be able to do that properly sometimes *) + Ltac nd_swap_ltac P EQV := + match goal with + [ |- context [ (?F ** nd_id _) ;; (nd_id _ ** ?G) ] ] => + set (@nd_swap _ _ EQV _ _ _ _ F G) as P + end. + + Lemma nd_prod_split_left A B C D (f:ND _ A B) (g:ND _ B C) : + nd_id D ** (f ;; g) === + (nd_id D ** f) ;; (nd_id D ** g). + setoid_rewrite <- ndr_prod_preserves_comp. + setoid_rewrite ndr_comp_left_identity. + reflexivity. + Qed. + + Lemma nd_prod_split_right A B C D (f:ND _ A B) (g:ND _ B C) : + (f ;; g) ** nd_id D === + (f ** nd_id D) ;; (g ** nd_id D). + setoid_rewrite <- ndr_prod_preserves_comp. + setoid_rewrite ndr_comp_left_identity. + reflexivity. + Qed. + End ND_Relation_Facts. (* a generalization of the procedure used to build (nd_id n) from nd_id0 and nd_id1 *) @@ -713,19 +687,6 @@ Inductive nd_property {Judgment}{Rule}(P:forall h c, @Rule h c -> Prop) : forall | nd_property_rule : forall h c r, P h c r -> @nd_property _ _ P h c (nd_rule r). Hint Constructors nd_property. -(* witnesses the fact that every Rule in a particular proof satisfies the given predicate (for ClosedSIND) *) -Inductive cnd_property {Judgment}{Rule}(P:forall h c, @Rule h c -> Prop) : forall {c}, @ClosedSIND Judgment Rule c -> Prop := -| cnd_property_weak : @cnd_property _ _ P _ cnd_weak -| cnd_property_rule : forall h c r cnd', - P h c r -> - @cnd_property _ _ P h cnd' -> - @cnd_property _ _ P c (cnd_rule _ _ cnd' r) -| cnd_property_branch : - forall c1 c2 cnd1 cnd2, - @cnd_property _ _ P c1 cnd1 -> - @cnd_property _ _ P c2 cnd2 -> - @cnd_property _ _ P _ (cnd_branch _ _ cnd1 cnd2). 
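The nd_swap and nd_prod_split lemmas added to ND_Relation_Facts above all follow from ndr_prod_preserves_comp together with the identity laws; the chain below spells out the equational content of the setoid_rewrite proof of nd_swap.

(*
      (f ** nd_id C) ;; (nd_id B ** g)
  === (f ;; nd_id B) ** (nd_id C ;; g)      by ndr_prod_preserves_comp
  === f ** g                                by the identity laws
  === (nd_id A ;; f) ** (g ;; nd_id D)      by the identity laws
  === (nd_id A ** g) ;; (f ** nd_id D)      by ndr_prod_preserves_comp
*)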
- (* witnesses the fact that every Rule in a particular proof satisfies the given predicate (for SIND) *) Inductive scnd_property {Judgment}{Rule}(P:forall h c, @Rule h c -> Prop) : forall {h c}, @SIND Judgment Rule h c -> Prop := | scnd_property_weak : forall c, @scnd_property _ _ P _ _ (scnd_weak c) diff --git a/src/NaturalDeductionCategory.v b/src/NaturalDeductionCategory.v index d721a97..9360bfa 100644 --- a/src/NaturalDeductionCategory.v +++ b/src/NaturalDeductionCategory.v @@ -136,9 +136,9 @@ Section Judgments_Category. ; pmon_assoc_ll := jud_mon_assoc_ll }. unfold functor_fobj; unfold fmor; simpl; - apply Build_Pentagon; simpl; intros; apply (ndr_builtfrom_structural nd_id0); auto. + apply Build_Pentagon; simpl; intros; apply (ndr_builtfrom_structural nd_id0); auto 10. unfold functor_fobj; unfold fmor; simpl; - apply Build_Triangle; simpl; intros; apply (ndr_builtfrom_structural nd_id0); auto. + apply Build_Triangle; simpl; intros; apply (ndr_builtfrom_structural nd_id0); auto 10. intros; unfold eqv; simpl; auto; reflexivity. intros; unfold eqv; simpl; auto; reflexivity. intros; unfold eqv; simpl; apply Judgments_Category_Commutative. diff --git a/src/HaskProofStratified.v b/src/PCF.v similarity index 50% rename from src/HaskProofStratified.v rename to src/PCF.v index ee475da..5caf2dc 100644 --- a/src/HaskProofStratified.v +++ b/src/PCF.v @@ -1,5 +1,5 @@ (*********************************************************************************************************************************) -(* HaskProofStratified: *) +(* PCF: *) (* *) (* An alternate representation for HaskProof which ensures that deductions on a given level are grouped into contiguous *) (* blocks. This representation lacks the attractive compositionality properties of HaskProof, but makes it easier to *) @@ -14,14 +14,6 @@ Require Import NaturalDeduction. Require Import Coq.Strings.String. Require Import Coq.Lists.List. -Require Import HaskKinds. -Require Import HaskCoreTypes. -Require Import HaskLiteralsAndTyCons. -Require Import HaskStrongTypes. -Require Import HaskProof. -Require Import NaturalDeduction. -Require Import NaturalDeductionCategory. - Require Import Algebras_ch4. Require Import Categories_ch1_3. Require Import Functors_ch1_4. @@ -35,6 +27,15 @@ Require Import NaturalIsomorphisms_ch7_5. Require Import MonoidalCategories_ch7_8. Require Import Coherence_ch7_8. +Require Import HaskKinds. +Require Import HaskCoreTypes. +Require Import HaskLiterals. +Require Import HaskTyCons. +Require Import HaskStrongTypes. +Require Import HaskProof. +Require Import NaturalDeduction. +Require Import NaturalDeductionCategory. + Require Import HaskStrongTypes. Require Import HaskStrong. Require Import HaskProof. @@ -53,16 +54,16 @@ Open Scope nd_scope. * "PCF" means "the level-1 sublanguage" and "FC" (aka System FC) means * the whole language (level-0 language including bracketed level-1 terms) *) -Section HaskProofStratified. +Section PCF. Section PCF. - Context (ndr_systemfc:@ND_Relation _ Rule). - + Context {ndr_systemfc:@ND_Relation _ Rule}. Context Γ (Δ:CoercionEnv Γ). - Definition PCFJudg (ec:HaskTyVar Γ ★) := + + Definition PCFJudg (ec:HaskTyVar Γ ECKind) := @prod (Tree ??(HaskType Γ ★)) (Tree ??(HaskType Γ ★)). - Definition pcfjudg (ec:HaskTyVar Γ ★) := + Definition pcfjudg (ec:HaskTyVar Γ ECKind) := @pair (Tree ??(HaskType Γ ★)) (Tree ??(HaskType Γ ★)). (* given an PCFJudg at depth (ec::depth) we can turn it into an PCFJudg @@ -73,7 +74,7 @@ Section HaskProofStratified. 
(Σ,τ) => Γ > Δ > (Σ@@@(ec::nil)) |- (mapOptionTree (fun t => HaskBrak ec t) τ @@@ nil) end. - Definition pcf_vars {Γ}(ec:HaskTyVar Γ ★)(t:Tree ??(LeveledHaskType Γ ★)) : Tree ??(HaskType Γ ★) + Definition pcf_vars {Γ}(ec:HaskTyVar Γ ECKind)(t:Tree ??(LeveledHaskType Γ ★)) : Tree ??(HaskType Γ ★) := mapOptionTreeAndFlatten (fun lt => match lt with t @@ l => match l with | ec'::nil => if eqd_dec ec ec' then [t] else [] @@ -90,21 +91,13 @@ Section HaskProofStratified. [((pcf_vars ec Σ) , τ )] [Γ > Δ > Σ |- (mapOptionTree (HaskBrak ec) τ @@@ lev)]. - Definition fc_vars {Γ}(ec:HaskTyVar Γ ★)(t:Tree ??(LeveledHaskType Γ ★)) : Tree ??(HaskType Γ ★) - := mapOptionTreeAndFlatten (fun lt => - match lt with t @@ l => match l with - | ec'::nil => if eqd_dec ec ec' then [] else [t] - | _ => [] - end - end) t. - Definition pcfjudg2judg ec (cj:PCFJudg ec) := match cj with (Σ,τ) => Γ > Δ > (Σ @@@ (ec::nil)) |- (τ @@@ (ec::nil)) end. (* Rules allowed in PCF; i.e. rules we know how to turn into GArrows *) (* Rule_PCF consists of the rules allowed in flat PCF: everything except *) (* AppT, AbsT, AppC, AbsC, Cast, Global, and some Case statements *) - Inductive Rule_PCF (ec:HaskTyVar Γ ★) + Inductive Rule_PCF (ec:HaskTyVar Γ ECKind) : forall (h c:Tree ??(PCFJudg ec)), Rule (mapOptionTree (pcfjudg2judg ec) h) (mapOptionTree (pcfjudg2judg ec) c) -> Type := | PCF_RArrange : ∀ x y t a, Rule_PCF ec [(_, _)] [(_, _)] (RArrange Γ Δ (x@@@(ec::nil)) (y@@@(ec::nil)) (t@@@(ec::nil)) a) | PCF_RLit : ∀ lit , Rule_PCF ec [ ] [ ([],[_]) ] (RLit Γ Δ lit (ec::nil)) @@ -130,33 +123,6 @@ Section HaskProofStratified. Definition PCFRule lev h c := { r:_ & @Rule_PCF lev h c r }. End PCF. - Definition FCJudg Γ (Δ:CoercionEnv Γ) := - @prod (Tree ??(LeveledHaskType Γ ★)) (Tree ??(LeveledHaskType Γ ★)). - Definition fcjudg2judg {Γ}{Δ}(fc:FCJudg Γ Δ) := - match fc with - (x,y) => Γ > Δ > x |- y - end. - Coercion fcjudg2judg : FCJudg >-> Judg. - - Definition pcfjudg2fcjudg {Γ}{Δ} ec (fc:PCFJudg Γ ec) : FCJudg Γ Δ := - match fc with - (x,y) => (x @@@ (ec::nil),y @@@ (ec::nil)) - end. - - (* An organized deduction has been reorganized into contiguous blocks whose - * hypotheses (if any) and conclusion have the same Γ and Δ and a fixed nesting depth. The boolean - * indicates if non-PCF rules have been used *) - Inductive OrgR Γ Δ : Tree ??(FCJudg Γ Δ) -> Tree ??(FCJudg Γ Δ) -> Type := - - | org_fc : forall (h c:Tree ??(FCJudg Γ Δ)) - (r:Rule (mapOptionTree fcjudg2judg h) (mapOptionTree fcjudg2judg c)), - Rule_Flat r -> - OrgR _ _ h c - - | org_pcf : forall ec h c, - ND (PCFRule Γ Δ ec) h c -> - OrgR Γ Δ (mapOptionTree (pcfjudg2fcjudg ec) h) (mapOptionTree (pcfjudg2fcjudg ec) c). - Definition mkEsc Γ Δ ec (h:Tree ??(PCFJudg Γ ec)) : ND Rule (mapOptionTree (brakify Γ Δ) h) @@ -185,13 +151,6 @@ Section HaskProofStratified. apply (Prelude_error "mkBrak got multi-leaf succedent"). Defined. - (* - Definition Partition {Γ} ec (Σ:Tree ??(LeveledHaskType Γ ★)) := - { vars:(_ * _) | - fc_vars ec Σ = fst vars /\ - pcf_vars ec Σ = snd vars }. - *) - Definition pcfToND Γ Δ : forall ec h c, ND (PCFRule Γ Δ ec) h c -> ND Rule (mapOptionTree (pcfjudg2judg Γ Δ ec) h) (mapOptionTree (pcfjudg2judg Γ Δ ec) c). intros. @@ -206,137 +165,6 @@ Section HaskProofStratified. { ndr_eqv := fun a b f g => (pcfToND _ _ _ _ _ f) === (pcfToND _ _ _ _ _ g) }. Admitted. - (* - * An intermediate representation necessitated by Coq's termination - * conditions. 
This is basically a tree where each node is a - * subproof which is either entirely level-1 or entirely level-0 - *) - Inductive Alternating : Tree ??Judg -> Type := - - | alt_nil : Alternating [] - - | alt_branch : forall a b, - Alternating a -> Alternating b -> Alternating (a,,b) - - | alt_fc : forall h c, - Alternating h -> - ND Rule h c -> - Alternating c - - | alt_pcf : forall Γ Δ ec h c h' c', - MatchingJudgments Γ Δ h h' -> - MatchingJudgments Γ Δ c c' -> - Alternating h' -> - ND (PCFRule Γ Δ ec) h c -> - Alternating c'. - - Require Import Coq.Logic.Eqdep. - - Lemma magic a b c d ec e : - ClosedSIND(Rule:=Rule) [a > b > c |- [d @@ (ec :: e)]] -> - ClosedSIND(Rule:=Rule) [a > b > pcf_vars ec c @@@ (ec :: nil) |- [d @@ (ec :: nil)]]. - admit. - Defined. - - Definition orgify : forall Γ Δ Σ τ (pf:ClosedSIND(Rule:=Rule) [Γ > Δ > Σ |- τ]), Alternating [Γ > Δ > Σ |- τ]. - - refine ( - fix orgify_fc' Γ Δ Σ τ (pf:ClosedSIND [Γ > Δ > Σ |- τ]) {struct pf} : Alternating [Γ > Δ > Σ |- τ] := - let case_main := tt in _ - with orgify_fc c (pf:ClosedSIND c) {struct pf} : Alternating c := - (match c as C return C=c -> Alternating C with - | T_Leaf None => fun _ => alt_nil - | T_Leaf (Some (Γ > Δ > Σ |- τ)) => let case_leaf := tt in fun eqpf => _ - | T_Branch b1 b2 => let case_branch := tt in fun eqpf => _ - end (refl_equal _)) - with orgify_pcf Γ Δ ec pcfj j (m:MatchingJudgments Γ Δ pcfj j) - (pf:ClosedSIND (mapOptionTree (pcfjudg2judg Γ Δ ec) pcfj)) {struct pf} : Alternating j := - let case_pcf := tt in _ - for orgify_fc'). - - destruct case_main. - inversion pf; subst. - set (alt_fc _ _ (orgify_fc _ X) (nd_rule X0)) as backup. - refine (match X0 as R in Rule H C return - match C with - | T_Leaf (Some (Γ > Δ > Σ |- τ)) => - h=H -> Alternating [Γ > Δ > Σ |- τ] -> Alternating [Γ > Δ > Σ |- τ] - | _ => True - end - with - | RBrak Σ a b c n m => let case_RBrak := tt in fun pf' backup => _ - | REsc Σ a b c n m => let case_REsc := tt in fun pf' backup => _ - | _ => fun pf' x => x - end (refl_equal _) backup). - clear backup0 backup. - - destruct case_RBrak. - rename c into ec. - set (@match_leaf Σ0 a ec n [b] m) as q. - set (orgify_pcf Σ0 a ec _ _ q) as q'. - apply q'. - simpl. - rewrite pf' in X. - apply magic in X. - apply X. - - destruct case_REsc. - apply (Prelude_error "encountered Esc in wrong side of mkalt"). - - destruct case_leaf. - apply orgify_fc'. - rewrite eqpf. - apply pf. - - destruct case_branch. - rewrite <- eqpf in pf. - inversion pf; subst. - apply no_rules_with_multiple_conclusions in X0. - inversion X0. - exists b1. exists b2. - auto. - apply (alt_branch _ _ (orgify_fc _ X) (orgify_fc _ X0)). - - destruct case_pcf. - Admitted. - - Definition pcfify Γ Δ ec : forall Σ τ, - ClosedSIND(Rule:=Rule) [ Γ > Δ > Σ@@@(ec::nil) |- τ @@@ (ec::nil)] - -> ND (PCFRule Γ Δ ec) [] [(Σ,τ)]. - - refine (( - fix pcfify Σ τ (pn:@ClosedSIND _ Rule [ Γ > Δ > Σ@@@(ec::nil) |- τ @@@ (ec::nil)]) {struct pn} - : ND (PCFRule Γ Δ ec) [] [(Σ,τ)] := - (match pn in @ClosedSIND _ _ J return J=[Γ > Δ > Σ@@@(ec::nil) |- τ @@@ (ec::nil)] -> _ with - | cnd_weak => let case_nil := tt in _ - | cnd_rule h c cnd' r => let case_rule := tt in _ - | cnd_branch _ _ c1 c2 => let case_branch := tt in _ - end (refl_equal _)))). - intros. - inversion H. - intros. - destruct c; try destruct o; inversion H. - destruct j. - Admitted. - - (* any proof in organized form can be "dis-organized" *) - (* - Definition unOrgR : forall Γ Δ h c, OrgR Γ Δ h c -> ND Rule h c. - intros. - induction X. - apply nd_rule. - apply r. - eapply nd_comp. 
- (* - apply (mkEsc h). - eapply nd_comp; [ idtac | apply (mkBrak c) ]. - apply pcfToND. - apply n. - *) - Admitted. - Definition unOrgND Γ Δ h c : ND (OrgR Γ Δ) h c -> ND Rule h c := nd_map (unOrgR Γ Δ). - *) - Hint Constructors Rule_Flat. Definition PCF_Arrange {Γ}{Δ}{lev} : forall x y z, Arrange x y -> ND (PCFRule Γ Δ lev) [(x,z)] [(y,z)]. @@ -452,137 +280,4 @@ Section HaskProofStratified. ; pl_eqv := OrgPCF_ContextND_Relation Γ Δ lev }. - Definition SystemFCa_cut Γ Δ : forall a b c, ND (OrgR Γ Δ) ([(a,b)],,[(b,c)]) [(a,c)]. - intros. - destruct b. - destruct o. - destruct c. - destruct o. - - (* when the cut is a single leaf and the RHS is a single leaf: *) - (* - eapply nd_comp. - eapply nd_prod. - apply nd_id. - eapply nd_rule. - set (@org_fc) as ofc. - set (RArrange Γ Δ _ _ _ (RuCanL [l0])) as rule. - apply org_fc with (r:=RArrange _ _ _ _ _ (RuCanL [_])). - auto. - eapply nd_comp; [ idtac | eapply nd_rule; apply org_fc with (r:=RArrange _ _ _ _ _ (RCanL _)) ]. - apply nd_rule. - destruct l. - destruct l0. - assert (h0=h2). admit. - subst. - apply org_fc with (r:=@RLet Γ Δ [] a h1 h h2). - auto. - auto. - *) - admit. - apply (Prelude_error "systemfc cut rule invoked with [a|=[b]] [[b]|=[]]"). - apply (Prelude_error "systemfc cut rule invoked with [a|=[b]] [[b]|=[x,,y]]"). - apply (Prelude_error "systemfc rule invoked with [a|=[]] [[]|=c]"). - apply (Prelude_error "systemfc rule invoked with [a|=[b,,c]] [[b,,c]|=z]"). - Defined. - - Instance SystemFCa_sequents Γ Δ : @SequentND _ (OrgR Γ Δ) _ _ := - { snd_cut := SystemFCa_cut Γ Δ }. - apply Build_SequentND. - intros. - induction a. - destruct a; simpl. - (* - apply nd_rule. - destruct l. - apply org_fc with (r:=RVar _ _ _ _). - auto. - apply nd_rule. - apply org_fc with (r:=RVoid _ _ ). - auto. - eapply nd_comp. - eapply nd_comp; [ apply nd_llecnac | idtac ]. - apply (nd_prod IHa1 IHa2). - apply nd_rule. - apply org_fc with (r:=RJoin _ _ _ _ _ _). - auto. - admit. - *) - admit. - admit. - admit. - admit. - Defined. - - Definition SystemFCa_left Γ Δ a b c : ND (OrgR Γ Δ) [(b,c)] [((a,,b),(a,,c))]. - admit. - (* - eapply nd_comp; [ apply nd_llecnac | eapply nd_comp; [ idtac | idtac ] ]. - eapply nd_prod; [ apply snd_initial | apply nd_id ]. - apply nd_rule. - apply org_fc with (r:=RJoin Γ Δ a b a c). - auto. - *) - Defined. - - Definition SystemFCa_right Γ Δ a b c : ND (OrgR Γ Δ) [(b,c)] [((b,,a),(c,,a))]. - admit. - (* - eapply nd_comp; [ apply nd_rlecnac | eapply nd_comp; [ idtac | idtac ] ]. - eapply nd_prod; [ apply nd_id | apply snd_initial ]. - apply nd_rule. - apply org_fc with (r:=RJoin Γ Δ b a c a). - auto. - *) - Defined. - - Instance SystemFCa_sequent_join Γ Δ : @ContextND _ _ _ _ (SystemFCa_sequents Γ Δ) := - { cnd_expand_left := fun a b c => SystemFCa_left Γ Δ c a b - ; cnd_expand_right := fun a b c => SystemFCa_right Γ Δ c a b }. - (* - intros; apply nd_rule. simpl. - apply (org_fc _ _ _ _ ((RArrange _ _ _ _ _ (RCossa _ _ _)))). - auto. - - intros; apply nd_rule. simpl. - apply (org_fc _ _ _ _ (RArrange _ _ _ _ _ (RAssoc _ _ _))); auto. - - intros; apply nd_rule. simpl. - apply (org_fc _ _ _ _ (RArrange _ _ _ _ _ (RCanL _))); auto. - - intros; apply nd_rule. simpl. - apply (org_fc _ _ _ _ (RArrange _ _ _ _ _ (RCanR _))); auto. - - intros; apply nd_rule. simpl. - apply (org_fc _ _ _ _ (RArrange _ _ _ _ _ (RuCanL _))); auto. - - intros; apply nd_rule. simpl. - apply (org_fc _ _ _ _ (RArrange _ _ _ _ _ (RuCanR _))); auto. - *) - admit. - admit. - admit. - admit. - admit. - admit. - Defined. 
- - Instance OrgFC Γ Δ : @ND_Relation _ (OrgR Γ Δ). - Admitted. - - Instance OrgFC_SequentND_Relation Γ Δ : SequentND_Relation (SystemFCa_sequent_join Γ Δ) (OrgFC Γ Δ). - admit. - Defined. - - Definition OrgFC_ContextND_Relation Γ Δ - : @ContextND_Relation _ _ _ _ _ (SystemFCa_sequent_join Γ Δ) (OrgFC Γ Δ) (OrgFC_SequentND_Relation Γ Δ). - admit. - Defined. - - (* 5.1.2 *) - Instance SystemFCa Γ Δ : @ProgrammingLanguage (LeveledHaskType Γ ★) _ := - { pl_eqv := OrgFC_ContextND_Relation Γ Δ - ; pl_snd := SystemFCa_sequents Γ Δ - }. - -End HaskProofStratified. +End PCF. diff --git a/src/ProgrammingLanguage.v b/src/ProgrammingLanguage.v index 0636a6e..83b435a 100644 --- a/src/ProgrammingLanguage.v +++ b/src/ProgrammingLanguage.v @@ -26,9 +26,7 @@ Require Import Enrichment_ch2_8. Require Import RepresentableStructure_ch7_2. Require Import FunctorCategories_ch7_7. -Require Import Enrichments. Require Import NaturalDeduction. -Require Import NaturalDeductionCategory. Section Programming_Language. @@ -47,218 +45,15 @@ Section Programming_Language. Open Scope pl_scope. Class ProgrammingLanguage := - { pl_eqv0 : @ND_Relation PLJudg Rule + { pl_eqv0 :> @ND_Relation PLJudg Rule ; pl_snd :> @SequentND PLJudg Rule _ sequent ; pl_cnd :> @ContextND PLJudg Rule T sequent pl_snd ; pl_eqv1 :> @SequentND_Relation PLJudg Rule _ sequent pl_snd pl_eqv0 ; pl_eqv :> @ContextND_Relation PLJudg Rule _ sequent pl_snd pl_cnd pl_eqv0 pl_eqv1 }. Notation "pf1 === pf2" := (@ndr_eqv _ _ pl_eqv _ _ pf1 pf2) : temporary_scope3. - - Section LanguageCategory. - - Context (PL:ProgrammingLanguage). - - (* category of judgments in a fixed type/coercion context *) - Definition Judgments_cartesian := @Judgments_Category_CartesianCat _ Rule pl_eqv. - - Definition JudgmentsL := Judgments_cartesian. - - Definition identityProof t : [] ~~{JudgmentsL}~~> [t |= t]. - unfold hom; simpl. - apply snd_initial. - Defined. - - Definition cutProof a b c : [a |= b],,[b |= c] ~~{JudgmentsL}~~> [a |= c]. - unfold hom; simpl. - apply snd_cut. - Defined. - - Existing Instance pl_eqv. - - Definition TypesL : ECategory JudgmentsL (Tree ??T) (fun x y => [x|=y]). - refine - {| eid := identityProof - ; ecomp := cutProof - |}; intros. - apply (mon_commutative(MonoidalCat:=JudgmentsL)). - apply (mon_commutative(MonoidalCat:=JudgmentsL)). - unfold identityProof; unfold cutProof; simpl; eapply cndr_inert. apply pl_eqv. auto. auto. - unfold identityProof; unfold cutProof; simpl; eapply cndr_inert. apply pl_eqv. auto. auto. - unfold identityProof; unfold cutProof; simpl; eapply cndr_inert. apply pl_eqv. auto. auto. - apply ndpc_comp; auto. - apply ndpc_comp; auto. - Defined. - - Instance Types_first c : EFunctor TypesL TypesL (fun x => x,,c ) := - { efunc := fun x y => cnd_expand_right(ContextND:=pl_cnd) x y c }. - intros; apply (mon_commutative(MonoidalCat:=JudgmentsL)). - intros. unfold ehom. unfold hom. unfold identityProof. unfold eid. simpl. unfold identityProof. - apply (cndr_inert pl_cnd); auto. - intros. unfold ehom. unfold comp. simpl. unfold cutProof. - rewrite <- (@ndr_prod_preserves_comp _ _ pl_eqv _ _ (cnd_expand_right _ _ c) _ _ (nd_id1 (b|=c0)) - _ (nd_id1 (a,,c |= b,,c)) _ (cnd_expand_right _ _ c)). - setoid_rewrite (@ndr_comp_right_identity _ _ pl_eqv _ [a,, c |= b,, c]). - setoid_rewrite (@ndr_comp_left_identity _ _ pl_eqv [b |= c0]). - simpl; eapply cndr_inert. apply pl_eqv. auto. auto. - Defined. - - Instance Types_second c : EFunctor TypesL TypesL (fun x => c,,x) := - { efunc := fun x y => ((@cnd_expand_left _ _ _ _ _ _ x y c)) }. 
- intros; apply (mon_commutative(MonoidalCat:=JudgmentsL)). - intros. unfold ehom. unfold hom. unfold identityProof. unfold eid. simpl. unfold identityProof. - eapply cndr_inert; auto. apply pl_eqv. - intros. unfold ehom. unfold comp. simpl. unfold cutProof. - rewrite <- (@ndr_prod_preserves_comp _ _ pl_eqv _ _ (cnd_expand_left _ _ c) _ _ (nd_id1 (b|=c0)) - _ (nd_id1 (c,,a |= c,,b)) _ (cnd_expand_left _ _ c)). - setoid_rewrite (@ndr_comp_right_identity _ _ pl_eqv _ [c,,a |= c,,b]). - setoid_rewrite (@ndr_comp_left_identity _ _ pl_eqv [b |= c0]). - simpl; eapply cndr_inert. apply pl_eqv. auto. auto. - Defined. - - Definition Types_binoidal : EBinoidalCat TypesL (@T_Branch _). - refine - {| ebc_first := Types_first - ; ebc_second := Types_second - |}. - Defined. - - Instance Types_assoc_iso a b c : Isomorphic(C:=TypesL) ((a,,b),,c) (a,,(b,,c)) := - { iso_forward := snd_initial _ ;; cnd_ant_cossa _ a b c - ; iso_backward := snd_initial _ ;; cnd_ant_assoc _ a b c - }. - simpl; eapply cndr_inert. unfold identityProof; apply pl_eqv. auto. - apply ndpc_comp; auto. - apply ndpc_comp; auto. - auto. - simpl; eapply cndr_inert. unfold identityProof; apply pl_eqv. auto. - apply ndpc_comp; auto. - apply ndpc_comp; auto. - auto. - Defined. - - Instance Types_cancelr_iso a : Isomorphic(C:=TypesL) (a,,[]) a := - { iso_forward := snd_initial _ ;; cnd_ant_rlecnac _ a - ; iso_backward := snd_initial _ ;; cnd_ant_cancelr _ a - }. - unfold eqv; unfold comp; simpl. - eapply cndr_inert. apply pl_eqv. auto. - apply ndpc_comp; auto. - apply ndpc_comp; auto. - auto. - unfold eqv; unfold comp; simpl. - eapply cndr_inert. apply pl_eqv. auto. - apply ndpc_comp; auto. - apply ndpc_comp; auto. - auto. - Defined. - - Instance Types_cancell_iso a : Isomorphic(C:=TypesL) ([],,a) a := - { iso_forward := snd_initial _ ;; cnd_ant_llecnac _ a - ; iso_backward := snd_initial _ ;; cnd_ant_cancell _ a - }. - unfold eqv; unfold comp; simpl. - eapply cndr_inert. apply pl_eqv. auto. - apply ndpc_comp; auto. - apply ndpc_comp; auto. - auto. - unfold eqv; unfold comp; simpl. - eapply cndr_inert. apply pl_eqv. auto. - apply ndpc_comp; auto. - apply ndpc_comp; auto. - auto. - Defined. - - Instance Types_assoc a b : Types_second a >>>> Types_first b <~~~> Types_first b >>>> Types_second a := - { ni_iso := fun c => Types_assoc_iso a c b }. - admit. (* need to add this as an obligation in ProgrammingLanguage class *) - Defined. - - Instance Types_cancelr : Types_first [] <~~~> functor_id _ := - { ni_iso := Types_cancelr_iso }. - intros; simpl. - admit. (* need to add this as an obligation in ProgrammingLanguage class *) - Defined. - - Instance Types_cancell : Types_second [] <~~~> functor_id _ := - { ni_iso := Types_cancell_iso }. - admit. (* need to add this as an obligation in ProgrammingLanguage class *) - Defined. - - Instance Types_assoc_ll a b : Types_second (a,,b) <~~~> Types_second b >>>> Types_second a := - { ni_iso := fun c => Types_assoc_iso a b c }. - admit. (* need to add this as an obligation in ProgrammingLanguage class *) - Defined. - - Instance Types_assoc_rr a b : Types_first (a,,b) <~~~> Types_first a >>>> Types_first b := - { ni_iso := fun c => iso_inv _ _ (Types_assoc_iso c a b) }. - admit. (* need to add this as an obligation in ProgrammingLanguage class *) - Defined. - - Instance TypesL_PreMonoidal : PreMonoidalCat Types_binoidal [] := - { pmon_assoc := Types_assoc - ; pmon_cancell := Types_cancell - ; pmon_cancelr := Types_cancelr - ; pmon_assoc_rr := Types_assoc_rr - ; pmon_assoc_ll := Types_assoc_ll - }. 
- apply Build_Pentagon. - intros; simpl. - eapply cndr_inert. apply pl_eqv. - apply ndpc_comp. - apply ndpc_comp. - auto. - apply ndpc_comp. - apply ndpc_prod. - apply ndpc_comp. - apply ndpc_comp. - auto. - apply ndpc_comp. - auto. - auto. - auto. - auto. - auto. - auto. - apply ndpc_comp. - apply ndpc_comp. - auto. - apply ndpc_comp. - auto. - auto. - auto. - apply Build_Triangle; intros; simpl. - eapply cndr_inert. apply pl_eqv. - auto. - apply ndpc_comp. - apply ndpc_comp. - auto. - apply ndpc_comp. - auto. - auto. - auto. - eapply cndr_inert. apply pl_eqv. auto. - auto. - intros; simpl; reflexivity. - intros; simpl; reflexivity. - admit. (* assoc is central: need to add this as an obligation in ProgrammingLanguage class *) - admit. (* cancelr is central: need to add this as an obligation in ProgrammingLanguage class *) - admit. (* cancell is central: need to add this as an obligation in ProgrammingLanguage class *) - Defined. - - Definition TypesEnrichedInJudgments : SurjectiveEnrichment. - refine - {| senr_c_pm := TypesL_PreMonoidal - ; senr_v := JudgmentsL - ; senr_v_bin := Judgments_Category_binoidal _ - ; senr_v_pmon := Judgments_Category_premonoidal _ - ; senr_v_mon := Judgments_Category_monoidal _ - ; senr_c_bin := Types_binoidal - ; senr_c := TypesL - |}. - Defined. - - End LanguageCategory. + Coercion pl_eqv : ProgrammingLanguage >-> ContextND_Relation. + Coercion pl_cnd : ProgrammingLanguage >-> ContextND. End Programming_Language. -Implicit Arguments ND [ Judgment ]. + diff --git a/src/ProgrammingLanguageArrow.v b/src/ProgrammingLanguageArrow.v index 5dbce6d..5386d3e 100644 --- a/src/ProgrammingLanguageArrow.v +++ b/src/ProgrammingLanguageArrow.v @@ -30,7 +30,7 @@ Require Import FunctorCategories_ch7_7. Require Import NaturalDeduction. Require Import NaturalDeductionCategory. -Require Import ProgrammingLanguage. +Require Import ProgrammingLanguageCategory. Require Import FreydCategories. Require Import Enrichments. Require Import GeneralizedArrow. diff --git a/src/ProgrammingLanguageCategory.v b/src/ProgrammingLanguageCategory.v new file mode 100644 index 0000000..d415a35 --- /dev/null +++ b/src/ProgrammingLanguageCategory.v @@ -0,0 +1,650 @@ +(*********************************************************************************************************************************) +(* ProgrammingLanguageCategory *) +(* *) +(* The category Types(L) *) +(* *) +(*********************************************************************************************************************************) + +Generalizable All Variables. +Require Import Preamble. +Require Import General. +Require Import Categories_ch1_3. +Require Import InitialTerminal_ch2_2. +Require Import Functors_ch1_4. +Require Import Isomorphisms_ch1_5. +Require Import ProductCategories_ch1_6_1. +Require Import OppositeCategories_ch1_6_2. +Require Import Enrichment_ch2_8. +Require Import Subcategories_ch7_1. +Require Import NaturalTransformations_ch7_4. +Require Import NaturalIsomorphisms_ch7_5. +Require Import BinoidalCategories. +Require Import PreMonoidalCategories. +Require Import MonoidalCategories_ch7_8. +Require Import Coherence_ch7_8. +Require Import Enrichment_ch2_8. +Require Import RepresentableStructure_ch7_2. +Require Import FunctorCategories_ch7_7. + +Require Import NaturalDeduction. +Require Import ProgrammingLanguage. + Export ProgrammingLanguage. + +Require Import NaturalDeductionCategory. + +Open Scope nd_scope. 
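
(* The ProgrammingLanguage.v hunk above adds two explicit Coercions, so a
   ProgrammingLanguage can be handed directly to anything expecting a ContextND
   or ContextND_Relation -- presumably what lets the proofs in this file simply
   say "apply PL".  A standalone picture of that projection-as-coercion idiom,
   with toy records (Point, NamedPoint, norm are illustrative names only): *)

Record Point : Type := { px : nat ; py : nat }.

Record NamedPoint : Type := { npt : Point ; tag : nat }.

(* Declare the projection as a coercion, as the two Coercion commands do. *)
Coercion npt : NamedPoint >-> Point.

Definition norm (p : Point) : nat := px p + py p.

(* npt is inserted automatically: a NamedPoint is accepted where a Point is
   wanted. *)
Definition norm_named (n : NamedPoint) : nat := norm n.
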
+(* I am at a loss to explain why "auto" can't handle this *) +Ltac ndpc_tac := + match goal with + | [ |- NDPredicateClosure ?P (?A ;; ?B) ] => apply ndpc_comp; ndpc_tac + | [ |- NDPredicateClosure ?P (?A ** ?B) ] => apply ndpc_prod; ndpc_tac + | _ => auto + end. + +(* this tactical searches the environment; setoid_rewrite doesn't seem to be able to do that properly sometimes *) +Ltac nd_swap_ltac P EQV := + match goal with + [ |- context [ (?F ** nd_id _) ;; (nd_id _ ** ?G) ] ] => + set (@nd_swap _ _ EQV _ _ _ _ F G) as P + end. + +(* I still wish I knew why "Hint Constructors" doesn't work *) +Hint Extern 5 => apply snd_inert_initial. +Hint Extern 5 => apply snd_inert_cut. +Hint Extern 5 => apply snd_inert_structural. +Hint Extern 5 => apply cnd_inert_initial. +Hint Extern 5 => apply cnd_inert_cut. +Hint Extern 5 => apply cnd_inert_structural. +Hint Extern 5 => apply cnd_inert_cnd_ant_assoc. +Hint Extern 5 => apply cnd_inert_cnd_ant_cossa. +Hint Extern 5 => apply cnd_inert_cnd_ant_cancell. +Hint Extern 5 => apply cnd_inert_cnd_ant_cancelr. +Hint Extern 5 => apply cnd_inert_cnd_ant_llecnac. +Hint Extern 5 => apply cnd_inert_cnd_ant_rlecnac. +Hint Extern 5 => apply cnd_inert_se_expand_left. +Hint Extern 5 => apply cnd_inert_se_expand_right. + +Hint Extern 2 (@Structural _ _ _ _ (@nd_id _ _ [] )) => simpl; auto. +Hint Extern 2 (@Structural _ _ _ _ (@nd_id _ _ [ _ ])) => simpl; auto. + +Section ProgrammingLanguageCategory. + + Context {T : Type}. (* types of the language *) + + Context {Rule : Tree ??(@PLJudg T) -> Tree ??(@PLJudg T) -> Type}. + Notation "cs |= ss" := (@sequent T cs ss) : pl_scope. + + Notation "H /⋯⋯/ C" := (ND Rule H C) : pl_scope. + + Open Scope pf_scope. + Open Scope nd_scope. + Open Scope pl_scope. + + Context (PL:@ProgrammingLanguage T Rule). + + (* category of judgments in a fixed type/coercion context *) + Definition Judgments_cartesian := @Judgments_Category_CartesianCat _ Rule pl_eqv. + + Definition JudgmentsL := Judgments_cartesian. + + Definition identityProof t : [] ~~{JudgmentsL}~~> [t |= t]. + unfold hom; simpl. + apply snd_initial. + Defined. + + Definition cutProof a b c : [a |= b],,[b |= c] ~~{JudgmentsL}~~> [a |= c]. + unfold hom; simpl. + apply snd_cut. + Defined. + + Instance TypesL : ECategory JudgmentsL (Tree ??T) (fun x y => [x|=y]) := + { eid := identityProof + ; ecomp := cutProof + }. + intros; apply (mon_commutative(MonoidalCat:=JudgmentsL)). + intros; apply (mon_commutative(MonoidalCat:=JudgmentsL)). + abstract (intros; unfold identityProof; unfold cutProof; simpl; eapply cndr_inert; auto; apply PL). + abstract (intros; unfold identityProof; unfold cutProof; simpl; eapply cndr_inert; auto; apply PL). + abstract (intros; unfold identityProof; unfold cutProof; simpl; eapply cndr_inert; + [ apply PL | idtac | idtac ]; apply ndpc_comp; auto). + Defined. + + Instance Types_first c : EFunctor TypesL TypesL (fun x => x,,c ) := + { efunc := fun x y => cnd_expand_right(ContextND:=pl_cnd) x y c }. + intros; apply (mon_commutative(MonoidalCat:=JudgmentsL)). + abstract (intros; simpl; apply (cndr_inert pl_cnd); auto). + abstract (intros; unfold ehom; unfold comp; simpl; unfold cutProof; + rewrite <- (@ndr_prod_preserves_comp _ _ PL _ _ (cnd_expand_right _ _ c) _ _ (nd_id1 (b|=c0)) + _ (nd_id1 (a,,c |= b,,c)) _ (cnd_expand_right _ _ c)); + setoid_rewrite (@ndr_comp_right_identity _ _ PL _ [a,, c |= b,, c]); + setoid_rewrite (@ndr_comp_left_identity _ _ PL [b |= c0]); + simpl; eapply cndr_inert; [ apply PL | auto | auto ]). + Defined. 
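
(* ndpc_tac above is a small goal-directed tactic: match on the head symbol of
   the goal, recurse through the ;; and ** constructors, and fall back to auto
   at the leaves.  The same shape on a self-contained toy predicate (Closed and
   closed_tac are illustrative names, not from the development): *)

Inductive Closed (P : nat -> Prop) : list nat -> Prop :=
  | closed_nil  : Closed P nil
  | closed_cons : forall x xs, P x -> Closed P xs -> Closed P (x :: xs).

Ltac closed_tac :=
  match goal with
  | [ |- Closed _ nil      ] => apply closed_nil
  | [ |- Closed _ (_ :: _) ] => apply closed_cons; [ auto | closed_tac ]
  | _                        => auto
  end.

(* The recursion bottoms out in auto, just as ndpc_tac does. *)
Goal forall P : nat -> Prop, P 1 -> P 2 -> Closed P (1 :: 2 :: nil).
Proof. intros; closed_tac. Qed.
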
+ + Instance Types_second c : EFunctor TypesL TypesL (fun x => c,,x) := + { efunc := fun x y => ((@cnd_expand_left _ _ _ _ _ _ x y c)) }. + intros; apply (mon_commutative(MonoidalCat:=JudgmentsL)). + abstract (intros; simpl; apply (cndr_inert pl_cnd); auto). + intros; unfold ehom; unfold comp; simpl; unfold cutProof; + abstract (rewrite <- (@ndr_prod_preserves_comp _ _ PL _ _ (cnd_expand_left _ _ c) _ _ (nd_id1 (b|=c0)) + _ (nd_id1 (c,,a |= c,,b)) _ (cnd_expand_left _ _ c)); + setoid_rewrite (@ndr_comp_right_identity _ _ PL _ [c,,a |= c,,b]); + setoid_rewrite (@ndr_comp_left_identity _ _ PL [b |= c0]); + simpl; eapply cndr_inert; [ apply PL | auto | auto ]). + Defined. + + Instance Types_binoidal : EBinoidalCat TypesL (@T_Branch _) := + { ebc_first := Types_first + ; ebc_second := Types_second + }. + + Instance Types_assoc_iso a b c : Isomorphic(C:=TypesL) ((a,,b),,c) (a,,(b,,c)) := + { iso_forward := snd_initial _ ;; cnd_ant_cossa _ a b c + ; iso_backward := snd_initial _ ;; cnd_ant_assoc _ a b c + }. + abstract (simpl; eapply cndr_inert; unfold identityProof; [ apply PL | idtac | idtac ]; ndpc_tac). + abstract (simpl; eapply cndr_inert; unfold identityProof; [ apply PL | idtac | idtac ]; ndpc_tac). + Defined. + + Instance Types_cancelr_iso a : Isomorphic(C:=TypesL) (a,,[]) a := + { iso_forward := snd_initial _ ;; cnd_ant_rlecnac _ a + ; iso_backward := snd_initial _ ;; cnd_ant_cancelr _ a + }. + abstract (simpl; eapply cndr_inert; unfold identityProof; [ apply PL | idtac | idtac ]; ndpc_tac). + abstract (simpl; eapply cndr_inert; unfold identityProof; [ apply PL | idtac | idtac ]; ndpc_tac). + Defined. + + Instance Types_cancell_iso a : Isomorphic(C:=TypesL) ([],,a) a := + { iso_forward := snd_initial _ ;; cnd_ant_llecnac _ a + ; iso_backward := snd_initial _ ;; cnd_ant_cancell _ a + }. + abstract (simpl; eapply cndr_inert; unfold identityProof; [ apply PL | idtac | idtac ]; ndpc_tac). + abstract (simpl; eapply cndr_inert; unfold identityProof; [ apply PL | idtac | idtac ]; ndpc_tac). + Defined. + + Lemma coincide' : nd_llecnac === nd_rlecnac. + set (coincide (pmon_triangle(PreMonoidalCat:=(Judgments_Category_premonoidal PL)))) as q'. + set (isos_forward_equal_then_backward_equal _ _ q') as qq. + apply qq. + Qed. + + Lemma Types_assoc_lemma : ∀a b X Y : TypesL, + ∀f : X ~~{ TypesL }~~> Y, + #(Types_assoc_iso a X b) >>> (Types_first b >>>> Types_second a) \ f ~~ + (Types_second a >>>> Types_first b) \ f >>> #(Types_assoc_iso a Y b). + intros. + Opaque nd_id. + simpl. + Transparent nd_id. + unfold ehom. + + nd_swap_ltac p PL. + setoid_rewrite p. + clear p. + + repeat setoid_rewrite (@ndr_comp_associativity _ Rule PL). + + setoid_rewrite (@nd_prod_split_left _ Rule PL _ _ _ []). + setoid_rewrite (@nd_prod_split_left _ Rule PL _ _ _ []). + setoid_rewrite (@nd_prod_split_right _ Rule PL _ _ _ []). + setoid_rewrite (@nd_prod_split_right _ Rule PL _ _ _ []). + + setoid_rewrite (@ndr_comp_associativity _ Rule PL). + setoid_rewrite (@ndr_comp_associativity _ Rule PL). + setoid_rewrite <- (@ndr_comp_associativity _ Rule PL). + + set (ni_commutes' (jud_mon_cancelr PL) f) as q. + simpl in q. + setoid_rewrite <- q. + clear q. + + set (ni_commutes' (jud_mon_cancell PL) f) as q. + simpl in q. + setoid_rewrite coincide' in q. + setoid_rewrite <- q. + clear q. + + setoid_rewrite (@ndr_comp_associativity _ Rule PL). + apply ndr_comp_respects; try reflexivity. + + apply (cndr_inert pl_cnd); auto; ndpc_tac; auto. + Qed. 
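
(* Types_assoc_lemma above follows, roughly, the script shared by all the
   coherence lemmas in this file: reassociate compositions with
   ndr_comp_associativity, cancel identities via the cancellation
   isomorphisms, then close the residual structural goal with cndr_inert.
   The same "normalise with the monoid laws" shape on ordinary list append,
   using only standard-library lemmas (the statement itself is purely
   illustrative): *)

Require Import List.

Lemma app_reassoc_example (A : Type) (x y z : list A) :
  (x ++ y) ++ z ++ nil = x ++ (y ++ z).
Proof.
  rewrite app_nil_r.    (* cancel the unit on the left-hand side *)
  rewrite app_assoc.    (* reassociate the right-hand side to match *)
  reflexivity.
Qed.
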
+ + Instance Types_assoc a b : Types_second a >>>> Types_first b <~~~> Types_first b >>>> Types_second a := + { ni_iso := fun c => Types_assoc_iso a c b }. + apply Types_assoc_lemma. + Defined. + + Lemma Types_assoc_ll_lemma : + ∀a b X Y : TypesL, + ∀f : X ~~{ TypesL }~~> Y, + #(Types_assoc_iso a b X) >>> (Types_second b >>>> Types_second a) \ f ~~ + Types_second (a,, b) \ f >>> #(Types_assoc_iso a b Y). + + intros. + Opaque nd_id. + simpl. + Transparent nd_id. + unfold ehom. + nd_swap_ltac p PL. + setoid_rewrite p. + clear p. + + setoid_rewrite (@nd_prod_split_left _ Rule PL _ _ _ []). + setoid_rewrite (@nd_prod_split_left _ Rule PL _ _ _ []). + setoid_rewrite (@nd_prod_split_right _ Rule PL _ _ _ []). + + repeat setoid_rewrite (@ndr_comp_associativity _ Rule PL). + setoid_rewrite <- (@ndr_comp_associativity _ Rule PL). + + set (ni_commutes' (jud_mon_cancelr PL) f) as q. + Opaque nd_id. + simpl in q. + setoid_rewrite <- q. + + clear q. + set (ni_commutes' (jud_mon_cancell PL) f) as q. + simpl in q. + set (coincide (pmon_triangle(PreMonoidalCat:=(Judgments_Category_premonoidal PL)))) as q'. + set (isos_forward_equal_then_backward_equal _ _ q') as qq. + simpl in qq. + setoid_rewrite qq in q. + clear q' qq. + setoid_rewrite <- q. + + setoid_rewrite (@ndr_comp_associativity _ Rule PL). + apply ndr_comp_respects; try reflexivity. + + Transparent nd_id. + apply (cndr_inert pl_cnd); auto; ndpc_tac. + Qed. + + Instance Types_assoc_ll a b : Types_second (a,,b) <~~~> Types_second b >>>> Types_second a := + { ni_iso := fun c => Types_assoc_iso a b c }. + apply Types_assoc_ll_lemma. + Defined. + + Lemma Types_assoc_rr_lemma : + ∀a b A B : TypesL, + ∀f : A ~~{ TypesL }~~> B, + #(Types_assoc_iso A a b) ⁻¹ >>> (Types_first a >>>> Types_first b) \ f ~~ + Types_first (a,, b) \ f >>> #(Types_assoc_iso B a b) ⁻¹. + intros. + Opaque nd_id. + simpl. + Transparent nd_id. + + rename A into X. + rename B into Y. + unfold ehom. + nd_swap_ltac p PL. + setoid_rewrite p. + clear p. + + setoid_rewrite (@nd_prod_split_left _ Rule PL _ _ _ []). + setoid_rewrite (@nd_prod_split_left _ Rule PL _ _ _ []). + setoid_rewrite (@nd_prod_split_right _ Rule PL _ _ _ []). + + repeat setoid_rewrite (@ndr_comp_associativity _ Rule PL). + setoid_rewrite <- (@ndr_comp_associativity _ Rule PL). + + set (ni_commutes' (jud_mon_cancelr PL) f) as q. + Opaque nd_id. + simpl in q. + setoid_rewrite <- q. + + clear q. + set (ni_commutes' (jud_mon_cancell PL) f) as q. + simpl in q. + set (coincide (pmon_triangle(PreMonoidalCat:=(Judgments_Category_premonoidal PL)))) as q'. + set (isos_forward_equal_then_backward_equal _ _ q') as qq. + simpl in qq. + setoid_rewrite qq in q. + clear q' qq. + setoid_rewrite <- q. + + setoid_rewrite (@ndr_comp_associativity _ Rule PL). + apply ndr_comp_respects; try reflexivity. + + Transparent nd_id. + apply (cndr_inert pl_cnd); auto; ndpc_tac. + Qed. + + Instance Types_assoc_rr a b : Types_first (a,,b) <~~~> Types_first a >>>> Types_first b := + { ni_iso := fun c => iso_inv _ _ (Types_assoc_iso c a b) }. + apply Types_assoc_rr_lemma. + Defined. + + Lemma Types_cancelr_lemma : + ∀A B : TypesL, + ∀f : A ~~{ TypesL }~~> B, + #(Types_cancelr_iso A) >>> functor_id TypesL \ f ~~ + Types_first [] \ f >>> #(Types_cancelr_iso B). + intros. + Opaque nd_id. + simpl. + unfold ehom. + nd_swap_ltac p PL. + setoid_rewrite p. + setoid_rewrite (@nd_prod_split_right _ Rule PL _ _ _ []). + repeat setoid_rewrite (@ndr_comp_associativity _ Rule PL). + setoid_rewrite <- (@ndr_comp_associativity _ Rule PL). 
+ + set (ni_commutes' (jud_mon_cancelr PL) f) as q. + Opaque nd_id. + simpl in q. + setoid_rewrite <- q. + clear q. + + set (ni_commutes' (jud_mon_cancell PL) f) as q. + simpl in q. + set (coincide (pmon_triangle(PreMonoidalCat:=(Judgments_Category_premonoidal PL)))) as q'. + set (isos_forward_equal_then_backward_equal _ _ q') as qq. + simpl in qq. + setoid_rewrite qq in q. + clear q' qq. + setoid_rewrite <- q. + + setoid_rewrite (@ndr_comp_associativity _ Rule PL). + apply ndr_comp_respects; try reflexivity. + Transparent nd_id. + simpl. + apply (cndr_inert pl_cnd); auto; ndpc_tac. + Qed. + + Instance Types_cancelr : Types_first [] <~~~> functor_id _ := + { ni_iso := Types_cancelr_iso }. + apply Types_cancelr_lemma. + Defined. + + Lemma Types_cancell_lemma : + ∀A B : TypesL, + ∀f : A ~~{ TypesL }~~> B, + #(Types_cancell_iso A) >>> functor_id TypesL \ f ~~ + Types_second [] \ f >>> #(Types_cancell_iso B). + intros. + Opaque nd_id. + simpl. + unfold ehom. + nd_swap_ltac p PL. + setoid_rewrite p. + setoid_rewrite (@nd_prod_split_right _ Rule PL _ _ _ []). + repeat setoid_rewrite (@ndr_comp_associativity _ Rule PL). + setoid_rewrite <- (@ndr_comp_associativity _ Rule PL). + + set (ni_commutes' (jud_mon_cancelr PL) f) as q. + Opaque nd_id. + simpl in q. + setoid_rewrite <- q. + clear q. + + set (ni_commutes' (jud_mon_cancell PL) f) as q. + simpl in q. + set (coincide (pmon_triangle(PreMonoidalCat:=(Judgments_Category_premonoidal PL)))) as q'. + set (isos_forward_equal_then_backward_equal _ _ q') as qq. + simpl in qq. + setoid_rewrite qq in q. + clear q' qq. + setoid_rewrite <- q. + setoid_rewrite (@ndr_comp_associativity _ Rule PL). + + apply ndr_comp_respects; try reflexivity. + Transparent nd_id. + simpl. + apply (cndr_inert pl_cnd); auto; ndpc_tac. + Qed. + + Instance Types_cancell : Types_second [] <~~~> functor_id _ := + { ni_iso := Types_cancell_iso }. + apply Types_cancell_lemma. + Defined. + + Lemma TypesL_assoc_central a b c : CentralMorphism(H:=Types_binoidal) #((Types_assoc a b) c). + intros. + apply Build_CentralMorphism. + intros. + unfold bin_obj. + unfold ebc_bobj. + Opaque nd_id. + simpl. + unfold ehom. + nd_swap_ltac p PL. + setoid_rewrite p. + clear p. + setoid_rewrite (@nd_prod_split_left _ Rule PL _ _ _ []). + setoid_rewrite (@nd_prod_split_right _ Rule PL _ _ _ []). + repeat setoid_rewrite (@ndr_comp_associativity _ Rule PL). + setoid_rewrite <- (@ndr_comp_associativity _ Rule PL). + + set (ni_commutes' (jud_mon_cancelr PL) g) as q. + Opaque nd_id. + simpl in q. + setoid_rewrite <- q. + clear q. + + set (ni_commutes' (jud_mon_cancell PL) g) as q. + simpl in q. + set (coincide (pmon_triangle(PreMonoidalCat:=(Judgments_Category_premonoidal PL)))) as q'. + set (isos_forward_equal_then_backward_equal _ _ q') as qq. + simpl in qq. + setoid_rewrite qq in q. + clear q' qq. + setoid_rewrite <- q. + + setoid_rewrite (@ndr_comp_associativity _ Rule PL). + apply ndr_comp_respects. + reflexivity. + + Transparent nd_id. + apply (cndr_inert pl_cnd); auto; ndpc_tac. + + Opaque nd_id. + intros. + unfold bin_obj. + unfold ebc_bobj. + simpl. + unfold ehom. + symmetry. + nd_swap_ltac p PL. + setoid_rewrite p. + clear p. + setoid_rewrite (@nd_prod_split_left _ Rule PL _ _ _ []). + setoid_rewrite (@nd_prod_split_right _ Rule PL _ _ _ []). + repeat setoid_rewrite (@ndr_comp_associativity _ Rule PL). + setoid_rewrite <- (@ndr_comp_associativity _ Rule PL). + + set (ni_commutes' (jud_mon_cancelr PL) g) as q. + Opaque nd_id. + simpl in q. + setoid_rewrite <- q. + clear q. 
+ + set (ni_commutes' (jud_mon_cancell PL) g) as q. + simpl in q. + set (coincide (pmon_triangle(PreMonoidalCat:=(Judgments_Category_premonoidal PL)))) as q'. + set (isos_forward_equal_then_backward_equal _ _ q') as qq. + simpl in qq. + setoid_rewrite qq in q. + clear q' qq. + setoid_rewrite <- q. + + setoid_rewrite (@ndr_comp_associativity _ Rule PL). + apply ndr_comp_respects. + reflexivity. + + Transparent nd_id. + apply (cndr_inert pl_cnd); auto; ndpc_tac. + Qed. + + Lemma TypesL_cancell_central a : CentralMorphism(H:=Types_binoidal) #(Types_cancell a). + intros. + apply Build_CentralMorphism. + Opaque nd_id. + intros. + unfold bin_obj. + unfold ebc_bobj. + simpl. + unfold ehom. + nd_swap_ltac p PL. + setoid_rewrite p. + clear p. + setoid_rewrite (@nd_prod_split_left _ Rule PL _ _ _ []). + setoid_rewrite (@nd_prod_split_right _ Rule PL _ _ _ []). + repeat setoid_rewrite (@ndr_comp_associativity _ Rule PL). + setoid_rewrite <- (@ndr_comp_associativity _ Rule PL). + + set (ni_commutes' (jud_mon_cancelr PL) g) as q. + Opaque nd_id. + simpl in q. + setoid_rewrite <- q. + clear q. + + set (ni_commutes' (jud_mon_cancell PL) g) as q. + simpl in q. + set (coincide (pmon_triangle(PreMonoidalCat:=(Judgments_Category_premonoidal PL)))) as q'. + set (isos_forward_equal_then_backward_equal _ _ q') as qq. + simpl in qq. + setoid_rewrite qq in q. + clear q' qq. + setoid_rewrite <- q. + + setoid_rewrite (@ndr_comp_associativity _ Rule PL). + apply ndr_comp_respects. + reflexivity. + + Transparent nd_id. + apply (cndr_inert pl_cnd); auto; ndpc_tac. + + Opaque nd_id. + intros. + unfold bin_obj. + unfold ebc_bobj. + simpl. + unfold ehom. + symmetry. + nd_swap_ltac p PL. + setoid_rewrite p. + clear p. + setoid_rewrite (@nd_prod_split_left _ Rule PL _ _ _ []). + setoid_rewrite (@nd_prod_split_right _ Rule PL _ _ _ []). + repeat setoid_rewrite (@ndr_comp_associativity _ Rule PL). + setoid_rewrite <- (@ndr_comp_associativity _ Rule PL). + + set (ni_commutes' (jud_mon_cancelr PL) g) as q. + Opaque nd_id. + simpl in q. + setoid_rewrite <- q. + clear q. + + set (ni_commutes' (jud_mon_cancell PL) g) as q. + simpl in q. + set (coincide (pmon_triangle(PreMonoidalCat:=(Judgments_Category_premonoidal PL)))) as q'. + set (isos_forward_equal_then_backward_equal _ _ q') as qq. + simpl in qq. + setoid_rewrite qq in q. + clear q' qq. + setoid_rewrite <- q. + + setoid_rewrite (@ndr_comp_associativity _ Rule PL). + apply ndr_comp_respects. + reflexivity. + + Transparent nd_id. + apply (cndr_inert pl_cnd); auto; ndpc_tac. + Qed. + + Lemma TypesL_cancelr_central a : CentralMorphism(H:=Types_binoidal) #(Types_cancelr a). + intros. + apply Build_CentralMorphism. + Opaque nd_id. + intros. + unfold bin_obj. + unfold ebc_bobj. + simpl. + unfold ehom. + nd_swap_ltac p PL. + setoid_rewrite p. + clear p. + setoid_rewrite (@nd_prod_split_left _ Rule PL _ _ _ []). + setoid_rewrite (@nd_prod_split_right _ Rule PL _ _ _ []). + repeat setoid_rewrite (@ndr_comp_associativity _ Rule PL). + setoid_rewrite <- (@ndr_comp_associativity _ Rule PL). + + set (ni_commutes' (jud_mon_cancelr PL) g) as q. + Opaque nd_id. + simpl in q. + setoid_rewrite <- q. + clear q. + + set (ni_commutes' (jud_mon_cancell PL) g) as q. + simpl in q. + set (coincide (pmon_triangle(PreMonoidalCat:=(Judgments_Category_premonoidal PL)))) as q'. + set (isos_forward_equal_then_backward_equal _ _ q') as qq. + simpl in qq. + setoid_rewrite qq in q. + clear q' qq. + setoid_rewrite <- q. + + setoid_rewrite (@ndr_comp_associativity _ Rule PL). + apply ndr_comp_respects. 
+ reflexivity. + + Transparent nd_id. + apply (cndr_inert pl_cnd); auto; ndpc_tac. + + Opaque nd_id. + intros. + unfold bin_obj. + unfold ebc_bobj. + simpl. + unfold ehom. + symmetry. + nd_swap_ltac p PL. + setoid_rewrite p. + clear p. + setoid_rewrite (@nd_prod_split_left _ Rule PL _ _ _ []). + setoid_rewrite (@nd_prod_split_right _ Rule PL _ _ _ []). + repeat setoid_rewrite (@ndr_comp_associativity _ Rule PL). + setoid_rewrite <- (@ndr_comp_associativity _ Rule PL). + + set (ni_commutes' (jud_mon_cancelr PL) g) as q. + Opaque nd_id. + simpl in q. + setoid_rewrite <- q. + clear q. + + set (ni_commutes' (jud_mon_cancell PL) g) as q. + simpl in q. + set (coincide (pmon_triangle(PreMonoidalCat:=(Judgments_Category_premonoidal PL)))) as q'. + set (isos_forward_equal_then_backward_equal _ _ q') as qq. + simpl in qq. + setoid_rewrite qq in q. + clear q' qq. + setoid_rewrite <- q. + + setoid_rewrite (@ndr_comp_associativity _ Rule PL). + apply ndr_comp_respects. + reflexivity. + + Transparent nd_id. + apply (cndr_inert pl_cnd); auto; ndpc_tac. + Qed. + + Instance TypesL_PreMonoidal : PreMonoidalCat Types_binoidal [] := + { pmon_assoc := Types_assoc + ; pmon_cancell := Types_cancell + ; pmon_cancelr := Types_cancelr + ; pmon_assoc_rr := Types_assoc_rr + ; pmon_assoc_ll := Types_assoc_ll + }. + abstract (apply Build_Pentagon; intros; simpl; eapply cndr_inert; try apply PL; ndpc_tac). + abstract (apply Build_Triangle; intros; simpl; eapply cndr_inert; try apply PL; ndpc_tac). + intros; simpl; reflexivity. + intros; simpl; reflexivity. + apply TypesL_assoc_central. + apply TypesL_cancelr_central. + apply TypesL_cancell_central. + Defined. + +End ProgrammingLanguageCategory. + diff --git a/src/ProgrammingLanguageEnrichment.v b/src/ProgrammingLanguageEnrichment.v new file mode 100644 index 0000000..332a8ba --- /dev/null +++ b/src/ProgrammingLanguageEnrichment.v @@ -0,0 +1,52 @@ +(*********************************************************************************************************************************) +(* ProgrammingLanguageEnrichment *) +(* *) +(* Types are enriched in Judgments. *) +(* *) +(*********************************************************************************************************************************) + +Generalizable All Variables. +Require Import Preamble. +Require Import General. +Require Import Categories_ch1_3. +Require Import InitialTerminal_ch2_2. +Require Import Functors_ch1_4. +Require Import Isomorphisms_ch1_5. +Require Import ProductCategories_ch1_6_1. +Require Import OppositeCategories_ch1_6_2. +Require Import Enrichment_ch2_8. +Require Import Subcategories_ch7_1. +Require Import NaturalTransformations_ch7_4. +Require Import NaturalIsomorphisms_ch7_5. +Require Import BinoidalCategories. +Require Import PreMonoidalCategories. +Require Import MonoidalCategories_ch7_8. +Require Import Coherence_ch7_8. +Require Import Enrichment_ch2_8. +Require Import RepresentableStructure_ch7_2. +Require Import FunctorCategories_ch7_7. + +Require Import Enrichments. +Require Import NaturalDeduction. +Require Import NaturalDeductionCategory. +Require Import ProgrammingLanguageCategory. + Export ProgrammingLanguageCategory. + +Section ProgrammingLanguageEnrichment. + + Context `(PL:ProgrammingLanguage). + + Definition TypesEnrichedInJudgments : SurjectiveEnrichment. 
+ refine + {| senr_c_pm := TypesL_PreMonoidal PL + ; senr_v := JudgmentsL PL + ; senr_v_bin := Judgments_Category_binoidal _ + ; senr_v_pmon := Judgments_Category_premonoidal _ + ; senr_v_mon := Judgments_Category_monoidal _ + ; senr_c_bin := Types_binoidal PL + ; senr_c := TypesL PL + |}. + Defined. + +End ProgrammingLanguageEnrichment. + diff --git a/src/ProgrammingLanguageFlattening.v b/src/ProgrammingLanguageFlattening.v index 4438dd2..4c2a66b 100644 --- a/src/ProgrammingLanguageFlattening.v +++ b/src/ProgrammingLanguageFlattening.v @@ -27,7 +27,7 @@ Require Import Reification. Require Import NaturalDeduction. Require Import NaturalDeductionCategory. Require Import GeneralizedArrow. -Require Import ProgrammingLanguage. +Require Import ProgrammingLanguageEnrichment. Require Import ProgrammingLanguageReification. Require Import SectionRetract_ch2_4. Require Import GeneralizedArrowFromReification. diff --git a/src/ProgrammingLanguageGeneralizedArrow.v b/src/ProgrammingLanguageGeneralizedArrow.v index bed54b6..8fe0391 100644 --- a/src/ProgrammingLanguageGeneralizedArrow.v +++ b/src/ProgrammingLanguageGeneralizedArrow.v @@ -30,7 +30,7 @@ Require Import NaturalDeductionCategory. Require Import Enrichments. Require Import Reification. Require Import GeneralizedArrow. -Require Import ProgrammingLanguage. +Require Import ProgrammingLanguageEnrichment. Section ProgrammingLanguageGeneralizedArrow. diff --git a/src/ProgrammingLanguageReification.v b/src/ProgrammingLanguageReification.v index 932c517..c1a06d8 100644 --- a/src/ProgrammingLanguageReification.v +++ b/src/ProgrammingLanguageReification.v @@ -28,6 +28,7 @@ Require Import Reification. Require Import NaturalDeduction. Require Import NaturalDeductionCategory. Require Import ProgrammingLanguage. +Require Import ProgrammingLanguageCategory. Require Import Enrichments. Section ProgrammingLanguageReification. diff --git a/src/categories b/src/categories index 0ecd73c..422dab8 160000 --- a/src/categories +++ b/src/categories @@ -1 +1 @@ -Subproject commit 0ecd73c172f67634fa956fb52b332e6effb5a04d +Subproject commit 422dab8d300548c294b95c0f4bbf27aecadbd745
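
(* Throughout the new files above, records such as TypesEnrichedInJudgments
   and TypesL_PreMonoidal are built the same way: supply the computational
   fields with refine {| ... |}, then discharge the remaining proof
   obligations with tactics, wrapping them in abstract so the proof parts stay
   opaque.  A self-contained miniature of that idiom (the Monoid record and
   plus_monoid are illustrative, not part of the development): *)

Require Import Arith.

Record Monoid (A : Type) (op : A -> A -> A) (e : A) : Prop :=
  { m_assoc : forall x y z, op x (op y z) = op (op x y) z
  ; m_idl   : forall x, op e x = x
  ; m_idr   : forall x, op x e = x
  }.

Definition plus_monoid : Monoid nat plus 0.
Proof.
  (* fields first, proofs as leftover goals -- the same pattern as the
     refine {| senr_c_pm := ... |} above *)
  refine {| m_assoc := _ ; m_idl := _ ; m_idr := _ |}.
  abstract (intros; apply plus_assoc).
  abstract (intros; reflexivity).
  abstract (intros; symmetry; apply plus_n_O).
Defined.
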