code | repo_name | path | license | size | n_ast_errors | ast_max_depth | n_whitespaces | n_ast_nodes | n_ast_terminals | n_ast_nonterminals | loc | cycloplexity
---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE ViewPatterns, ScopedTypeVariables, FlexibleContexts #-}
module Language.SecreC.TypeChecker.Statement where
import Language.SecreC.Vars
import Language.SecreC.Utils as Utils
import Language.SecreC.Monad
import Language.SecreC.Syntax
import Language.SecreC.Parser.Tokens
import Language.SecreC.Pretty
import Language.SecreC.Location
import Language.SecreC.Position
import Language.SecreC.Error
import Language.SecreC.TypeChecker.Base
import {-# SOURCE #-} Language.SecreC.TypeChecker.Expression
import {-# SOURCE #-} Language.SecreC.TypeChecker.Type
import {-# SOURCE #-} Language.SecreC.TypeChecker.Constraint
import Language.SecreC.Prover.Base
import Language.SecreC.TypeChecker.Environment
import Data.Bifunctor
import Data.Traversable
import qualified Data.Foldable as Foldable
import Data.Set (Set(..))
import qualified Data.Set as Set
import Data.Map (Map(..))
import qualified Data.Map as Map
import Data.Int
import Data.Word
import Data.Maybe
import Safe
import Text.PrettyPrint hiding (equals)
import Control.Monad hiding (mapM)
import Control.Monad.IO.Class
import Control.Monad.State.Strict as State
import Control.Monad.Reader as Reader
import Prelude hiding (mapM)
-- | Left-biased merge of two @StmtClass@es
extendStmtClasses :: Set StmtClass -> Set StmtClass -> Set StmtClass
extendStmtClasses s1 s2 = (Set.filter (not . isStmtFallthru) s1) `Set.union` s2
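-- A small illustrative example (not from the original source): only the
-- non-fallthrough classes of the first argument survive the merge, e.g.
--
-- > extendStmtClasses (Set.fromList [StmtReturn, StmtFallthru (ComplexT Void)])
-- >                   (Set.fromList [StmtBreak])
-- > == Set.fromList [StmtReturn, StmtBreak]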
tcStmtBlock :: ProverK loc m => loc -> String -> TcM m a -> TcM m a
tcStmtBlock l msg m = tcProgress (Just $ locpos l) (Just msg) $ do
doResolve <- getDoResolve
debugTc $ liftIO $ putStrLn $ "tcStmtBlock " ++ pprid (locpos l) ++ " " ++ show doResolve
if doResolve
then tcNew (locpos l) msg $ do
x <- m
solveTop l msg
return x
else tcAddDeps l msg m
tcStmtsRet :: ProverK loc m => loc -> Type -> [Statement Identifier loc] -> TcM m [Statement GIdentifier (Typed loc)]
tcStmtsRet l ret ss = do
(ss',StmtType st) <- tcStmts ret ss
isReturnStmt l st ret
return ss'
isReturnStmt :: (ProverK loc m) => loc -> Set StmtClass -> Type -> TcM m ()
isReturnStmt l cs ret = do
ppret <- pp ret
ppcs <- pp cs
addErrorM l (\err -> TypecheckerError (locpos l) $ NoReturnStatement (ppret <+> ppcs) $ Just err) $ mapM_ aux $ Set.toList cs
where
aux StmtReturn = return ()
aux (StmtFallthru t) = do
pp1 <- pp ret
pp2 <- pp t
addErrorM l (TypecheckerError (locpos l) . (EqualityException "expression") (pp1) (pp2) . Just) $
tcCstrM_ l $ Unifies ret t
aux x = genTcError (locpos l) False $ text "Unexpected return class"
tcStmts :: (ProverK loc m) => Type -> [Statement Identifier loc] -> TcM m ([Statement GIdentifier (Typed loc)],Type)
tcStmts ret [] = return ([],StmtType $ Set.singleton $ StmtFallthru $ ComplexT Void)
tcStmts ret [s] = do
(s',StmtType c) <- tcStmtBlock (loc s) "single statement" $ tcStmt ret s
return ([s'],StmtType c)
tcStmts ret (s:ss) = do
(s',StmtType c) <- tcStmtBlock (loc s) "middle statement" $ tcStmt ret s
-- if the following statements are never executed, issue an error
case ss of
[] -> return ()
ss -> unless (hasStmtFallthru c) $ do
ppss <- mapM pp ss
tcError (locpos $ loc (head ss)) $ UnreachableDeadCode (vcat ppss)
(ss',StmtType cs) <- tcStmts ret ss
sBvs <- liftM (Map.keysSet . Map.filter (\b -> not b)) $ bvs $ bimap mkVarId (const ()) s
ssFvs <- fvsSet $ map (bimap mkVarId (const ())) ss
-- issue warning for unused variable declarations
forSetM_ (sBvs `Set.difference` ssFvs) $ \(v::VarIdentifier) -> do
ppv <- pp v
tcWarn (locpos $ loc s) $ UnusedVariable (ppv)
return (s':ss',StmtType $ extendStmtClasses c cs)
-- | Typecheck a non-empty statement
tcNonEmptyStmt :: (ProverK loc m) => Type -> Statement Identifier loc -> TcM m (Statement GIdentifier (Typed loc),Type)
tcNonEmptyStmt ret s = do
r@(s',StmtType cs) <- tcStmtBlock (loc s) "non-empty statement" $ tcStmt ret s
when (Set.null cs) $ do
pps <- pp s
tcWarn (locpos $ loc s) $ EmptyBranch (pps)
return r
-- | Typecheck a statement in the body of a loop
tcLoopBodyStmt :: (ProverK loc m) => Type -> loc -> Statement Identifier loc -> TcM m (Statement GIdentifier (Typed loc),Type)
tcLoopBodyStmt ret l s = do
(s',StmtType cs) <- tcStmtBlock l "loop body statement" $ tcStmt ret s
-- check that the body can perform more than one iteration
when (Set.null $ Set.filter isIterationStmtClass cs) $ do
pps <- pp s
tcWarn (locpos l) $ SingleIterationLoop (pps)
-- return the @StmtClass@ for the whole loop
let t' = StmtType $ Set.insert (StmtFallthru $ ComplexT Void) (Set.filter (\c -> not (isLoopStmtClass c) && not (isStmtFallthru c)) cs)
return (s',t')
-- | Typechecks a @Statement@
tcStmt :: (ProverK loc m) => Type -- ^ return type
-> Statement Identifier loc -- ^ input statement
-> TcM m (Statement GIdentifier (Typed loc),Type)
tcStmt ret (CompoundStatement l s) = tcStmtBlock l "compound statement" $ do
(ss',t) <- tcLocal l "tcStmt compound" $ tcStmts ret s
return (CompoundStatement (Typed l t) ss',t)
tcStmt ret (IfStatement l condE thenS Nothing) = tcStmtBlock l "if statement" $ do
condE' <- tcStmtBlock l "if statement guard" $ tcGuard condE
(thenS',StmtType cs) <- tcLocal l "tcStmt if then" $ tcNonEmptyStmt ret thenS
-- an if statement falls through if the condition is not satisfied
let t = StmtType $ Set.insert (StmtFallthru $ ComplexT Void) cs
return (IfStatement (notTyped "tcStmt" l) condE' thenS' Nothing,t)
tcStmt ret (IfStatement l condE thenS (Just elseS)) = tcStmtBlock l "if statement" $ do
condE' <- tcStmtBlock l "if statement" $ tcGuard condE
(thenS',StmtType cs1) <- tcLocal l "tcStmt if then" $ tcNonEmptyStmt ret thenS
(elseS',StmtType cs2) <- tcLocal l "tcStmt if else" $ tcNonEmptyStmt ret elseS
let t = StmtType $ cs1 `Set.union` cs2
return (IfStatement (notTyped "tcStmt" l) condE' thenS' (Just elseS'),t)
tcStmt ret (ForStatement l startE whileE incE ann bodyS) = tcStmtBlock l "for statement" $ tcLocal l "tcStmt for" $ do
((startE',whileE',incE',ann',bodyS',t'),rs,ws) <- withDecClassVars $ do
startE' <- tcStmtBlock l "for statement" $ tcForInitializer startE
whileE' <- tcStmtBlock l "for statement guard" $ mapM (tcGuard) whileE
incE' <- withExprC ReadWriteExpr $ tcStmtBlock l "for statement increment" $ mapM (tcExpr Nothing) incE
ann' <- mapM tcLoopAnn ann
(bodyS',t') <- tcLocal l "tcStmt for body" $ tcLoopBodyStmt ret l bodyS
return (startE',whileE',incE',ann',bodyS',t')
debugTc $ do
ppl <- ppr l
pprs <- pp rs
ppws <- pp ws
locals <- State.gets (Map.keys . localVars)
pplocals <- pp locals
liftIO $ putStrLn $ "whileT " ++ ppl ++ ": " ++ show pprs ++ " : " ++ show ppws ++ "\n" ++ show pplocals
return (ForStatement (Typed l $ WhileT rs ws) startE' whileE' incE' ann' bodyS',t')
tcStmt ret (WhileStatement l condE ann bodyS) = tcStmtBlock l "while statement" $ do
((ann',condE',bodyS',t'),rs,ws) <- withDecClassVars $ do
ann' <- mapM tcLoopAnn ann
condE' <- tcStmtBlock l "while statement" $ tcGuard condE
(bodyS',t') <- tcLocal l "tcStmt while body" $ tcLoopBodyStmt ret l bodyS
return (ann',condE',bodyS',t')
debugTc $ do
ppl <- ppr l
pprs <- pp rs
ppws <- pp ws
liftIO $ putStrLn $ "whileT " ++ ppl ++ ": " ++ show pprs ++ " : " ++ show ppws
return (WhileStatement (Typed l $ WhileT rs ws) condE' ann' bodyS',t')
tcStmt ret (PrintStatement (l::loc) argsE) = tcStmtBlock l "print statement" $ do
argsE' <- withExprC ReadOnlyExpr $ mapM (tcVariadicArg (tcExpr Nothing)) argsE
xs <- forM argsE' $ \argE' -> do
tx <- newTyVar True False Nothing
pparg <- ppVariadicArg pp argE'
newTypedVar "print" tx False $ Just pparg
topTcCstrM_ l $ SupportedPrint (map (\(x,y) -> (False,Left $ fmap typed x,y)) argsE') xs
let exs = map (fmap (Typed l) . varExpr) xs
let t = StmtType $ Set.singleton $ StmtFallthru $ ComplexT Void
return (PrintStatement (Typed l t) (zip exs $ map snd argsE'),t)
tcStmt ret (DowhileStatement l ann bodyS condE) = tcStmtBlock l "do while statement" $ tcLocal l "tcStmt dowhile" $ do
((ann',bodyS',t',condE'),rs,ws) <- withDecClassVars $ do
ann' <- mapM tcLoopAnn ann
(bodyS',t') <- tcLoopBodyStmt ret l bodyS
condE' <- tcGuard condE
return (ann',bodyS',t',condE')
debugTc $ do
ppl <- ppr l
pprs <- pp rs
ppws <- pp ws
liftIO $ putStrLn $ "whileT " ++ ppl ++ ": " ++ show pprs ++ " : " ++ show ppws
return (DowhileStatement (Typed l $ WhileT rs ws) ann' bodyS' condE',t')
tcStmt ret (AssertStatement l argE) = tcStmtBlock l "assert statement" $ do
(argE',cstrsargE) <- tcWithCstrs l "assert" $ tcGuard argE
opts <- askOpts
when (checkAssertions opts) $ topCheckCstrM_ l cstrsargE $ CheckAssertion $ fmap typed argE'
tryAddHypothesis l "tcStmt assert" LocalScope checkAssertions cstrsargE $ HypCondition $ fmap typed argE'
let t = StmtType $ Set.singleton $ StmtFallthru $ ComplexT Void
return (AssertStatement (notTyped "tcStmt" l) argE',t)
tcStmt ret (SyscallStatement l n args) = tcStmtBlock l "syscall statement" $ do
args' <- mapM tcSyscallParam args
let t = StmtType $ Set.singleton $ StmtFallthru $ ComplexT Void
isSupportedSyscall l n $ map (typed . loc) args'
return (SyscallStatement (Typed l t) n args',t)
tcStmt ret (VarStatement l decl) = tcStmtBlock l "var statement" $ do
decl' <- tcVarDecl LocalScope decl
let t = StmtType (Set.singleton $ StmtFallthru $ ComplexT Void)
return (VarStatement (notTyped "tcStmt" l) decl',t)
tcStmt ret (ReturnStatement l Nothing) = tcStmtBlock l "return statement" $ do
topTcCstrM_ l $ Unifies (ComplexT Void) ret
let t = StmtType (Set.singleton $ StmtReturn)
let ret = ReturnStatement (Typed l t) Nothing
return (ret,t)
tcStmt ret (ReturnStatement l (Just e)) = tcStmtBlock l "return statement" $ do
e' <- withExprC ReadWriteExpr $ tcExpr Nothing e
let et' = typed $ loc e'
ppe <- pp e
x <- tcCoerces l True Nothing (fmap typed e') ret
let t = StmtType (Set.singleton $ StmtReturn)
let ex = fmap (Typed l) x
let ret = ReturnStatement (Typed l t) (Just ex)
return (ret,t)
tcStmt ret (ContinueStatement l) = tcStmtBlock l "continue statement" $ do
let t = StmtType (Set.singleton StmtContinue)
return (ContinueStatement $ Typed l t,t)
tcStmt ret (BreakStatement l) = tcStmtBlock l "break statement" $ do
let t = StmtType (Set.singleton StmtBreak)
return (BreakStatement $ Typed l t,t)
tcStmt ret (ExpressionStatement l e) = tcStmtBlock l "expression statement" $ do
e' <- withExprC ReadWriteExpr $ tcExpr Nothing e
let te = typed $ loc e'
--case e of
-- BinaryAssign {} -> return ()
-- otherwise -> topTcCstrM_ l $ Unifies te (ComplexT Void)
let t = StmtType (Set.singleton $ StmtFallthru te)
return (ExpressionStatement (Typed l t) e',t)
tcStmt ret (AnnStatement l ann) = tcStmtBlock l "annotation statement" $ do
(ann') <- mapM tcStmtAnn ann
let t = StmtType $ Set.singleton $ StmtFallthru $ ComplexT Void
return (AnnStatement (Typed l t) ann',t)
tcStmt ret qs@(QuantifiedStatement l q vs anns s) = tcStmtBlock l "quantified statement" $ do
ppqs <- pp qs
onlyAnn l ppqs $ tcLocal l "tcStmt quant" $ do
q' <- tcQuantifier q
vs' <- mapM (tcQVar l) vs
mapM_ checkEnsures anns
anns' <- tcProcedureAnns False anns
(s',t) <- tcStmts (ComplexT Void) s
topTcCstrM_ l $ Unifies (ComplexT Void) ret
return (QuantifiedStatement (Typed l t) q' vs' anns' s',t)
checkEnsures :: (PP (TcM m) iden,ProverK loc m) => ProcedureAnnotation iden loc -> TcM m ()
checkEnsures (EnsuresAnn {}) = return ()
checkEnsures ann = do
ppann <- pp ann
genTcError (locpos $ loc ann) False $ text "unsupported annotation in quantified statement: " <+> ppann
tcLoopAnn :: ProverK loc m => LoopAnnotation Identifier loc -> TcM m (LoopAnnotation GIdentifier (Typed loc))
tcLoopAnn (DecreasesAnn l isFree e) = tcStmtBlock l "decreases annotation" $ insideAnnotation $ withKind FKind $ withLeak False $ do
(e') <- tcAnnExpr Nothing e
return $ DecreasesAnn (Typed l $ typed $ loc e') isFree e'
tcLoopAnn (InvariantAnn l isFree isLeak e) = tcStmtBlock l "invariant annotation" $ insideAnnotation $ do
(isLeak',e') <- checkLeakMb l isLeak $ withKind FKind $ tcAnnGuard e
return $ InvariantAnn (Typed l $ typed $ loc e') isFree isLeak' e'
tcStmtAnn :: (ProverK loc m) => StatementAnnotation Identifier loc -> TcM m (StatementAnnotation GIdentifier (Typed loc))
tcStmtAnn (AssumeAnn l isLeak e) = tcStmtBlock l "annotation statement" $ insideAnnotation $ do
(isLeak',e') <- checkLeakMb l isLeak $ withKind FKind $ tcAnnGuard e
return $ AssumeAnn (Typed l $ typed $ loc e') isLeak' e'
tcStmtAnn (AssertAnn l isLeak e) = tcStmtBlock l "annotation statement" $ insideAnnotation $ do
(isLeak',e') <- checkLeakMb l isLeak $ withKind FKind $ tcAnnGuard e
return $ AssertAnn (Typed l $ typed $ loc e') isLeak' e'
tcStmtAnn (EmbedAnn l isLeak e) = tcStmtBlock l "annotation statement" $ insideAnnotation $ do
(isLeak',(e',t)) <- checkLeakMb l isLeak $ withKind PKind $ tcStmt (ComplexT Void) e
return $ EmbedAnn (Typed l t) isLeak' e'
isSupportedSyscall :: (Monad m,Location loc) => loc -> Identifier -> [Type] -> TcM m ()
isSupportedSyscall l n args = return () -- TODO: check specific syscalls?
tcSyscallParam :: (ProverK loc m) => SyscallParameter Identifier loc -> TcM m (SyscallParameter GIdentifier (Typed loc))
tcSyscallParam (SyscallPush l e) = do
e' <- withExprC ReadWriteExpr $ tcVariadicArg (tcExpr Nothing) e
let t = SysT $ SysPush $ typed $ loc $ fst e'
return $ SyscallPush (Typed l t) e'
tcSyscallParam (SyscallReturn l v) = do
v' <- tcVarName False v
let t = SysT $ SysRet $ typed $ loc v'
return $ SyscallReturn (Typed l t) v'
tcSyscallParam (SyscallPushRef l v) = do
v' <- tcVarName False v
let t = SysT $ SysRef $ typed $ loc v'
return $ SyscallPushRef (Typed l t) v'
tcSyscallParam (SyscallPushCRef l e) = do
e' <- withExprC ReadWriteExpr $ tcExpr Nothing e
let t = SysT $ SysCRef $ typed $ loc e'
return $ SyscallPushCRef (Typed l t) e'
tcForInitializer :: (ProverK loc m) => ForInitializer Identifier loc -> TcM m (ForInitializer GIdentifier (Typed loc))
tcForInitializer (InitializerExpression Nothing) = return $ InitializerExpression Nothing
tcForInitializer (InitializerExpression (Just e)) = do
e' <- withExprC ReadWriteExpr $ tcExpr Nothing e
return $ InitializerExpression $ Just e'
tcForInitializer (InitializerVariable vd) = do
vd' <- tcVarDecl LocalScope vd
return $ InitializerVariable vd'
tcVarDecl :: (ProverK loc m) => Scope -> VariableDeclaration Identifier loc -> TcM m (VariableDeclaration GIdentifier (Typed loc))
tcVarDecl scope (VariableDeclaration l isConst isHavoc tyspec vars) = do
(tyspec') <- tcTypeSpec tyspec False False
let ty = typed $ loc tyspec'
(vars') <- mapM (tcVarInit isConst isHavoc scope ty) vars
return (VariableDeclaration (notTyped "tcVarDecl" l) isConst True tyspec' vars')
tcVarInit :: (ProverK loc m) => Bool -> Bool -> Scope -> Type -> VariableInitialization Identifier loc -> TcM m (VariableInitialization GIdentifier (Typed loc))
tcVarInit False isHavoc scope ty (VariableInitialization l v@(VarName vl n) szs e) = do
(ty',szs') <- tcTypeSizes l ty szs
e' <- withExprC ReadWriteExpr $ tcDefaultInitExpr l isHavoc ty' szs' e
-- add the array size to the type
-- do not store the size, since it can change dynamically
vn <- addConst scope (True,False) False n
let v' = VarName (Typed vl ty) vn
-- add variable to the environment
isAnn <- getAnn
newVariable scope False isAnn v' Nothing -- don't add values to the environment
return (VariableInitialization (notTyped "tcVarInit" l) v' szs' e')
tcVarInit True isHavoc scope ty (VariableInitialization l v@(VarName vl n) szs e) = do
(ty',szs') <- tcTypeSizes l ty szs
e' <- withExprC PureExpr $ tcDefaultInitExpr l isHavoc ty' szs' e
-- add the array size to the type
vn <- addConst scope (True,True) False n
let v' = VarName (Typed vl ty') vn
-- add variable to the environment
isAnn <- getAnn
newVariable scope True isAnn v' e'
return (VariableInitialization (notTyped "tcVarInit" l) v' szs' e')
tcDefaultInitExpr :: ProverK loc m => loc -> IsHavoc -> Type -> Maybe (Sizes GIdentifier (Typed loc)) -> Maybe (Expression Identifier loc) -> TcM m (Maybe (Expression GIdentifier (Typed loc)))
tcDefaultInitExpr l isHavoc ty szs (Just e) = withDef False $ do
liftM Just $ tcExprTy ty e
tcDefaultInitExpr l True ty szs Nothing = return Nothing
tcDefaultInitExpr l False ty szs Nothing = liftM Just $ withDef True $ do
x <- liftM varExpr $ newTypedVar "def" ty False Nothing
let szsl = case szs of
Nothing -> Nothing
Just (Sizes xs) -> Just $ map (mapFst $ fmap typed) $ Foldable.toList xs
topTcCstrM_ l $ Default szsl x
return $ fmap (Typed l) x
tcProcedureAnns :: ProverK loc m => Bool -> [ProcedureAnnotation Identifier loc] -> TcM m [ProcedureAnnotation GIdentifier (Typed loc)]
tcProcedureAnns isAxiom xs = do
(inlines,anns') <- Utils.mapAndUnzipM (tcProcedureAnn isAxiom) xs
case catMaybes inlines of
[] -> return ()
is -> chgDecClassM $ chgInlineDecClass (last is)
return anns'
tcProcedureAnn :: ProverK loc m => Bool -> ProcedureAnnotation Identifier loc -> TcM m (Maybe Bool,ProcedureAnnotation GIdentifier (Typed loc))
tcProcedureAnn isAxiom (PDecreasesAnn l e) = tcStmtBlock l "decreases annotation" $ tcAddDeps l "pann" $ insideAnnotation $ withLeak False $ do
if isAxiom
then do
ppe <- pp e
genTcError (locpos l) False $ text "decreases annotation not supported inside axioms:" <+> ppe
else do
(e') <- tcAnnExpr Nothing e
return (Nothing,PDecreasesAnn (Typed l $ typed $ loc e') e')
tcProcedureAnn isAxiom (RequiresAnn l isFree isLeak e) = tcStmtBlock l "requires annotation" $ tcAddDeps l "pann" $ insideAnnotation $ do
if (isAxiom && isFree)
then do
ppe <- pp e
genTcError (locpos l) False $ text "requires annotation must not be free inside axioms:" <+> ppe
else do
(isLeak',e') <- checkLeakMb l isLeak $ tcAnnGuard e
return (Nothing,RequiresAnn (Typed l $ typed $ loc e') isFree isLeak' e')
tcProcedureAnn isAxiom (EnsuresAnn l isFree isLeak e) = tcStmtBlock l "ensures annotation" $ tcAddDeps l "pann" $ insideAnnotation $ do
if (isAxiom && isFree)
then do
ppe <- pp e
genTcError (locpos l) False $ text "ensures annotation must not be free inside axioms:" <+> ppe
else do
(isLeak',e') <- checkLeakMb l isLeak $ tcAnnGuard e
return (Nothing,EnsuresAnn (Typed l $ typed $ loc e') isFree isLeak' e')
tcProcedureAnn isAxiom (InlineAnn l isInline) = tcStmtBlock l "inline annotation" $ tcAddDeps l "pann" $ do
if isAxiom
then do
genTcError (locpos l) False $ text "inline annotation not supported inside axioms"
else do
return (Just isInline,InlineAnn (notTyped "inline" l) isInline)
| haslab/SecreC | src/Language/SecreC/TypeChecker/Statement.hs | gpl-3.0 | 19,705 | 0 | 19 | 4,401 | 7,413 | 3,572 | 3,841 | 340 | 5 |
-- This file contains implementations of some commonly used kernel functions. The most recommended one is the radial basis kernel.
{-# LANGUAGE BangPatterns #-} -- For bangs in function arguments to avoid laziness
module Svm.Kernel_functions where
import Data.List (foldl')
{- Kernel function gives the inner product in feature space. Input: first vector, second vector, parameter list -}
norm :: [Double] -> [Double] -> Double
norm xs ys = foldl' (+) 0 $ zipWith (\x y -> (x - y)**2) xs ys
--1. This is the kernel function with radial basis. p is the value sigma^2 and ps can be empty
radialKernelFunction :: [Double] -> [Double] -> [Double] -> Double
radialKernelFunction xvector yvector (p:ps) = exp $ (norm xvector yvector)/(-p)
--2. This linear kernel function. Simply a dot product.
linearKernelFunction :: [Double] -> [Double] -> [Double] -> Double
linearKernelFunction xs ys p = foldl' (+) 0 $ zipWith (*) xs ys
--3. This is polynomial kernel function.
polyKernelFunction :: [Double] -> [Double] -> [Double] -> Double
polyKernelFunction xvector yvector (p0:p1:ps) = (p0 + (foldl' (+) 0 $ zipWith (*) xvector yvector))**p1
--4. This is quadratic kernel function.
quadraticKernelFunction :: [Double] -> [Double] -> [Double] -> Double
quadraticKernelFunction xvector yvector (p0:ps) = (p0 + linearKernelFunction xvector yvector ps)**2
quadraticKernelFunction xvector yvector [] = (1 + linearKernelFunction xvector yvector [])**2
--5. This is cubic kernel function.
cubicKernelFunction :: [Double] -> [Double] -> [Double] -> Double
cubicKernelFunction xvector yvector (p0:ps) = (p0 + linearKernelFunction xvector yvector ps)**3
cubicKernelFunction xvector yvector [] = (1 + linearKernelFunction xvector yvector [])**3
--6. This is histogram kernel function.
histKernelFunction :: [Double] -> [Double] -> [Double] -> Double
histKernelFunction (x:xs) (y:ys) ps | x < y = x + histKernelFunction xs ys ps
| otherwise = y + histKernelFunction xs ys ps
histKernelFunction _ _ _ = 0
--7. This is sigmoid kernel function.
sigmoidKernelFunction :: [Double] -> [Double] -> [Double] -> Double
sigmoidKernelFunction xvector yvector (p0:p1:ps) = tanh (p0 * (foldl' (+) 0 $ zipWith (*) xvector yvector) - p1)
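-- A small usage sketch (illustrative values, assuming the module is loaded in
-- GHCi; not part of the original source):
--
-- > linearKernelFunction [1,2,3] [2,2,1] [] -- 1*2 + 2*2 + 3*1 = 9.0
-- > radialKernelFunction [1,2,3] [2,2,1] [0.5] -- exp (-((1-2)^2 + (2-2)^2 + (3-1)^2) / 0.5) = exp (-10)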
-- =============================================================================================================
| rahulaaj/ML-Library | src/Svm/Kernel_functions.hs | gpl-3.0 | 2,383 | 15 | 11 | 434 | 665 | 367 | 298 | 22 | 1 |
module E2ASM.Assembler.Instruction
( Instruction(..)
) where
import qualified Data.Data as D
data Instruction
-- Moving
= LDI
| LDM
| STM
-- Arithmetic
| ADD
| ADDI
| SUB
| SUBI
| SR
| SL
| NEG
-- Bitwise
| AND
| OR
| XOR
| NOT
-- Predicate bitwise
| PAND
| POR
| PXOR
| PNOT
-- Comparison
| ZERO
| LEQ
| BEQ
| CARRY
| SIGN
-- External
| SWI
| IRET
| IN
| OUT
deriving (Eq, Show, D.Data)
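-- Illustrative use of the derived 'D.Data' instance (not part of the original
-- module): listing every mnemonic name, e.g. to build an assembler keyword table.
--
-- > map D.showConstr (D.dataTypeConstrs (D.dataTypeOf LDI))
-- > -- ["LDI","LDM","STM","ADD","ADDI",...]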
| E2LP/e2asm | src/E2ASM/Assembler/Instruction.hs | gpl-3.0 | 460 | 0 | 7 | 156 | 132 | 88 | 44 | 32 | 0 |
{-|
Description : Parametrization of formatting
-}
module Language.Haskell.Formatter.Style
(Style, lineLengthLimit, ribbonsPerLine, successiveEmptyLinesLimit,
classIndentation, doIndentation, caseIndentation, letIndentation,
whereIndentation, onsideIndentation, orderImportDeclarations,
orderImportEntities, Indentation, defaultStyle, check)
where
import qualified Data.Maybe as Maybe
import qualified Language.Haskell.Formatter.Error as Error
import qualified Language.Haskell.Formatter.Internal.Newline as Newline
import qualified Language.Haskell.Formatter.Result as Result
import qualified Language.Haskell.Formatter.Source as Source
data Style = Style{lineLengthLimit :: Int, ribbonsPerLine :: Float,
successiveEmptyLinesLimit :: Int,
classIndentation :: Indentation,
doIndentation :: Indentation, caseIndentation :: Indentation,
letIndentation :: Indentation,
whereIndentation :: Indentation,
onsideIndentation :: Indentation,
orderImportDeclarations :: Bool, orderImportEntities :: Bool}
deriving (Eq, Ord, Show)
newtype Check = Check (Maybe String)
deriving (Eq, Ord, Show)
{-| Number of characters used to indent. -}
type Indentation = Int
defaultStyle :: Style
defaultStyle
= Style{lineLengthLimit = 80, ribbonsPerLine = 1,
successiveEmptyLinesLimit = 1,
classIndentation = Source.classIndent mode,
doIndentation = Source.doIndent mode,
caseIndentation = Source.caseIndent mode,
letIndentation = Source.letIndent mode,
whereIndentation = Source.whereIndent mode,
onsideIndentation = Source.onsideIndent mode,
orderImportDeclarations = True, orderImportEntities = True}
where mode = Source.defaultMode
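-- A usage sketch (hypothetical values, not part of the original module): a
-- custom style is normally built by updating 'defaultStyle' with record syntax
-- and then validated with 'check'.
--
-- > let wideStyle = defaultStyle{lineLengthLimit = 100, ribbonsPerLine = 1.5}
-- > in check wideStyle -- succeeds, since all limits stay within their bounds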
check :: Style -> Result.Result ()
check style
= case maybeError of
Nothing -> return ()
Just message -> Result.fatalError $ Error.createStyleFormatError message
where maybeError
= case errorMessages of
[] -> Nothing
messages -> Just $ Newline.joinSeparatedLines messages
errorMessages = Maybe.mapMaybe unwrap $ createChecks style
unwrap (Check errorMessage) = errorMessage
createChecks :: Style -> [Check]
createChecks style
= concat
[[lineLengthLimitCheck, ribbonsPerLineCheck,
successiveEmptyLinesLimitCheck],
indentationChecks, [onsideLessCheck]]
where lineLengthLimitCheck
= createCheck (rawLineLengthLimit > 0)
["The line length limit must be positive, but it is ",
show rawLineLengthLimit, "."]
rawLineLengthLimit = lineLengthLimit style
ribbonsPerLineCheck
= createCheck (rawRibbonsPerLine >= 1)
["The ribbons per line ratio must be at least 1, but it is ",
show rawRibbonsPerLine, "."]
rawRibbonsPerLine = ribbonsPerLine style
successiveEmptyLinesLimitCheck
= createCheck (rawSuccessiveEmptyLinesLimit >= 0)
["The successive empty lines limit must not be negative, ",
"but it is ", show rawSuccessiveEmptyLinesLimit, "."]
rawSuccessiveEmptyLinesLimit = successiveEmptyLinesLimit style
indentationChecks = fmap checkIndentation indentations
checkIndentation (indentation, name)
= createCheck (indentation > 0)
["The ", name, " indentation must be positive, but it is ",
show indentation, "."]
indentations
= [(rawClassIndentation, "class"), (rawDoIndentation, "do"),
(rawCaseIndentation, "case"), (rawLetIndentation, "let"),
(rawWhereIndentation, "where"), (rawOnsideIndentation, onsideName)]
rawClassIndentation = classIndentation style
rawDoIndentation = doIndentation style
rawCaseIndentation = caseIndentation style
rawLetIndentation = letIndentation style
rawWhereIndentation = whereIndentation style
rawOnsideIndentation = onsideIndentation style
onsideName = "onside"
onsideLessCheck
= createCheck
(and $ fmap (> rawOnsideIndentation) greaterOnsideIndentations)
["The ", onsideName,
" indentation must be less than the other indentations, ",
"but it is ", show rawOnsideIndentation, "."]
greaterOnsideIndentations
= [rawClassIndentation, rawDoIndentation, rawCaseIndentation,
rawLetIndentation, rawWhereIndentation]
createCheck :: Bool -> [String] -> Check
createCheck False = Check . Just . concat
createCheck True = const $ Check Nothing
| evolutics/haskell-formatter | src/library/Language/Haskell/Formatter/Style.hs | gpl-3.0 | 4,784 | 0 | 12 | 1,248 | 912 | 532 | 380 | 94 | 3 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.OpsWorks.UpdateStack
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Updates a specified stack.
--
-- Required Permissions: To use this action, an IAM user must have a Manage
-- permissions level for the stack, or an attached policy that explicitly grants
-- permissions. For more information on user permissions, see <http://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html Managing UserPermissions>.
--
-- <http://docs.aws.amazon.com/opsworks/latest/APIReference/API_UpdateStack.html>
module Network.AWS.OpsWorks.UpdateStack
(
-- * Request
UpdateStack
-- ** Request constructor
, updateStack
-- ** Request lenses
, usAttributes
, usChefConfiguration
, usConfigurationManager
, usCustomCookbooksSource
, usCustomJson
, usDefaultAvailabilityZone
, usDefaultInstanceProfileArn
, usDefaultOs
, usDefaultRootDeviceType
, usDefaultSshKeyName
, usDefaultSubnetId
, usHostnameTheme
, usName
, usServiceRoleArn
, usStackId
, usUseCustomCookbooks
, usUseOpsworksSecurityGroups
-- * Response
, UpdateStackResponse
-- ** Response constructor
, updateStackResponse
) where
import Network.AWS.Data (Object)
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.OpsWorks.Types
import qualified GHC.Exts
data UpdateStack = UpdateStack
{ _usAttributes :: Map StackAttributesKeys Text
, _usChefConfiguration :: Maybe ChefConfiguration
, _usConfigurationManager :: Maybe StackConfigurationManager
, _usCustomCookbooksSource :: Maybe Source
, _usCustomJson :: Maybe Text
, _usDefaultAvailabilityZone :: Maybe Text
, _usDefaultInstanceProfileArn :: Maybe Text
, _usDefaultOs :: Maybe Text
, _usDefaultRootDeviceType :: Maybe RootDeviceType
, _usDefaultSshKeyName :: Maybe Text
, _usDefaultSubnetId :: Maybe Text
, _usHostnameTheme :: Maybe Text
, _usName :: Maybe Text
, _usServiceRoleArn :: Maybe Text
, _usStackId :: Text
, _usUseCustomCookbooks :: Maybe Bool
, _usUseOpsworksSecurityGroups :: Maybe Bool
} deriving (Eq, Read, Show)
-- | 'UpdateStack' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'usAttributes' @::@ 'HashMap' 'StackAttributesKeys' 'Text'
--
-- * 'usChefConfiguration' @::@ 'Maybe' 'ChefConfiguration'
--
-- * 'usConfigurationManager' @::@ 'Maybe' 'StackConfigurationManager'
--
-- * 'usCustomCookbooksSource' @::@ 'Maybe' 'Source'
--
-- * 'usCustomJson' @::@ 'Maybe' 'Text'
--
-- * 'usDefaultAvailabilityZone' @::@ 'Maybe' 'Text'
--
-- * 'usDefaultInstanceProfileArn' @::@ 'Maybe' 'Text'
--
-- * 'usDefaultOs' @::@ 'Maybe' 'Text'
--
-- * 'usDefaultRootDeviceType' @::@ 'Maybe' 'RootDeviceType'
--
-- * 'usDefaultSshKeyName' @::@ 'Maybe' 'Text'
--
-- * 'usDefaultSubnetId' @::@ 'Maybe' 'Text'
--
-- * 'usHostnameTheme' @::@ 'Maybe' 'Text'
--
-- * 'usName' @::@ 'Maybe' 'Text'
--
-- * 'usServiceRoleArn' @::@ 'Maybe' 'Text'
--
-- * 'usStackId' @::@ 'Text'
--
-- * 'usUseCustomCookbooks' @::@ 'Maybe' 'Bool'
--
-- * 'usUseOpsworksSecurityGroups' @::@ 'Maybe' 'Bool'
--
updateStack :: Text -- ^ 'usStackId'
-> UpdateStack
updateStack p1 = UpdateStack
{ _usStackId = p1
, _usName = Nothing
, _usAttributes = mempty
, _usServiceRoleArn = Nothing
, _usDefaultInstanceProfileArn = Nothing
, _usDefaultOs = Nothing
, _usHostnameTheme = Nothing
, _usDefaultAvailabilityZone = Nothing
, _usDefaultSubnetId = Nothing
, _usCustomJson = Nothing
, _usConfigurationManager = Nothing
, _usChefConfiguration = Nothing
, _usUseCustomCookbooks = Nothing
, _usCustomCookbooksSource = Nothing
, _usDefaultSshKeyName = Nothing
, _usDefaultRootDeviceType = Nothing
, _usUseOpsworksSecurityGroups = Nothing
}
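-- A usage sketch (illustrative values; assumes the lens operators '(&)' and
-- '(?~)' are in scope, e.g. from Control.Lens):
--
-- > updateStack "stack-1a2b3c4d"
-- >     & usName ?~ "renamed-stack"
-- >     & usDefaultOs ?~ "Amazon Linux 2014.09"
-- >     & usUseCustomCookbooks ?~ True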
-- | One or more user-defined key/value pairs to be added to the stack attributes.
usAttributes :: Lens' UpdateStack (HashMap StackAttributesKeys Text)
usAttributes = lens _usAttributes (\s a -> s { _usAttributes = a }) . _Map
-- | A 'ChefConfiguration' object that specifies whether to enable Berkshelf and the
-- Berkshelf version on Chef 11.10 stacks. For more information, see <http://docs.aws.amazon.com/opsworks/latest/userguide/workingstacks-creating.html Create aNew Stack>.
usChefConfiguration :: Lens' UpdateStack (Maybe ChefConfiguration)
usChefConfiguration =
lens _usChefConfiguration (\s a -> s { _usChefConfiguration = a })
-- | The configuration manager. When you clone a stack we recommend that you use
-- the configuration manager to specify the Chef version, 0.9, 11.4, or 11.10.
-- The default value is currently 11.4.
usConfigurationManager :: Lens' UpdateStack (Maybe StackConfigurationManager)
usConfigurationManager =
lens _usConfigurationManager (\s a -> s { _usConfigurationManager = a })
usCustomCookbooksSource :: Lens' UpdateStack (Maybe Source)
usCustomCookbooksSource =
lens _usCustomCookbooksSource (\s a -> s { _usCustomCookbooksSource = a })
-- | A string that contains user-defined, custom JSON. It can be used to override
-- the corresponding default stack configuration JSON values or to pass data to
-- recipes. The string should be in the following format and must escape
-- characters such as '"'.:
--
-- '"{\"key1\": \"value1\", \"key2\": \"value2\",...}"'
--
-- For more information on custom JSON, see <http://docs.aws.amazon.com/opsworks/latest/userguide/workingstacks-json.html Use Custom JSON to Modify the StackConfiguration Attributes>.
usCustomJson :: Lens' UpdateStack (Maybe Text)
usCustomJson = lens _usCustomJson (\s a -> s { _usCustomJson = a })
-- | The stack's default Availability Zone, which must be in the specified region.
-- For more information, see <http://docs.aws.amazon.com/general/latest/gr/rande.html Regions and Endpoints>. If you also specify a value
-- for 'DefaultSubnetId', the subnet must be in the same zone. For more
-- information, see 'CreateStack'.
usDefaultAvailabilityZone :: Lens' UpdateStack (Maybe Text)
usDefaultAvailabilityZone =
lens _usDefaultAvailabilityZone
(\s a -> s { _usDefaultAvailabilityZone = a })
-- | The ARN of an IAM profile that is the default profile for all of the stack's
-- EC2 instances. For more information about IAM ARNs, see <http://docs.aws.amazon.com/IAM/latest/UserGuide/Using_Identifiers.html Using Identifiers>.
usDefaultInstanceProfileArn :: Lens' UpdateStack (Maybe Text)
usDefaultInstanceProfileArn =
lens _usDefaultInstanceProfileArn
(\s a -> s { _usDefaultInstanceProfileArn = a })
-- | The stack's operating system, which must be set to one of the following.
--
-- Standard Linux operating systems: an Amazon Linux version such as 'AmazonLinux 2014.09', 'Ubuntu 12.04 LTS', or 'Ubuntu 14.04 LTS'. Custom Linux AMIs: 'Custom'. You specify the custom AMI you want to use when you create instances. Microsoft Windows Server 2012 R2.
-- The default option is the current Amazon Linux version.
usDefaultOs :: Lens' UpdateStack (Maybe Text)
usDefaultOs = lens _usDefaultOs (\s a -> s { _usDefaultOs = a })
-- | The default root device type. This value is used by default for all instances
-- in the stack, but you can override it when you create an instance. For more
-- information, see <http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ComponentsAMIs.html#storage-for-the-root-device Storage for the Root Device>.
usDefaultRootDeviceType :: Lens' UpdateStack (Maybe RootDeviceType)
usDefaultRootDeviceType =
lens _usDefaultRootDeviceType (\s a -> s { _usDefaultRootDeviceType = a })
-- | A default Amazon EC2 key pair name. The default value is none. If you specify
-- a key pair name, AWS OpsWorks installs the public key on the instance and you
-- can use the private key with an SSH client to log in to the instance. For
-- more information, see <http://docs.aws.amazon.com/opsworks/latest/userguide/workinginstances-ssh.html Using SSH to Communicate with an Instance> and <http://docs.aws.amazon.com/opsworks/latest/userguide/security-ssh-access.html Managing SSH Access>. You can override this setting by specifying a different
-- key pair, or no key pair, when you <http://docs.aws.amazon.com/opsworks/latest/userguide/workinginstances-add.html create an instance>.
usDefaultSshKeyName :: Lens' UpdateStack (Maybe Text)
usDefaultSshKeyName =
lens _usDefaultSshKeyName (\s a -> s { _usDefaultSshKeyName = a })
-- | The stack's default VPC subnet ID. This parameter is required if you specify
-- a value for the 'VpcId' parameter. All instances are launched into this subnet
-- unless you specify otherwise when you create the instance. If you also
-- specify a value for 'DefaultAvailabilityZone', the subnet must be in that zone.
-- For information on default values and when this parameter is required, see
-- the 'VpcId' parameter description.
usDefaultSubnetId :: Lens' UpdateStack (Maybe Text)
usDefaultSubnetId =
lens _usDefaultSubnetId (\s a -> s { _usDefaultSubnetId = a })
-- | The stack's new host name theme, with spaces replaced by underscores. The
-- theme is used to generate host names for the stack's instances. By default, 'HostnameTheme' is set to 'Layer_Dependent', which creates host names by appending integers to
-- the layer's short name. The other themes are:
--
-- 'Baked_Goods' 'Clouds' 'Europe_Cities' 'Fruits' 'Greek_Deities' 'Legendary_creatures_from_Japan' 'Planets_and_Moons' 'Roman_Deities' 'Scottish_Islands' 'US_Cities' 'Wild_Cats' To obtain a generated host name, call 'GetHostNameSuggestion', which returns
-- a host name based on the current theme.
usHostnameTheme :: Lens' UpdateStack (Maybe Text)
usHostnameTheme = lens _usHostnameTheme (\s a -> s { _usHostnameTheme = a })
-- | The stack's new name.
usName :: Lens' UpdateStack (Maybe Text)
usName = lens _usName (\s a -> s { _usName = a })
-- | The stack AWS Identity and Access Management (IAM) role, which allows AWS
-- OpsWorks to work with AWS resources on your behalf. You must set this
-- parameter to the Amazon Resource Name (ARN) for an existing IAM role. For
-- more information about IAM ARNs, see <http://docs.aws.amazon.com/IAM/latest/UserGuide/Using_Identifiers.html Using Identifiers>.
--
-- You must set this parameter to a valid service role ARN or the action will
-- fail; there is no default value. You can specify the stack's current service
-- role ARN, if you prefer, but you must do so explicitly.
--
--
usServiceRoleArn :: Lens' UpdateStack (Maybe Text)
usServiceRoleArn = lens _usServiceRoleArn (\s a -> s { _usServiceRoleArn = a })
-- | The stack ID.
usStackId :: Lens' UpdateStack Text
usStackId = lens _usStackId (\s a -> s { _usStackId = a })
-- | Whether the stack uses custom cookbooks.
usUseCustomCookbooks :: Lens' UpdateStack (Maybe Bool)
usUseCustomCookbooks =
lens _usUseCustomCookbooks (\s a -> s { _usUseCustomCookbooks = a })
-- | Whether to associate the AWS OpsWorks built-in security groups with the
-- stack's layers.
--
-- AWS OpsWorks provides a standard set of built-in security groups, one for
-- each layer, which are associated with layers by default. 'UseOpsworksSecurityGroups' allows you to instead provide your own custom security groups. 'UseOpsworksSecurityGroups' has the following settings:
--
-- True - AWS OpsWorks automatically associates the appropriate built-in
-- security group with each layer (default setting). You can associate
-- additional security groups with a layer after you create it but you cannot
-- delete the built-in security group. False - AWS OpsWorks does not associate
-- built-in security groups with layers. You must create appropriate EC2
-- security groups and associate a security group with each layer that you
-- create. However, you can still manually associate a built-in security group
-- with a layer on creation; custom security groups are required only for those
-- layers that need custom settings. For more information, see <http://docs.aws.amazon.com/opsworks/latest/userguide/workingstacks-creating.html Create a NewStack>.
usUseOpsworksSecurityGroups :: Lens' UpdateStack (Maybe Bool)
usUseOpsworksSecurityGroups =
lens _usUseOpsworksSecurityGroups
(\s a -> s { _usUseOpsworksSecurityGroups = a })
data UpdateStackResponse = UpdateStackResponse
deriving (Eq, Ord, Read, Show, Generic)
-- | 'UpdateStackResponse' constructor.
updateStackResponse :: UpdateStackResponse
updateStackResponse = UpdateStackResponse
instance ToPath UpdateStack where
toPath = const "/"
instance ToQuery UpdateStack where
toQuery = const mempty
instance ToHeaders UpdateStack
instance ToJSON UpdateStack where
toJSON UpdateStack{..} = object
[ "StackId" .= _usStackId
, "Name" .= _usName
, "Attributes" .= _usAttributes
, "ServiceRoleArn" .= _usServiceRoleArn
, "DefaultInstanceProfileArn" .= _usDefaultInstanceProfileArn
, "DefaultOs" .= _usDefaultOs
, "HostnameTheme" .= _usHostnameTheme
, "DefaultAvailabilityZone" .= _usDefaultAvailabilityZone
, "DefaultSubnetId" .= _usDefaultSubnetId
, "CustomJson" .= _usCustomJson
, "ConfigurationManager" .= _usConfigurationManager
, "ChefConfiguration" .= _usChefConfiguration
, "UseCustomCookbooks" .= _usUseCustomCookbooks
, "CustomCookbooksSource" .= _usCustomCookbooksSource
, "DefaultSshKeyName" .= _usDefaultSshKeyName
, "DefaultRootDeviceType" .= _usDefaultRootDeviceType
, "UseOpsworksSecurityGroups" .= _usUseOpsworksSecurityGroups
]
instance AWSRequest UpdateStack where
type Sv UpdateStack = OpsWorks
type Rs UpdateStack = UpdateStackResponse
request = post "UpdateStack"
response = nullResponse UpdateStackResponse
| romanb/amazonka | amazonka-opsworks/gen/Network/AWS/OpsWorks/UpdateStack.hs | mpl-2.0 | 15,244 | 0 | 10 | 3,081 | 1,565 | 946 | 619 | 157 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.EC2.DescribeSnapshots
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Describes one or more of the Amazon EBS snapshots available to you. Available
-- snapshots include public snapshots available for any AWS account to launch,
-- private snapshots that you own, and private snapshots owned by another AWS
-- account but for which you've been given explicit create volume permissions.
--
-- The create volume permissions fall into the following categories:
--
-- /public/: The owner of the snapshot granted create volume permissions for
-- the snapshot to the 'all' group. All AWS accounts have create volume
-- permissions for these snapshots. /explicit/: The owner of the snapshot granted
-- create volume permissions to a specific AWS account. /implicit/: An AWS
-- account has implicit create volume permissions for all snapshots it owns.
--
-- The list of snapshots returned can be modified by specifying snapshot IDs,
-- snapshot owners, or AWS accounts with create volume permissions. If no
-- options are specified, Amazon EC2 returns all snapshots for which you have
-- create volume permissions.
--
--
-- If you specify one or more snapshot IDs, only snapshots that have the
-- specified IDs are returned. If you specify an invalid snapshot ID, an error
-- is returned. If you specify a snapshot ID for which you do not have access,
-- it is not included in the returned results.
--
-- If you specify one or more snapshot owners, only snapshots from the
-- specified owners and for which you have access are returned. The results can
-- include the AWS account IDs of the specified owners, 'amazon' for snapshots
-- owned by Amazon, or 'self' for snapshots that you own.
--
-- If you specify a list of restorable users, only snapshots with create
-- snapshot permissions for those users are returned. You can specify AWS
-- account IDs (if you own the snapshots), 'self' for snapshots for which you own
-- or have explicit permissions, or 'all' for public snapshots.
--
-- If you are describing a long list of snapshots, you can paginate the output
-- to make the list more manageable. The 'MaxResults' parameter sets the maximum
-- number of results returned in a single page. If the list of results exceeds
-- your 'MaxResults' value, then that number of results is returned along with a 'NextToken' value that can be passed to a subsequent 'DescribeSnapshots' request to
-- retrieve the remaining results.
--
-- For more information about Amazon EBS snapshots, see <http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/EBSSnapshots.html Amazon EBS Snapshots> in
-- the /Amazon Elastic Compute Cloud User Guide for Linux/.
--
-- <http://docs.aws.amazon.com/AWSEC2/latest/APIReference/ApiReference-query-DescribeSnapshots.html>
module Network.AWS.EC2.DescribeSnapshots
(
-- * Request
DescribeSnapshots
-- ** Request constructor
, describeSnapshots
-- ** Request lenses
, ds1DryRun
, ds1Filters
, ds1MaxResults
, ds1NextToken
, ds1OwnerIds
, ds1RestorableByUserIds
, ds1SnapshotIds
-- * Response
, DescribeSnapshotsResponse
-- ** Response constructor
, describeSnapshotsResponse
-- ** Response lenses
, dsrNextToken
, dsrSnapshots
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.EC2.Types
import qualified GHC.Exts
data DescribeSnapshots = DescribeSnapshots
{ _ds1DryRun :: Maybe Bool
, _ds1Filters :: List "Filter" Filter
, _ds1MaxResults :: Maybe Int
, _ds1NextToken :: Maybe Text
, _ds1OwnerIds :: List "Owner" Text
, _ds1RestorableByUserIds :: List "RestorableBy" Text
, _ds1SnapshotIds :: List "SnapshotId" Text
} deriving (Eq, Read, Show)
-- | 'DescribeSnapshots' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'ds1DryRun' @::@ 'Maybe' 'Bool'
--
-- * 'ds1Filters' @::@ ['Filter']
--
-- * 'ds1MaxResults' @::@ 'Maybe' 'Int'
--
-- * 'ds1NextToken' @::@ 'Maybe' 'Text'
--
-- * 'ds1OwnerIds' @::@ ['Text']
--
-- * 'ds1RestorableByUserIds' @::@ ['Text']
--
-- * 'ds1SnapshotIds' @::@ ['Text']
--
describeSnapshots :: DescribeSnapshots
describeSnapshots = DescribeSnapshots
{ _ds1DryRun = Nothing
, _ds1SnapshotIds = mempty
, _ds1OwnerIds = mempty
, _ds1RestorableByUserIds = mempty
, _ds1Filters = mempty
, _ds1NextToken = Nothing
, _ds1MaxResults = Nothing
}
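-- A usage sketch (illustrative values; assumes the lens operators '(&)', '(.~)'
-- and '(?~)' are in scope, e.g. from Control.Lens):
--
-- > describeSnapshots
-- >     & ds1OwnerIds .~ ["self"]
-- >     & ds1MaxResults ?~ 100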
ds1DryRun :: Lens' DescribeSnapshots (Maybe Bool)
ds1DryRun = lens _ds1DryRun (\s a -> s { _ds1DryRun = a })
-- | One or more filters.
--
-- 'description' - A description of the snapshot.
--
-- 'owner-alias' - The AWS account alias (for example, 'amazon') that owns the
-- snapshot.
--
-- 'owner-id' - The ID of the AWS account that owns the snapshot.
--
-- 'progress' - The progress of the snapshot, as a percentage (for example,
-- 80%).
--
-- 'snapshot-id' - The snapshot ID.
--
-- 'start-time' - The time stamp when the snapshot was initiated.
--
-- 'status' - The status of the snapshot ('pending' | 'completed' | 'error').
--
-- 'tag':/key/=/value/ - The key/value combination of a tag assigned to the
-- resource.
--
-- 'tag-key' - The key of a tag assigned to the resource. This filter is
-- independent of the 'tag-value' filter. For example, if you use both the filter
-- "tag-key=Purpose" and the filter "tag-value=X", you get any resources
-- assigned both the tag key Purpose (regardless of what the tag's value is),
-- and the tag value X (regardless of what the tag's key is). If you want to
-- list only resources where Purpose is X, see the 'tag':/key/=/value/ filter.
--
-- 'tag-value' - The value of a tag assigned to the resource. This filter is
-- independent of the 'tag-key' filter.
--
-- 'volume-id' - The ID of the volume the snapshot is for.
--
-- 'volume-size' - The size of the volume, in GiB.
--
--
ds1Filters :: Lens' DescribeSnapshots [Filter]
ds1Filters = lens _ds1Filters (\s a -> s { _ds1Filters = a }) . _List
-- | The maximum number of snapshot results returned by 'DescribeSnapshots' in
-- paginated output. When this parameter is used, 'DescribeSnapshots' only returns 'MaxResults' results in a single page along with a 'NextToken' response element.
-- The remaining results of the initial request can be seen by sending another 'DescribeSnapshots' request with the returned 'NextToken' value. This value can be between 5 and
-- 1000; if 'MaxResults' is given a value larger than 1000, only 1000 results are
-- returned. If this parameter is not used, then 'DescribeSnapshots' returns all
-- results. You cannot specify this parameter and the snapshot IDs parameter in
-- the same request.
ds1MaxResults :: Lens' DescribeSnapshots (Maybe Int)
ds1MaxResults = lens _ds1MaxResults (\s a -> s { _ds1MaxResults = a })
-- | The 'NextToken' value returned from a previous paginated 'DescribeSnapshots'
-- request where 'MaxResults' was used and the results exceeded the value of that
-- parameter. Pagination continues from the end of the previous results that
-- returned the 'NextToken' value. This value is 'null' when there are no more
-- results to return.
ds1NextToken :: Lens' DescribeSnapshots (Maybe Text)
ds1NextToken = lens _ds1NextToken (\s a -> s { _ds1NextToken = a })
-- | Returns the snapshots owned by the specified owner. Multiple owners can be
-- specified.
ds1OwnerIds :: Lens' DescribeSnapshots [Text]
ds1OwnerIds = lens _ds1OwnerIds (\s a -> s { _ds1OwnerIds = a }) . _List
-- | One or more AWS accounts IDs that can create volumes from the snapshot.
ds1RestorableByUserIds :: Lens' DescribeSnapshots [Text]
ds1RestorableByUserIds =
lens _ds1RestorableByUserIds (\s a -> s { _ds1RestorableByUserIds = a })
. _List
-- | One or more snapshot IDs.
--
-- Default: Describes snapshots for which you have launch permissions.
ds1SnapshotIds :: Lens' DescribeSnapshots [Text]
ds1SnapshotIds = lens _ds1SnapshotIds (\s a -> s { _ds1SnapshotIds = a }) . _List
data DescribeSnapshotsResponse = DescribeSnapshotsResponse
{ _dsrNextToken :: Maybe Text
, _dsrSnapshots :: List "item" Snapshot
} deriving (Eq, Read, Show)
-- | 'DescribeSnapshotsResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'dsrNextToken' @::@ 'Maybe' 'Text'
--
-- * 'dsrSnapshots' @::@ ['Snapshot']
--
describeSnapshotsResponse :: DescribeSnapshotsResponse
describeSnapshotsResponse = DescribeSnapshotsResponse
{ _dsrSnapshots = mempty
, _dsrNextToken = Nothing
}
-- | The 'NextToken' value to include in a future 'DescribeSnapshots' request. When
-- the results of a 'DescribeSnapshots' request exceed 'MaxResults', this value can
-- be used to retrieve the next page of results. This value is 'null' when there
-- are no more results to return.
dsrNextToken :: Lens' DescribeSnapshotsResponse (Maybe Text)
dsrNextToken = lens _dsrNextToken (\s a -> s { _dsrNextToken = a })
dsrSnapshots :: Lens' DescribeSnapshotsResponse [Snapshot]
dsrSnapshots = lens _dsrSnapshots (\s a -> s { _dsrSnapshots = a }) . _List
instance ToPath DescribeSnapshots where
toPath = const "/"
instance ToQuery DescribeSnapshots where
toQuery DescribeSnapshots{..} = mconcat
[ "DryRun" =? _ds1DryRun
, "Filter" `toQueryList` _ds1Filters
, "MaxResults" =? _ds1MaxResults
, "NextToken" =? _ds1NextToken
, "Owner" `toQueryList` _ds1OwnerIds
, "RestorableBy" `toQueryList` _ds1RestorableByUserIds
, "SnapshotId" `toQueryList` _ds1SnapshotIds
]
instance ToHeaders DescribeSnapshots
instance AWSRequest DescribeSnapshots where
type Sv DescribeSnapshots = EC2
type Rs DescribeSnapshots = DescribeSnapshotsResponse
request = post "DescribeSnapshots"
response = xmlResponse
instance FromXML DescribeSnapshotsResponse where
parseXML x = DescribeSnapshotsResponse
<$> x .@? "nextToken"
<*> x .@? "snapshotSet" .!@ mempty
instance AWSPager DescribeSnapshots where
page rq rs
| stop (rs ^. dsrNextToken) = Nothing
| otherwise = (\x -> rq & ds1NextToken ?~ x)
<$> (rs ^. dsrNextToken)
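-- Because 'DescribeSnapshots' is an 'AWSPager', the full result set can be
-- fetched page by page with amazonka's 'paginate' runner (a sketch only; the
-- exact runner and environment setup depend on the amazonka version in use):
--
-- > paginate (describeSnapshots & ds1OwnerIds .~ ["self"])
-- > -- each page is a 'DescribeSnapshotsResponse'; read its items with (^. dsrSnapshots)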
| kim/amazonka | amazonka-ec2/gen/Network/AWS/EC2/DescribeSnapshots.hs | mpl-2.0 | 11,202 | 0 | 11 | 2,269 | 1,093 | 681 | 412 | 101 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.ProximityBeacon.Beacons.Decommission
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Decommissions the specified beacon in the service. This beacon will no
-- longer be returned from \`beaconinfo.getforobserved\`. This operation is
-- permanent -- you will not be able to re-register a beacon with this ID
-- again. Authenticate using an [OAuth access
-- token](https:\/\/developers.google.com\/identity\/protocols\/OAuth2)
-- from a signed-in user with **Is owner** or **Can edit** permissions in
-- the Google Developers Console project.
--
-- /See:/ <https://developers.google.com/beacons/proximity/ Google Proximity Beacon API Reference> for @proximitybeacon.beacons.decommission@.
module Network.Google.Resource.ProximityBeacon.Beacons.Decommission
(
-- * REST Resource
BeaconsDecommissionResource
-- * Creating a Request
, beaconsDecommission
, BeaconsDecommission
-- * Request Lenses
, beaXgafv
, beaUploadProtocol
, beaPp
, beaAccessToken
, beaBeaconName
, beaUploadType
, beaBearerToken
, beaProjectId
, beaCallback
) where
import Network.Google.Prelude
import Network.Google.ProximityBeacon.Types
-- | A resource alias for @proximitybeacon.beacons.decommission@ method which the
-- 'BeaconsDecommission' request conforms to.
type BeaconsDecommissionResource =
"v1beta1" :>
CaptureMode "beaconName" "decommission" Text :>
QueryParam "$.xgafv" Text :>
QueryParam "upload_protocol" Text :>
QueryParam "pp" Bool :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "bearer_token" Text :>
QueryParam "projectId" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :> Post '[JSON] Empty
-- | Decommissions the specified beacon in the service. This beacon will no
-- longer be returned from \`beaconinfo.getforobserved\`. This operation is
-- permanent -- you will not be able to re-register a beacon with this ID
-- again. Authenticate using an [OAuth access
-- token](https:\/\/developers.google.com\/identity\/protocols\/OAuth2)
-- from a signed-in user with **Is owner** or **Can edit** permissions in
-- the Google Developers Console project.
--
-- /See:/ 'beaconsDecommission' smart constructor.
data BeaconsDecommission = BeaconsDecommission'
{ _beaXgafv :: !(Maybe Text)
, _beaUploadProtocol :: !(Maybe Text)
, _beaPp :: !Bool
, _beaAccessToken :: !(Maybe Text)
, _beaBeaconName :: !Text
, _beaUploadType :: !(Maybe Text)
, _beaBearerToken :: !(Maybe Text)
, _beaProjectId :: !(Maybe Text)
, _beaCallback :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'BeaconsDecommission' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'beaXgafv'
--
-- * 'beaUploadProtocol'
--
-- * 'beaPp'
--
-- * 'beaAccessToken'
--
-- * 'beaBeaconName'
--
-- * 'beaUploadType'
--
-- * 'beaBearerToken'
--
-- * 'beaProjectId'
--
-- * 'beaCallback'
beaconsDecommission
:: Text -- ^ 'beaBeaconName'
-> BeaconsDecommission
beaconsDecommission pBeaBeaconName_ =
BeaconsDecommission'
{ _beaXgafv = Nothing
, _beaUploadProtocol = Nothing
, _beaPp = True
, _beaAccessToken = Nothing
, _beaBeaconName = pBeaBeaconName_
, _beaUploadType = Nothing
, _beaBearerToken = Nothing
, _beaProjectId = Nothing
, _beaCallback = Nothing
}
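-- A usage sketch (illustrative values; assumes the lens operators '(&)' and
-- '(?~)' are in scope, e.g. from Control.Lens):
--
-- > beaconsDecommission "beacons/3!bb1ecd2f6a6c4e5d"
-- >     & beaProjectId ?~ "my-beacon-project"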
-- | V1 error format.
beaXgafv :: Lens' BeaconsDecommission (Maybe Text)
beaXgafv = lens _beaXgafv (\ s a -> s{_beaXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
beaUploadProtocol :: Lens' BeaconsDecommission (Maybe Text)
beaUploadProtocol
= lens _beaUploadProtocol
(\ s a -> s{_beaUploadProtocol = a})
-- | Pretty-print response.
beaPp :: Lens' BeaconsDecommission Bool
beaPp = lens _beaPp (\ s a -> s{_beaPp = a})
-- | OAuth access token.
beaAccessToken :: Lens' BeaconsDecommission (Maybe Text)
beaAccessToken
= lens _beaAccessToken
(\ s a -> s{_beaAccessToken = a})
-- | Beacon that should be decommissioned. A beacon name has the format
-- \"beacons\/N!beaconId\" where the beaconId is the base16 ID broadcast by
-- the beacon and N is a code for the beacon\'s type. Possible values are
-- \`3\` for Eddystone-UID, \`4\` for Eddystone-EID, \`1\` for iBeacon, or
-- \`5\` for AltBeacon. For Eddystone-EID beacons, you may use either the
-- current EID or the beacon\'s \"stable\" UID. Required.
beaBeaconName :: Lens' BeaconsDecommission Text
beaBeaconName
= lens _beaBeaconName
(\ s a -> s{_beaBeaconName = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
beaUploadType :: Lens' BeaconsDecommission (Maybe Text)
beaUploadType
= lens _beaUploadType
(\ s a -> s{_beaUploadType = a})
-- | OAuth bearer token.
beaBearerToken :: Lens' BeaconsDecommission (Maybe Text)
beaBearerToken
= lens _beaBearerToken
(\ s a -> s{_beaBearerToken = a})
-- | The project id of the beacon to decommission. If the project id is not
-- specified then the project making the request is used. The project id
-- must match the project that owns the beacon. Optional.
beaProjectId :: Lens' BeaconsDecommission (Maybe Text)
beaProjectId
= lens _beaProjectId (\ s a -> s{_beaProjectId = a})
-- | JSONP
beaCallback :: Lens' BeaconsDecommission (Maybe Text)
beaCallback
= lens _beaCallback (\ s a -> s{_beaCallback = a})
instance GoogleRequest BeaconsDecommission where
type Rs BeaconsDecommission = Empty
type Scopes BeaconsDecommission =
'["https://www.googleapis.com/auth/userlocation.beacon.registry"]
requestClient BeaconsDecommission'{..}
= go _beaBeaconName _beaXgafv _beaUploadProtocol
(Just _beaPp)
_beaAccessToken
_beaUploadType
_beaBearerToken
_beaProjectId
_beaCallback
(Just AltJSON)
proximityBeaconService
where go
= buildClient
(Proxy :: Proxy BeaconsDecommissionResource)
mempty
| rueshyna/gogol | gogol-proximitybeacon/gen/Network/Google/Resource/ProximityBeacon/Beacons/Decommission.hs | mpl-2.0 | 7,069 | 0 | 18 | 1,587 | 949 | 557 | 392 | 131 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.TargetPools.RemoveInstance
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Removes instance URL from a target pool.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.targetPools.removeInstance@.
module Network.Google.Resource.Compute.TargetPools.RemoveInstance
(
-- * REST Resource
TargetPoolsRemoveInstanceResource
-- * Creating a Request
, targetPoolsRemoveInstance
, TargetPoolsRemoveInstance
-- * Request Lenses
, tpriRequestId
, tpriProject
, tpriTargetPool
, tpriPayload
, tpriRegion
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.targetPools.removeInstance@ method which the
-- 'TargetPoolsRemoveInstance' request conforms to.
type TargetPoolsRemoveInstanceResource =
"compute" :>
"v1" :>
"projects" :>
Capture "project" Text :>
"regions" :>
Capture "region" Text :>
"targetPools" :>
Capture "targetPool" Text :>
"removeInstance" :>
QueryParam "requestId" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] TargetPoolsRemoveInstanceRequest :>
Post '[JSON] Operation
-- | Removes instance URL from a target pool.
--
-- /See:/ 'targetPoolsRemoveInstance' smart constructor.
data TargetPoolsRemoveInstance =
TargetPoolsRemoveInstance'
{ _tpriRequestId :: !(Maybe Text)
, _tpriProject :: !Text
, _tpriTargetPool :: !Text
, _tpriPayload :: !TargetPoolsRemoveInstanceRequest
, _tpriRegion :: !Text
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'TargetPoolsRemoveInstance' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tpriRequestId'
--
-- * 'tpriProject'
--
-- * 'tpriTargetPool'
--
-- * 'tpriPayload'
--
-- * 'tpriRegion'
targetPoolsRemoveInstance
:: Text -- ^ 'tpriProject'
-> Text -- ^ 'tpriTargetPool'
-> TargetPoolsRemoveInstanceRequest -- ^ 'tpriPayload'
-> Text -- ^ 'tpriRegion'
-> TargetPoolsRemoveInstance
targetPoolsRemoveInstance pTpriProject_ pTpriTargetPool_ pTpriPayload_ pTpriRegion_ =
TargetPoolsRemoveInstance'
{ _tpriRequestId = Nothing
, _tpriProject = pTpriProject_
, _tpriTargetPool = pTpriTargetPool_
, _tpriPayload = pTpriPayload_
, _tpriRegion = pTpriRegion_
}
-- | An optional request ID to identify requests. Specify a unique request ID
-- so that if you must retry your request, the server will know to ignore
-- the request if it has already been completed. For example, consider a
-- situation where you make an initial request and the request times out.
-- If you make the request again with the same request ID, the server can
-- check if the original operation with the same request ID was received, and
-- if so, will ignore the second request. This prevents clients from
-- accidentally creating duplicate commitments. The request ID must be a
-- valid UUID with the exception that zero UUID is not supported
-- (00000000-0000-0000-0000-000000000000).
tpriRequestId :: Lens' TargetPoolsRemoveInstance (Maybe Text)
tpriRequestId
= lens _tpriRequestId
(\ s a -> s{_tpriRequestId = a})
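-- For example (an illustrative sketch, not generated code): attach a fixed
-- UUID so a retried call is recognised as a duplicate. All literals are
-- placeholders, and the lens operators ((&), (?~)) are assumed to be in
-- scope via "Network.Google.Prelude".
--
-- > retriableRemove :: TargetPoolsRemoveInstanceRequest -> TargetPoolsRemoveInstance
-- > retriableRemove payload =
-- >   targetPoolsRemoveInstance "my-project" "my-target-pool" payload "us-central1"
-- >     & tpriRequestId ?~ "123e4567-e89b-12d3-a456-426614174000"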
-- | Project ID for this request.
tpriProject :: Lens' TargetPoolsRemoveInstance Text
tpriProject
= lens _tpriProject (\ s a -> s{_tpriProject = a})
-- | Name of the TargetPool resource to remove instances from.
tpriTargetPool :: Lens' TargetPoolsRemoveInstance Text
tpriTargetPool
= lens _tpriTargetPool
(\ s a -> s{_tpriTargetPool = a})
-- | Multipart request metadata.
tpriPayload :: Lens' TargetPoolsRemoveInstance TargetPoolsRemoveInstanceRequest
tpriPayload
= lens _tpriPayload (\ s a -> s{_tpriPayload = a})
-- | Name of the region scoping this request.
tpriRegion :: Lens' TargetPoolsRemoveInstance Text
tpriRegion
= lens _tpriRegion (\ s a -> s{_tpriRegion = a})
instance GoogleRequest TargetPoolsRemoveInstance
where
type Rs TargetPoolsRemoveInstance = Operation
type Scopes TargetPoolsRemoveInstance =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/compute"]
requestClient TargetPoolsRemoveInstance'{..}
= go _tpriProject _tpriRegion _tpriTargetPool
_tpriRequestId
(Just AltJSON)
_tpriPayload
computeService
where go
= buildClient
(Proxy :: Proxy TargetPoolsRemoveInstanceResource)
mempty
| brendanhay/gogol | gogol-compute/gen/Network/Google/Resource/Compute/TargetPools/RemoveInstance.hs | mpl-2.0 | 5,489 | 0 | 19 | 1,254 | 636 | 378 | 258 | 101 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.AndroidEnterprise.Users.Insert
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Creates a new EMM-managed user. The Users resource passed in the body of
-- the request should include an accountIdentifier and an accountType. If a
-- corresponding user already exists with the same account identifier, the
-- user will be updated with the resource. In this case only the
-- displayName field can be changed.
--
-- /See:/ <https://developers.google.com/android/work/play/emm-api Google Play EMM API Reference> for @androidenterprise.users.insert@.
module Network.Google.Resource.AndroidEnterprise.Users.Insert
(
-- * REST Resource
UsersInsertResource
-- * Creating a Request
, usersInsert
, UsersInsert
-- * Request Lenses
, uiEnterpriseId
, uiPayload
) where
import Network.Google.AndroidEnterprise.Types
import Network.Google.Prelude
-- | A resource alias for @androidenterprise.users.insert@ method which the
-- 'UsersInsert' request conforms to.
type UsersInsertResource =
"androidenterprise" :>
"v1" :>
"enterprises" :>
Capture "enterpriseId" Text :>
"users" :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] User :> Post '[JSON] User
-- | Creates a new EMM-managed user. The Users resource passed in the body of
-- the request should include an accountIdentifier and an accountType. If a
-- corresponding user already exists with the same account identifier, the
-- user will be updated with the resource. In this case only the
-- displayName field can be changed.
--
-- /See:/ 'usersInsert' smart constructor.
data UsersInsert = UsersInsert'
{ _uiEnterpriseId :: !Text
, _uiPayload :: !User
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'UsersInsert' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'uiEnterpriseId'
--
-- * 'uiPayload'
usersInsert
:: Text -- ^ 'uiEnterpriseId'
-> User -- ^ 'uiPayload'
-> UsersInsert
usersInsert pUiEnterpriseId_ pUiPayload_ =
UsersInsert'
{ _uiEnterpriseId = pUiEnterpriseId_
, _uiPayload = pUiPayload_
}
-- | The ID of the enterprise.
uiEnterpriseId :: Lens' UsersInsert Text
uiEnterpriseId
= lens _uiEnterpriseId
(\ s a -> s{_uiEnterpriseId = a})
-- | Multipart request metadata.
uiPayload :: Lens' UsersInsert User
uiPayload
= lens _uiPayload (\ s a -> s{_uiPayload = a})
instance GoogleRequest UsersInsert where
type Rs UsersInsert = User
type Scopes UsersInsert =
'["https://www.googleapis.com/auth/androidenterprise"]
requestClient UsersInsert'{..}
= go _uiEnterpriseId (Just AltJSON) _uiPayload
androidEnterpriseService
where go
= buildClient (Proxy :: Proxy UsersInsertResource)
mempty
| rueshyna/gogol | gogol-android-enterprise/gen/Network/Google/Resource/AndroidEnterprise/Users/Insert.hs | mpl-2.0 | 3,649 | 0 | 14 | 813 | 394 | 240 | 154 | 62 | 1 |
func | True = x
| lspitzner/brittany | data/Test87.hs | agpl-3.0 | 16 | 0 | 7 | 5 | 12 | 5 | 7 | 1 | 1 |
{-# LANGUAGE RankNTypes #-}
module Haskell.Codewars.Church where
import Prelude hiding (succ, pred, fst, snd)
pred :: Number -> Number
pred (Nr n) = Nr $ \f x -> n (\h g -> g $ h f) (\_ -> x) id
subt :: Number -> Number -> Number
subt f (Nr g) = g pred f
-- cannot be placed in Preloaded right now as there is a bug in the codewars system
newtype Pair a b = Pr (forall c . (a -> b -> c) -> c)
instance (Show a, Show b) => Show (Pair a b) where
show (Pr p) = p (\ a b -> "(" ++ show a ++ "," ++ show b ++ ")")
pair :: a -> b -> Pair a b
pair f s = Pr (\ b -> b f s)
fst :: Pair a b -> a
fst (Pr p) = p const
snd :: Pair a b -> b
snd (Pr p) = p $ flip const
newtype Number = Nr (forall a. (a -> a) -> a -> a)
instance Show Number where
show (Nr a) = a ("1+" ++) "0"
--
instance Eq Number where
a == b = eval a == eval b
--
fold :: Number -> (a -> a) -> a -> a
fold (Nr n) s z = n s z
eval :: Number -> Integer
eval (Nr a) = a (+ 1) 0
zero :: Number
zero = Nr $ flip const
succ :: Number -> Number
succ (Nr a) = Nr (\ s z -> s (a s z))
add :: Number -> Number -> Number
add (Nr a) = a succ
mult :: Number -> Number -> Number
mult (Nr a) b = a (add b) zero
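-- Illustrative GHCi checks (not part of the kata itself), using 'eval' to
-- convert Church numerals back to Integers:
--
-- > eval (pred (succ (succ zero))) == 1
-- > eval (subt (succ (succ (succ zero))) (succ (succ zero))) == 1
-- > eval (mult (succ (succ zero)) (succ (succ (succ zero)))) == 6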
| ice1000/OI-codes | codewars/301-400/church-numbers-find-the-predecessor-and-subtract.hs | agpl-3.0 | 1,181 | 0 | 13 | 324 | 656 | 343 | 313 | 33 | 1 |
{-# LANGUAGE GADTs #-}
module OpenCog.Lib where
import OpenCog.AtomSpace
import Foreign.C
import Foreign.Ptr
foreign export ccall "someFunc"
c_func :: Ptr AtomSpaceRef -> UUID -> IO (UUID)
c_func = exportFunction someFunc
someFunc :: Atom -> AtomSpace Atom
someFunc a = pure a
| ceefour/atomspace | tests/haskell/executionTestLib/src/OpenCog/Lib.hs | agpl-3.0 | 285 | 0 | 9 | 49 | 82 | 44 | 38 | 10 | 1 |
module StearnsWharf.Materials where
import Data.Ratio (Ratio)
data Material = Wood {
emodulus,
mySigma,
myTau :: Double,
     glulam :: Bool,      -- ^ True for glued laminated timber (Norwegian: limtre)
stClass :: String -- ^ Strength Class
}
| Glulam {
emodulus, mySigma, myTau :: Double }
| Steel {
emodulus2, mySigma2, myTau2 :: Ratio Integer }
| Concrete {
emodulus :: Double }
deriving Show
data Stress = Stress { sigma, tau :: Double }
deriving Show
| baalbek/stearnswharf | src/StearnsWharf/Materials.hs | lgpl-3.0 | 722 | 0 | 9 | 384 | 116 | 78 | 38 | 17 | 0 |
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-|
Module : Haskoin.Keys.Common
Copyright : No rights reserved
License : MIT
Maintainer : [email protected]
Stability : experimental
Portability : POSIX
ECDSA private and public key functions.
-}
module Haskoin.Keys.Common
( -- * Public & Private Keys
PubKeyI(..)
, SecKeyI(..)
, exportPubKey
, importPubKey
, wrapPubKey
, derivePubKeyI
, wrapSecKey
, fromMiniKey
, tweakPubKey
, tweakSecKey
, getSecKey
, secKey
-- ** Private Key Wallet Import Format (WIF)
, fromWif
, toWif
) where
import Control.Applicative ((<|>))
import Control.DeepSeq
import Control.Monad (guard, mzero, (<=<))
import Crypto.Secp256k1
import Data.Aeson (FromJSON, ToJSON (..), Value (String),
parseJSON, withText)
import Data.Aeson.Encoding (unsafeToEncoding)
import Data.Binary (Binary (..))
import Data.ByteString (ByteString)
import qualified Data.ByteString as BS
import Data.ByteString.Builder (char7)
import Data.Bytes.Get
import Data.Bytes.Put
import Data.Bytes.Serial
import Data.Hashable
import Data.Maybe (fromMaybe)
import Data.Serialize (Serialize (..))
import Data.String (IsString, fromString)
import Data.String.Conversions (cs)
import GHC.Generics (Generic)
import Haskoin.Address.Base58
import Haskoin.Constants
import Haskoin.Crypto.Hash
import Haskoin.Util
-- | Elliptic curve public key type with expected serialized compression flag.
data PubKeyI = PubKeyI
{ pubKeyPoint :: !PubKey
, pubKeyCompressed :: !Bool
} deriving (Generic, Eq, Show, Read, Hashable, NFData)
instance IsString PubKeyI where
fromString str =
fromMaybe e $ eitherToMaybe . runGetS deserialize <=< decodeHex $ cs str
where
e = error "Could not decode public key"
instance ToJSON PubKeyI where
toJSON = String . encodeHex . runPutS . serialize
toEncoding s = unsafeToEncoding $
char7 '"' <>
hexBuilder (runPutL (serialize s)) <>
char7 '"'
instance FromJSON PubKeyI where
parseJSON = withText "PubKeyI" $
maybe mzero return . (eitherToMaybe . runGetS deserialize =<<) . decodeHex
instance Serial PubKeyI where
deserialize = s >>= \case
True -> c
False -> u
where
s = lookAhead $ getWord8 >>= \case
0x02 -> return True
0x03 -> return True
0x04 -> return False
_ -> fail "Not a public key"
c = do
bs <- getByteString 33
maybe (fail "Could not decode public key") return $
PubKeyI <$> importPubKey bs <*> pure True
u = do
bs <- getByteString 65
maybe (fail "Could not decode public key") return $
PubKeyI <$> importPubKey bs <*> pure False
serialize pk = putByteString $ exportPubKey (pubKeyCompressed pk) (pubKeyPoint pk)
instance Serialize PubKeyI where
put = serialize
get = deserialize
instance Binary PubKeyI where
put = serialize
get = deserialize
-- | Wrap a public key from the secp256k1 library, adding information about compression.
wrapPubKey :: Bool -> PubKey -> PubKeyI
wrapPubKey c p = PubKeyI p c
-- | Derives a public key from a private key. This function will preserve
-- compression flag.
derivePubKeyI :: SecKeyI -> PubKeyI
derivePubKeyI (SecKeyI d c) = PubKeyI (derivePubKey d) c
-- | Tweak a public key.
tweakPubKey :: PubKey -> Hash256 -> Maybe PubKey
tweakPubKey p h = tweakAddPubKey p =<< tweak (runPutS (serialize h))
-- | Elliptic curve private key type with expected public key compression
-- information. Compression information is stored in private key WIF formats and
-- needs to be preserved to generate the correct address from the corresponding
-- public key.
data SecKeyI = SecKeyI
{ secKeyData :: !SecKey
, secKeyCompressed :: !Bool
} deriving (Eq, Show, Read, Generic, NFData)
-- | Wrap private key with corresponding public key compression flag.
wrapSecKey :: Bool -> SecKey -> SecKeyI
wrapSecKey c d = SecKeyI d c
-- | Tweak a private key.
tweakSecKey :: SecKey -> Hash256 -> Maybe SecKey
tweakSecKey key h = tweakAddSecKey key =<< tweak (runPutS (serialize h))
-- | Decode Casascius mini private keys (22 or 30 characters).
fromMiniKey :: ByteString -> Maybe SecKeyI
fromMiniKey bs = do
guard checkShortKey
wrapSecKey False <$> secKey (runPutS (serialize (sha256 bs)))
where
checkHash = runPutS $ serialize $ sha256 $ bs `BS.append` "?"
checkShortKey = BS.length bs `elem` [22, 30] && BS.head checkHash == 0x00
-- | Decode private key from WIF (wallet import format) string.
fromWif :: Network -> Base58 -> Maybe SecKeyI
fromWif net wif = do
bs <- decodeBase58Check wif
-- Check that this is a private key
guard (BS.head bs == getSecretPrefix net)
case BS.length bs of
-- Uncompressed format
33 -> wrapSecKey False <$> secKey (BS.tail bs)
-- Compressed format
34 -> do
guard $ BS.last bs == 0x01
wrapSecKey True <$> secKey (BS.tail $ BS.init bs)
-- Bad length
_ -> Nothing
-- | Encode private key into a WIF string.
toWif :: Network -> SecKeyI -> Base58
toWif net (SecKeyI k c) =
encodeBase58Check . BS.cons (getSecretPrefix net) $
if c
then getSecKey k `BS.snoc` 0x01
else getSecKey k
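-- A round-trip sketch (illustrative only, not part of the library API):
-- encoding with 'toWif' and decoding with 'fromWif' should give back the
-- original key for any 'Network' value @net@ (e.g. a constant from
-- "Haskoin.Constants").
--
-- > wifRoundTrip :: Network -> SecKeyI -> Bool
-- > wifRoundTrip net k = fromWif net (toWif net k) == Just k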
| haskoin/haskoin | src/Haskoin/Keys/Common.hs | unlicense | 5,939 | 0 | 17 | 1,690 | 1,341 | 717 | 624 | -1 | -1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE GADTs #-}
module MyTypeFamilies where
import Data.IORef
import Control.Applicative
import Data.Foldable (forM_)
import Control.Concurrent.STM
import Control.Concurrent.MVar
class IOStore store where
newIO :: a -> IO (store a)
getIO :: store a -> IO a
putIO :: store a -> a -> IO ()
instance IOStore MVar where
newIO = newMVar
getIO = readMVar
putIO mvar a = modifyMVar_ mvar (return . const a)
instance IOStore IORef where
newIO = newIORef
getIO = readIORef
putIO ref a = modifyIORef' ref (const a)
type Present = String
storePresentsIO :: IOStore s => [Present] -> IO (s [Present])
storePresentsIO ps = do
store <- newIO []
forM_ ps $ \x -> do
old <- getIO store
putIO store (x : old)
return store
class Store store where
type StoreMonad store :: * -> *
new :: a -> (StoreMonad store) (store a)
get :: store a -> (StoreMonad store) a
put :: store a -> a -> (StoreMonad store) ()
instance Store IORef where
type StoreMonad IORef = IO
new = newIORef
get = readIORef
put ref a = modifyIORef ref (const a)
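-- A sketch of why the associated type family pays off: the same code can
-- now target stores living in different monads. The 'TVar' instance and the
-- generalised 'storePresents' below are illustrative additions (not part of
-- the original playground), relying only on the STM import already above.
instance Store TVar where
  type StoreMonad TVar = STM
  new = newTVar
  get = readTVar
  put ref a = writeTVar ref a
-- Usage: 'storePresents' runs in IO when instantiated at IORef, or in STM
-- (run with 'atomically') when instantiated at TVar.
storePresents :: (Store store, Monad (StoreMonad store))
              => [Present] -> StoreMonad store (store [Present])
storePresents ps = do
  store <- new []
  forM_ ps $ \x -> do
    old <- get store
    put store (x : old)
  return store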
| songpp/my-haskell-playground | src/MyTypeFamilies.hs | apache-2.0 | 1,143 | 0 | 13 | 253 | 427 | 218 | 209 | 39 | 1 |
addThree :: Int -> Int -> Int -> Int
addThree x y z = x + y + z
addThree' :: Int -> Int -> Int -> Int
addThree' = \x -> \y -> \z -> x + y + z
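-- Both definitions denote the same curried function: addThree 1 2 3 == 6
-- and ((addThree' 1) 2) 3 == 6. The second version merely writes out the
-- one-argument-at-a-time lambdas that currying implies.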
| EricYT/real-world | src/chapter-13/addThree.hs | apache-2.0 | 144 | 0 | 9 | 43 | 83 | 44 | 39 | 4 | 1 |
{-# LANGUAGE PackageImports #-}
-- | A generic caching interface.
--
-- The intent is to support many concrete implementations,
-- and to use specify caching policies using combinators.
--
-- Note that even though we _support_ many concrete implementations,
-- for simplicity we only provide one based on an association-list.
module Data.Cache where
import Control.Monad
import "mtl" Control.Monad.Trans
import Control.Monad.Trans.State
import Data.Maybe
-- $setup
-- >>> let verboseLength xs = liftIO (putStrLn xs) >> return (length xs)
-- >>> let cachedLength c xs = cached c xs (verboseLength xs)
-- >>> let testC c = mapM (cachedLength c) (words "one two one two testing testing")
-- Implementation notes: the `m` is a monad transformer stack, mostly StateT's,
-- holding the state of the cache. The combinators extend caches by adding more
-- state and code around the base object. This is analogous to the Decorator
-- pattern in OO, except each modification to `m` is visible in the type.
data Cache m k a = Cache
{ readCache :: k -> m (Maybe a)
, writeCache :: k -> a -> m Bool -- ^ False if full
, clearCache :: m ()
, clearFromCache :: k -> m ()
}
-- | Tries to avoid executing this computation in the future by storing it in
-- the cache.
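--
-- An illustrative doctest (uses the helpers from the $setup block above):
-- the second lookup hits the cache, so the slow computation and its logging
-- run only once.
--
-- >>> withAssocCache $ \c -> cachedLength c "ho" >> cachedLength c "ho"
-- ho
-- 2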
cached :: Monad m => Cache m k a -> k -> m a -> m a
cached c k computeSlowly = do
r <- readCache c k
case r of
Just x -> return x
Nothing -> do
x <- computeSlowly
_ <- writeCache c k x
return x
-- implementations
-- | A dummy cache which never caches anything.
--
-- Semantically equivalent to `finiteCache 0 $ assocCache`, except for the `m`.
--
-- >>> withNullCache testC
-- one
-- two
-- one
-- two
-- testing
-- testing
-- [3,3,3,3,7,7]
nullCache :: Monad m => Cache m k a
nullCache = Cache
{ readCache = \_ -> return Nothing
, writeCache = \_ _ -> return False -- always full!
, clearCache = return ()
, clearFromCache = \_ -> return ()
}
withNullCache :: Monad m => (Cache m k v -> m a) -> m a
withNullCache body = body nullCache
-- | A very inefficient example implementation.
--
-- >>> withAssocCache testC
-- one
-- two
-- testing
-- [3,3,3,3,7,7]
assocCache :: (Monad m, Eq k) => Cache (StateT [(k,a)] m) k a
assocCache = Cache
{ readCache = \k -> liftM (lookup k) $ get
, writeCache = \k v -> modify ((k,v):)
>> return True -- never full.
, clearCache = put []
, clearFromCache = \k -> modify $ filter $ (/= k) . fst
}
withAssocCache :: (Monad m, Eq k)
=> (Cache (StateT [(k,v)] m) k v -> StateT [(k,v)] m a)
-> m a
withAssocCache body = evalStateT (body assocCache) []
-- decorators
-- | Only cache the first `n` requests (use n=-1 for unlimited).
-- Combine with a cache policy in order to reuse those `n` slots.
--
-- >>> withAssocCache $ withFiniteCache 2 $ testC
-- one
-- two
-- testing
-- testing
-- [3,3,3,3,7,7]
--
-- >>> withAssocCache $ withFiniteCache 1 $ testC
-- one
-- two
-- two
-- testing
-- testing
-- [3,3,3,3,7,7]
--
-- >>> withAssocCache $ withFiniteCache 0 $ testC
-- one
-- two
-- one
-- two
-- testing
-- testing
-- [3,3,3,3,7,7]
--
-- >>> withAssocCache $ withFiniteCache (-1) $ testC
-- one
-- two
-- testing
-- [3,3,3,3,7,7]
finiteCache :: Monad m => Int -> Cache m k a -> Cache (StateT Int m) k a
finiteCache n c = Cache
{ readCache = \k -> (lift $ readCache c k )
, writeCache = \k v -> do
alreadyFull <- isFull
if alreadyFull
then return False
else do
r <- lift $ writeCache c k v
when r incr
return r
, clearCache = put 0 >> (lift $ clearCache c )
, clearFromCache = \k -> decr >> (lift $ clearFromCache c k )
}
where
isFull = liftM (== n) $ get
incr = modify (+1)
decr = modify (subtract 1)
withFiniteCache :: Monad m
=> Int
-> (Cache (StateT Int m) k v -> StateT Int m a)
-> (Cache m k v -> m a)
withFiniteCache n body c = evalStateT (body $ finiteCache n c) 0
-- | An example cache-policy implementation: Least-Recently-Used.
--
-- >>> withAssocCache $ withFiniteCache 2 $ withLruCache testC
-- one
-- two
-- testing
-- [3,3,3,3,7,7]
--
-- >>> withAssocCache $ withFiniteCache 1 $ withLruCache testC
-- one
-- two
-- one
-- two
-- testing
-- [3,3,3,3,7,7]
--
-- >>> withAssocCache $ withFiniteCache 0 $ withLruCache testC
-- one
-- two
-- one
-- two
-- testing
-- testing
-- [3,3,3,3,7,7]
lruCache :: (Monad m, Eq k) => Cache m k a -> Cache (StateT [k] m) k a
lruCache c = Cache
{ readCache = \k -> do
r <- lift $ readCache c k
when (isJust r) $ touch k
return r
, writeCache = \k v -> do
r <- lift $ writeCache c k v
if r
then return True
else do
makeRoom
lift $ writeCache c k v
, clearCache = (lift $ clearCache c )
, clearFromCache = \k -> remove k >> (lift $ clearFromCache c k )
}
where
touch k = write k
makeRoom = do
mostRecentlyUsed <- get
case mostRecentlyUsed of
(k:ks) -> do
put ks
lift $ clearFromCache c k
[] -> do
-- the cache is both full and empty? try to reset.
lift $ clearCache c
write k = modify (k:)
remove k = modify $ filter (/= k)
withLruCache :: (Monad m, Eq k)
=> (Cache (StateT [k] m) k v -> StateT [k] m a)
-> (Cache m k v -> m a)
withLruCache body c = evalStateT (body $ lruCache c) []
-- | An extreme version of the LRU strategy.
--
-- Semantically equivalent to `lruCache . finiteCache 1`, except for the `m`.
--
-- >>> withAssocCache $ withSingletonCache testC
-- one
-- two
-- one
-- two
-- testing
-- [3,3,3,3,7,7]
singletonCache :: (Monad m, Eq k) => Cache m k a -> Cache m k a
singletonCache c = c { writeCache = go }
where
go k mx = clearCache c >> writeCache c k mx
withSingletonCache :: (Monad m, Eq k)
=> (Cache m k v -> m a)
-> (Cache m k v -> m a)
withSingletonCache body c = body (singletonCache c)
| gelisam/hawk | src/Data/Cache.hs | apache-2.0 | 6,323 | 0 | 15 | 1,884 | 1,624 | 881 | 743 | 97 | 3 |
{-# LANGUAGE TemplateHaskell #-}
{-| Unittests for the job queue functionality.
-}
{-
Copyright (C) 2012, 2013 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Test.Ganeti.JQueue (testJQueue) where
import Control.Monad (when)
import Data.Char (isAscii)
import Data.List (nub, sort)
import System.Directory
import System.FilePath
import System.IO.Temp
import System.Posix.Files
import Test.HUnit
import Test.QuickCheck as QuickCheck
import Test.QuickCheck.Monadic
import Text.JSON
import Test.Ganeti.TestCommon
import Test.Ganeti.TestHelper
import Test.Ganeti.Types ()
import Ganeti.BasicTypes
import qualified Ganeti.Constants as C
import Ganeti.JQueue
import Ganeti.OpCodes
import Ganeti.Path
import Ganeti.Types as Types
import Test.Ganeti.JQueue.Objects (justNoTs, genQueuedOpCode, emptyJob,
genJobId)
{-# ANN module "HLint: ignore Use camelCase" #-}
-- * Test cases
-- | Tests default priority value.
case_JobPriorityDef :: Assertion
case_JobPriorityDef = do
ej <- emptyJob
assertEqual "for default priority" C.opPrioDefault $ calcJobPriority ej
-- | Test arbitrary priorities.
prop_JobPriority :: Property
prop_JobPriority =
forAll (listOf1 (genQueuedOpCode `suchThat`
(not . opStatusFinalized . qoStatus)))
$ \ops -> property $ do
jid0 <- makeJobId 0
let job = QueuedJob jid0 ops justNoTs justNoTs justNoTs Nothing Nothing
return $ calcJobPriority job ==? minimum (map qoPriority ops) :: Gen Property
-- | Tests default job status.
case_JobStatusDef :: Assertion
case_JobStatusDef = do
ej <- emptyJob
assertEqual "for job status" JOB_STATUS_SUCCESS $ calcJobStatus ej
-- | Test some job status properties.
prop_JobStatus :: Property
prop_JobStatus =
forAll genJobId $ \jid ->
forAll genQueuedOpCode $ \op ->
let job1 = QueuedJob jid [op] justNoTs justNoTs justNoTs Nothing Nothing
st1 = calcJobStatus job1
op_succ = op { qoStatus = OP_STATUS_SUCCESS }
op_err = op { qoStatus = OP_STATUS_ERROR }
op_cnl = op { qoStatus = OP_STATUS_CANCELING }
op_cnd = op { qoStatus = OP_STATUS_CANCELED }
-- computes status for a job with an added opcode before
st_pre_op pop = calcJobStatus (job1 { qjOps = pop:qjOps job1 })
-- computes status for a job with an added opcode after
st_post_op pop = calcJobStatus (job1 { qjOps = qjOps job1 ++ [pop] })
in conjoin
[ counterexample "pre-success doesn't change status"
(st_pre_op op_succ ==? st1)
, counterexample "post-success doesn't change status"
(st_post_op op_succ ==? st1)
, counterexample "pre-error is error"
(st_pre_op op_err ==? JOB_STATUS_ERROR)
, counterexample "pre-canceling is canceling"
(st_pre_op op_cnl ==? JOB_STATUS_CANCELING)
, counterexample "pre-canceled is canceled"
(st_pre_op op_cnd ==? JOB_STATUS_CANCELED)
]
-- | Tests job status equivalence with Python. Very similar to OpCodes test.
case_JobStatusPri_py_equiv :: Assertion
case_JobStatusPri_py_equiv = do
let num_jobs = 2000::Int
jobs <- genSample (vectorOf num_jobs $ do
num_ops <- choose (1, 5)
ops <- vectorOf num_ops genQueuedOpCode
jid <- genJobId
return $ QueuedJob jid ops justNoTs justNoTs justNoTs
Nothing Nothing)
let serialized = encode jobs
-- check for non-ASCII fields, usually due to 'arbitrary :: String'
mapM_ (\job -> when (any (not . isAscii) (encode job)) .
assertFailure $ "Job has non-ASCII fields: " ++ show job
) jobs
py_stdout <-
runPython "from ganeti import jqueue\n\
\from ganeti import serializer\n\
\import sys\n\
\job_data = serializer.Load(sys.stdin.read())\n\
\decoded = [jqueue._QueuedJob.Restore(None, o, False, False)\n\
\ for o in job_data]\n\
\encoded = [(job.CalcStatus(), job.CalcPriority())\n\
\ for job in decoded]\n\
\sys.stdout.buffer.write(serializer.Dump(encoded))" serialized
>>= checkPythonResult
let deserialised = decode py_stdout::Text.JSON.Result [(String, Int)]
decoded <- case deserialised of
Text.JSON.Ok jobs' -> return jobs'
Error msg ->
assertFailure ("Unable to decode jobs: " ++ msg)
-- this already raised an exception, but we need it
-- for proper types
>> fail "Unable to decode jobs"
assertEqual "Mismatch in number of returned jobs"
(length decoded) (length jobs)
mapM_ (\(py_sp, job) ->
let hs_sp = (jobStatusToRaw $ calcJobStatus job,
calcJobPriority job)
in assertEqual ("Different result after encoding/decoding for " ++
show job) hs_sp py_sp
) $ zip decoded jobs
-- | Tests listing of Job ids.
prop_ListJobIDs :: Property
prop_ListJobIDs = monadicIO $ do
let extractJobIDs :: (Show e, Monad m) => m (GenericResult e a) -> m a
extractJobIDs = (>>= genericResult (fail . show) return)
jobs <- pick $ resize 10 (listOf1 genJobId `suchThat` (\l -> l == nub l))
(e, f, g) <-
run . withSystemTempDirectory "jqueue-test-ListJobIDs." $ \tempdir -> do
empty_dir <- extractJobIDs $ getJobIDs [tempdir]
mapM_ (\jid -> writeFile (tempdir </> jobFileName jid) "") jobs
full_dir <- extractJobIDs $ getJobIDs [tempdir]
invalid_dir <- getJobIDs [tempdir </> "no-such-dir"]
return (empty_dir, sortJobIDs full_dir, invalid_dir)
_ <- stop $ conjoin [ counterexample "empty directory" $ e ==? []
, counterexample "directory with valid names" $
f ==? sortJobIDs jobs
, counterexample "invalid directory" $ isBad g
]
return ()
-- | Tests loading jobs from disk.
prop_LoadJobs :: Property
prop_LoadJobs = monadicIO $ do
ops <- pick $ resize 5 (listOf1 genQueuedOpCode)
jid <- pick genJobId
let job = QueuedJob jid ops justNoTs justNoTs justNoTs Nothing Nothing
job_s = encode job
-- check that jobs in the right directories are parsed correctly
(missing, current, archived, missing_current, broken) <-
run . withSystemTempDirectory "jqueue-test-LoadJobs." $ \tempdir -> do
let load a = loadJobFromDisk tempdir a jid
live_path = liveJobFile tempdir jid
arch_path = archivedJobFile tempdir jid
createDirectory $ tempdir </> jobQueueArchiveSubDir
createDirectory $ dropFileName arch_path
-- missing job
missing <- load True
writeFile live_path job_s
-- this should exist
current <- load False
removeFile live_path
writeFile arch_path job_s
-- this should exist (archived)
archived <- load True
-- this should be missing
missing_current <- load False
removeFile arch_path
writeFile live_path "invalid job"
broken <- load True
return (missing, current, archived, missing_current, broken)
_ <- stop $ conjoin [ missing ==? noSuchJob
, current ==? Ganeti.BasicTypes.Ok (job, False)
, archived ==? Ganeti.BasicTypes.Ok (job, True)
, missing_current ==? noSuchJob
, counterexample "broken job" (isBad broken)
]
return ()
-- | Tests computing job directories. Creates random directories,
-- files and stale symlinks in a directory, and checks that we return
-- \"the right thing\".
prop_DetermineDirs :: Property
prop_DetermineDirs = monadicIO $ do
count <- pick $ choose (2, 10)
nums <- pick $ genUniquesList count
(arbitrary::Gen (QuickCheck.Positive Int))
let (valid, invalid) = splitAt (count `div` 2) $
map (\(QuickCheck.Positive i) -> show i) nums
(tempdir, non_arch, with_arch, invalid_root) <-
run . withSystemTempDirectory "jqueue-test-DetermineDirs." $ \tempdir -> do
let arch_dir = tempdir </> jobQueueArchiveSubDir
createDirectory arch_dir
mapM_ (createDirectory . (arch_dir </>)) valid
mapM_ (\p -> writeFile (arch_dir </> p) "") invalid
mapM_ (\p -> createSymbolicLink "/dev/null/no/such/file"
(arch_dir </> p <.> "missing")) invalid
non_arch <- determineJobDirectories tempdir False
with_arch <- determineJobDirectories tempdir True
invalid_root <- determineJobDirectories (tempdir </> "no-such-subdir") True
return (tempdir, non_arch, with_arch, invalid_root)
let arch_dir = tempdir </> jobQueueArchiveSubDir
_ <- stop $ conjoin [ non_arch ==? [tempdir]
, sort with_arch ==?
sort (tempdir:map (arch_dir </>) valid)
, invalid_root ==? [tempdir </> "no-such-subdir"]
]
return ()
-- | Tests the JSON serialisation for 'InputOpCode'.
prop_InputOpCode :: MetaOpCode -> Int -> Property
prop_InputOpCode meta i =
conjoin [ readJSON (showJSON valid) ==? Text.JSON.Ok valid
, readJSON (showJSON invalid) ==? Text.JSON.Ok invalid
]
where valid = ValidOpCode meta
invalid = InvalidOpCode (showJSON i)
-- | Tests 'extractOpSummary'.
prop_extractOpSummary :: MetaOpCode -> Int -> Property
prop_extractOpSummary meta i =
conjoin [ counterexample "valid opcode" $
extractOpSummary (ValidOpCode meta) ==? summary
, counterexample "invalid opcode, correct object" $
extractOpSummary (InvalidOpCode jsobj) ==? summary
, counterexample "invalid opcode, empty object" $
extractOpSummary (InvalidOpCode emptyo) ==? invalid
, counterexample "invalid opcode, object with invalid OP_ID" $
extractOpSummary (InvalidOpCode invobj) ==? invalid
, counterexample "invalid opcode, not jsobject" $
extractOpSummary (InvalidOpCode jsinval) ==? invalid
]
where summary = opSummary (metaOpCode meta)
jsobj = showJSON $ toJSObject [("OP_ID",
showJSON ("OP_" ++ summary))]
emptyo = showJSON $ toJSObject ([]::[(String, JSValue)])
invobj = showJSON $ toJSObject [("OP_ID", showJSON False)]
jsinval = showJSON i
invalid = "INVALID_OP"
testSuite "JQueue"
[ 'case_JobPriorityDef
, 'prop_JobPriority
, 'case_JobStatusDef
, 'prop_JobStatus
, 'case_JobStatusPri_py_equiv
, 'prop_ListJobIDs
, 'prop_LoadJobs
, 'prop_DetermineDirs
, 'prop_InputOpCode
, 'prop_extractOpSummary
]
| mbakke/ganeti | test/hs/Test/Ganeti/JQueue.hs | bsd-2-clause | 12,016 | 0 | 19 | 3,121 | 2,505 | 1,289 | 1,216 | 203 | 2 |
module Handler.Profile
( getProfileR
, getEditProfileR
, postEditProfileR
) where
import Import
import Yesod.Auth -- TODO
import Yesod.Comments.Management -- TODO
import Data.Maybe (fromMaybe)
import Helpers.Profile
import Network.Gravatar
getProfileR :: Handler RepHtml
getProfileR = do
(Entity _ user) <- requireAuth
let username = fromMaybe "" $ userName user
let email = fromMaybe "" $ userEmail user
let pic = gravatar gravatarOpts email
defaultLayout $ do
setTitle "View profile"
$(widgetFile "profile/show")
where
gravatarOpts :: GravatarOptions
gravatarOpts = defaultConfig
{ gSize = Just $ Size 128
, gDefault = Just MM
}
getEditProfileR :: Handler RepHtml
getEditProfileR = do
(Entity _ u) <- requireAuth
((_, form), enctype) <- runFormPost $ profileForm u
defaultLayout $ do
setTitle "Edit profile"
$(widgetFile "profile/edit")
postEditProfileR :: Handler RepHtml
postEditProfileR = do
(Entity uid u) <- requireAuth
((res, _ ), _ ) <- runFormPost $ profileForm u
case res of
FormSuccess ef -> saveProfile uid ef
_ -> return ()
getEditProfileR
| pbrisbin/devsite | Handler/Profile.hs | bsd-2-clause | 1,269 | 0 | 12 | 368 | 358 | 179 | 179 | 38 | 2 |
module BrownPLT.TypedJS.ReachableStatements
( unreachableStatements
) where
import BrownPLT.TypedJS.Prelude
import qualified Data.Set as S
import qualified Data.Graph.Inductive as G
import Data.Graph.Inductive.Query.BFS (bfs)
import BrownPLT.JavaScript.Analysis
import BrownPLT.JavaScript.Analysis.Intraprocedural
import BrownPLT.JavaScript.Analysis.ANF
import qualified BrownPLT.TypedJS.Syntax as Stx
unreachableInGraph :: Graph -> [Stmt (Int, SourcePos)]
unreachableInGraph gr = map lab (S.toList nodes)
where nodes = (S.fromList $ G.nodes gr) `S.difference`
(S.fromList $ bfs (fst $ G.nodeRange gr) gr)
lab node = case G.lab gr node of
Just s -> s
Nothing -> error "unreachableInGraph: node without a statement"
unreachableStatements :: ([(Id, SourcePos)], [Stmt SourcePos])
-> [SourcePos]
unreachableStatements anf = map (snd.label) unreachableInANF
where unreachableInANF = concatMap unreachableInGraph graphs
(_, tree) = allIntraproceduralGraphs anf
graphs = map (\(_, _, gr) -> gr) (flatten tree)
| brownplt/strobe-old | src/BrownPLT/TypedJS/ReachableStatements.hs | bsd-2-clause | 1,101 | 0 | 14 | 214 | 322 | 185 | 137 | 23 | 2 |
module Distribution.Client.Dependency.Modular.Preference where
-- Reordering or pruning the tree in order to prefer or make certain choices.
import qualified Data.List as L
import qualified Data.Map as M
import Data.Monoid
import Data.Ord
import Distribution.Client.Dependency.Types
( PackageConstraint(..), PackagePreferences(..), InstalledPreference(..) )
import Distribution.Client.Types
( OptionalStanza(..) )
import Distribution.Client.Dependency.Modular.Dependency
import Distribution.Client.Dependency.Modular.Flag
import Distribution.Client.Dependency.Modular.Package
import Distribution.Client.Dependency.Modular.PSQ as P
import Distribution.Client.Dependency.Modular.Tree
import Distribution.Client.Dependency.Modular.Version
-- | Generic abstraction for strategies that just rearrange the package order.
-- Only packages that match the given predicate are reordered.
packageOrderFor :: (PN -> Bool) -> (PN -> I -> I -> Ordering) -> Tree a -> Tree a
packageOrderFor p cmp = trav go
where
go (PChoiceF v@(Q _ pn) r cs)
| p pn = PChoiceF v r (P.sortByKeys (flip (cmp pn)) cs)
| otherwise = PChoiceF v r cs
go x = x
-- | Ordering that treats preferred versions as greater than non-preferred
-- versions.
preferredVersionsOrdering :: VR -> Ver -> Ver -> Ordering
preferredVersionsOrdering vr v1 v2 =
compare (checkVR vr v1) (checkVR vr v2)
-- | Traversal that tries to establish package preferences (not constraints).
-- Works by reordering choice nodes.
preferPackagePreferences :: (PN -> PackagePreferences) -> Tree a -> Tree a
preferPackagePreferences pcs = packageOrderFor (const True) preference
where
preference pn i1@(I v1 _) i2@(I v2 _) =
let PackagePreferences vr ipref = pcs pn
in preferredVersionsOrdering vr v1 v2 `mappend` -- combines lexically
locationsOrdering ipref i1 i2
-- Note that we always rank installed before uninstalled, and later
-- versions before earlier, but we can change the priority of the
-- two orderings.
locationsOrdering PreferInstalled v1 v2 =
preferInstalledOrdering v1 v2 `mappend` preferLatestOrdering v1 v2
locationsOrdering PreferLatest v1 v2 =
preferLatestOrdering v1 v2 `mappend` preferInstalledOrdering v1 v2
-- | Ordering that treats installed instances as greater than uninstalled ones.
preferInstalledOrdering :: I -> I -> Ordering
preferInstalledOrdering (I _ (Inst _)) (I _ (Inst _)) = EQ
preferInstalledOrdering (I _ (Inst _)) _ = GT
preferInstalledOrdering _ (I _ (Inst _)) = LT
preferInstalledOrdering _ _ = EQ
-- | Compare instances by their version numbers.
preferLatestOrdering :: I -> I -> Ordering
preferLatestOrdering (I v1 _) (I v2 _) = compare v1 v2
-- | Helper function that tries to enforce a single package constraint on a
-- given instance for a P-node. Translates the constraint into a
-- tree-transformer that either leaves the subtree untouched, or replaces it
-- with an appropriate failure node.
processPackageConstraintP :: ConflictSet QPN -> I -> PackageConstraint -> Tree a -> Tree a
processPackageConstraintP c (I v _) (PackageConstraintVersion _ vr) r
| checkVR vr v = r
| otherwise = Fail c (GlobalConstraintVersion vr)
processPackageConstraintP c i (PackageConstraintInstalled _) r
| instI i = r
| otherwise = Fail c GlobalConstraintInstalled
processPackageConstraintP c i (PackageConstraintSource _) r
| not (instI i) = r
| otherwise = Fail c GlobalConstraintSource
processPackageConstraintP _ _ _ r = r
-- | Helper function that tries to enforce a single package constraint on a
-- given flag setting for an F-node. Translates the constraint into a
-- tree-transformer that either leaves the subtree untouched, or replaces it
-- with an appropriate failure node.
processPackageConstraintF :: Flag -> ConflictSet QPN -> Bool -> PackageConstraint -> Tree a -> Tree a
processPackageConstraintF f c b' (PackageConstraintFlags _ fa) r =
case L.lookup f fa of
Nothing -> r
Just b | b == b' -> r
| otherwise -> Fail c GlobalConstraintFlag
processPackageConstraintF _ _ _ _ r = r
-- | Helper function that tries to enforce a single package constraint on a
-- given flag setting for an F-node. Translates the constraint into a
-- tree-transformer that either leaves the subtree untouched, or replaces it
-- with an appropriate failure node.
processPackageConstraintS :: OptionalStanza -> ConflictSet QPN -> Bool -> PackageConstraint -> Tree a -> Tree a
processPackageConstraintS s c b' (PackageConstraintStanzas _ ss) r =
if not b' && s `elem` ss then Fail c GlobalConstraintFlag
else r
processPackageConstraintS _ _ _ _ r = r
-- | Traversal that tries to establish various kinds of user constraints. Works
-- by selectively disabling choices that have been ruled out by global user
-- constraints.
enforcePackageConstraints :: M.Map PN [PackageConstraint] -> Tree QGoalReasons -> Tree QGoalReasons
enforcePackageConstraints pcs = trav go
where
go (PChoiceF qpn@(Q _ pn) gr ts) =
let c = toConflictSet (Goal (P qpn) gr)
          -- compose the transformation functions for each of the relevant constraints
g = \ i -> foldl (\ h pc -> h . processPackageConstraintP c i pc) id
(M.findWithDefault [] pn pcs)
in PChoiceF qpn gr (P.mapWithKey g ts)
go (FChoiceF qfn@(FN (PI (Q _ pn) _) f) gr tr ts) =
let c = toConflictSet (Goal (F qfn) gr)
          -- compose the transformation functions for each of the relevant constraints
g = \ b -> foldl (\ h pc -> h . processPackageConstraintF f c b pc) id
(M.findWithDefault [] pn pcs)
in FChoiceF qfn gr tr (P.mapWithKey g ts)
go (SChoiceF qsn@(SN (PI (Q _ pn) _) f) gr tr ts) =
let c = toConflictSet (Goal (S qsn) gr)
          -- compose the transformation functions for each of the relevant constraints
g = \ b -> foldl (\ h pc -> h . processPackageConstraintS f c b pc) id
(M.findWithDefault [] pn pcs)
in SChoiceF qsn gr tr (P.mapWithKey g ts)
go x = x
-- | Prefer installed packages over non-installed packages, generally.
-- All installed packages or non-installed packages are treated as
-- equivalent.
preferInstalled :: Tree a -> Tree a
preferInstalled = packageOrderFor (const True) (const preferInstalledOrdering)
-- | Prefer packages with higher version numbers over packages with
-- lower version numbers, for certain packages.
preferLatestFor :: (PN -> Bool) -> Tree a -> Tree a
preferLatestFor p = packageOrderFor p (const preferLatestOrdering)
-- | Prefer packages with higher version numbers over packages with
-- lower version numbers, for all packages.
preferLatest :: Tree a -> Tree a
preferLatest = preferLatestFor (const True)
-- | Require installed packages.
requireInstalled :: (PN -> Bool) -> Tree (QGoalReasons, a) -> Tree (QGoalReasons, a)
requireInstalled p = trav go
where
go (PChoiceF v@(Q _ pn) i@(gr, _) cs)
| p pn = PChoiceF v i (P.mapWithKey installed cs)
| otherwise = PChoiceF v i cs
where
installed (I _ (Inst _)) x = x
installed _ _ = Fail (toConflictSet (Goal (P v) gr)) CannotInstall
go x = x
-- | Avoid reinstalls.
--
-- This is a tricky strategy. If a package version is installed already and the
-- same version is available from a repo, the repo version will never be chosen.
-- This would result in a reinstall (either destructively, or potentially,
-- shadowing). The old instance won't be visible or even present anymore, but
-- other packages might have depended on it.
--
-- TODO: It would be better to actually check the reverse dependencies of installed
-- packages. If they're not depended on, then reinstalling should be fine. Even if
-- they are, perhaps this should just result in trying to reinstall those other
-- packages as well. However, doing this all neatly in one pass would require to
-- change the builder, or at least to change the goal set after building.
avoidReinstalls :: (PN -> Bool) -> Tree (QGoalReasons, a) -> Tree (QGoalReasons, a)
avoidReinstalls p = trav go
where
go (PChoiceF qpn@(Q _ pn) i@(gr, _) cs)
| p pn = PChoiceF qpn i disableReinstalls
| otherwise = PChoiceF qpn i cs
where
disableReinstalls =
let installed = [ v | (I v (Inst _), _) <- toList cs ]
in P.mapWithKey (notReinstall installed) cs
notReinstall vs (I v InRepo) _
| v `elem` vs = Fail (toConflictSet (Goal (P qpn) gr)) CannotReinstall
notReinstall _ _ x = x
go x = x
-- | Always choose the first goal in the list next, abandoning all
-- other choices.
--
-- This is unnecessary for the default search strategy, because
-- it descends only into the first goal choice anyway,
-- but may still make sense to just reduce the tree size a bit.
firstGoal :: Tree a -> Tree a
firstGoal = trav go
where
go (GoalChoiceF xs) = casePSQ xs (GoalChoiceF xs) (\ _ t _ -> out t)
go x = x
-- Note that we keep empty choice nodes, because they mean success.
-- | Transformation that tries to make a decision on base as early as
-- possible. In nearly all cases, there's a single choice for the base
-- package. Also, fixing base early should lead to better error messages.
preferBaseGoalChoice :: Tree a -> Tree a
preferBaseGoalChoice = trav go
where
go (GoalChoiceF xs) = GoalChoiceF (P.sortByKeys preferBase xs)
go x = x
preferBase :: OpenGoal -> OpenGoal -> Ordering
preferBase (OpenGoal (Simple (Dep (Q [] pn) _)) _) _ | unPN pn == "base" = LT
preferBase _ (OpenGoal (Simple (Dep (Q [] pn) _)) _) | unPN pn == "base" = GT
preferBase _ _ = EQ
-- | Transformation that sorts choice nodes so that
-- child nodes with a small branching degree are preferred. As a
-- special case, choices with 0 branches will be preferred (as they
-- are immediately considered inconsistent), and choices with 1
-- branch will also be preferred (as they don't involve choice).
preferEasyGoalChoices :: Tree a -> Tree a
preferEasyGoalChoices = trav go
where
go (GoalChoiceF xs) = GoalChoiceF (P.sortBy (comparing choices) xs)
go x = x
-- | Transformation that tries to avoid making inconsequential
-- flag choices early.
deferDefaultFlagChoices :: Tree a -> Tree a
deferDefaultFlagChoices = trav go
where
go (GoalChoiceF xs) = GoalChoiceF (P.sortBy defer xs)
go x = x
defer :: Tree a -> Tree a -> Ordering
defer (FChoice _ _ True _) _ = GT
defer _ (FChoice _ _ True _) = LT
defer _ _ = EQ
-- | Variant of 'preferEasyGoalChoices'.
--
-- Only approximates the number of choices in the branches. Less accurate,
-- more efficient.
lpreferEasyGoalChoices :: Tree a -> Tree a
lpreferEasyGoalChoices = trav go
where
go (GoalChoiceF xs) = GoalChoiceF (P.sortBy (comparing lchoices) xs)
go x = x
-- | Variant of 'preferEasyGoalChoices'.
--
-- I first thought that using a paramorphism might be faster here,
-- but it doesn't seem to make any difference.
preferEasyGoalChoices' :: Tree a -> Tree a
preferEasyGoalChoices' = para (inn . go)
where
go (GoalChoiceF xs) = GoalChoiceF (P.map fst (P.sortBy (comparing (choices . snd)) xs))
go x = fmap fst x
| alphaHeavy/cabal | cabal-install/Distribution/Client/Dependency/Modular/Preference.hs | bsd-3-clause | 11,809 | 0 | 19 | 2,930 | 2,830 | 1,456 | 1,374 | 140 | 4 |
module Scurry.NetTask (
NetMsg(..),
netTask,
) where
import Prelude hiding (catch)
import Control.Concurrent
import Control.Concurrent.MVar
import Control.Exception
import Control.Monad
import qualified Data.ByteString.Lazy as B
import Network.Socket hiding (send, sendTo, recv, recvFrom)
import Network.Socket.ByteString
import Scurry.Crypto
import Scurry.Data.Network
import Scurry.Scurry
data NetMsg = Reject
| Join { pkey :: ScurryPubKey }
| Accept { pkey :: ScurryPubKey, skey :: EncKey }
| EncMsg { emsg :: B.ByteString }
deriving (Show)
netDbg :: String -> IO ()
-- netDbg = putStrLn
netDbg _ = do return ()
delayOneSec :: IO ()
delayOneSec = threadDelay (1 * 1000000)
-- | Kicks off read/write tasks on a socket
netTask :: MVar Scurry -> MVar () -> IO ()
netTask s _ = do
sck <- (readMVar s) >>= prepSocket
(r,w) <- spawn sck
catch (forever idle) $ \e -> do
let err = show (e :: SomeException)
netDbg $ "netTask: " ++ err
throwTo r e
throwTo w e
where
spawn sck = do
r <- forkIO $ netRead s sck
w <- forkIO $ netWrite s sck
return (r,w)
idle = do
delayOneSec
netDbg "netTask: tick"
netRead :: MVar Scurry -> Socket -> IO ()
netRead _ sck = genericCatch "netRead" (forever go)
where
go = netDbg "netRead: read" >> recvFrom sck 1600
netWrite :: MVar Scurry -> Socket -> IO ()
netWrite _ sck = genericCatch "netWrite" task
where
task = forever $ idle
idle = do
delayOneSec
netDbg "netWrite: tick"
prepSocket :: Scurry -> IO Socket
prepSocket c = do
s <- socket AF_INET Datagram defaultProtocol
setSocketOption s Broadcast 4
bindSocket s sockAddr
return s
where
sockAddr = SockAddrInet (fromIntegral . unIPPort . bindPort $ c)
(unIPV4Addr . bindAddr $ c)
genericCatch :: String -> IO () -> IO ()
genericCatch ident a = do
catch a (gc ident)
where
gc i e = do
let err = show (e :: SomeException)
netDbg $ i ++ ": " ++ err
| sw17ch/Scurry | src/Scurry/NetTask.hs | bsd-3-clause | 2,175 | 0 | 15 | 669 | 725 | 370 | 355 | 62 | 1 |
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# LANGUAGE MultiParamTypeClasses, TypeFamilies, OverloadedStrings, DataKinds, TypeOperators, TypeSynonymInstances, FlexibleInstances, DeriveGeneric #-}
module WebApi.ResponseSpec (spec) where
import GHC.Generics
import WebApi hiding (get, put, post)
import Test.Hspec
import qualified Network.Wai as Wai
import Test.Hspec.Wai (with, get, request, shouldRespondWith, matchStatus, (<:>), matchHeaders)
import Network.HTTP.Media.MediaType
import Network.HTTP.Types
import Data.Text
import qualified Data.Text.Lazy as L
import Data.Aeson (ToJSON (..))
--withApp :: SpecWith Wai.Application -> Spec
withApp :: SpecWith ((), Wai.Application) -> Spec
withApp = with (return respSpecApp)
respSpecApp :: Wai.Application
respSpecApp = serverApp serverSettings RespSpecImpl
data RespSpec
data RespSpecImpl = RespSpecImpl
data Out = Out { out :: Text }
deriving (Show, Eq, Generic)
data HOut = HOut { hOut :: Text }
deriving (Show, Eq, Generic)
data COut = COut { cOut :: Text }
deriving (Show, Eq, Generic)
data Err = Err { err :: Text }
deriving (Show, Eq, Generic)
instance ToJSON Err
instance ToJSON Out
instance ToHeader HOut
instance ToParam 'Cookie COut
instance ParamErrToApiErr Err where
toApiErr = const (Err "fail")
type ApiResp = Static "apiresp"
type ApiWithHeaders = Static "apih"
type ApiWithError = Static "apierror"
type TextCType = Static "text"
type LazyEncoding = Static "lazyencoding"
instance WebApi RespSpec where
type Apis RespSpec = '[ Route '[GET] ApiResp
, Route '[GET] ApiWithHeaders
, Route '[GET] ApiWithError
, Route '[GET] TextCType
, Route '[GET] LazyEncoding]
instance WebApiServer RespSpecImpl where
type ApiInterface RespSpecImpl = RespSpec
type HandlerM RespSpecImpl = IO
instance ApiContract RespSpec GET ApiResp where
type ApiOut GET ApiResp = Out
instance ApiContract RespSpec GET ApiWithHeaders where
type ApiOut GET ApiWithHeaders = Out
type HeaderOut GET ApiWithHeaders = HOut
type CookieOut GET ApiWithHeaders = COut
instance ApiContract RespSpec GET ApiWithError where
type ApiOut GET ApiWithError = Out
type ApiErr GET ApiWithError = Err
instance ApiContract RespSpec GET TextCType where
type ApiOut GET TextCType = L.Text
type ApiErr GET TextCType = L.Text
type ContentTypes GET TextCType = '[PlainText]
instance ApiContract RespSpec GET LazyEncoding where
type ApiOut GET LazyEncoding = Out
type ContentTypes GET LazyEncoding = '[DummyCType, JSON]
instance ApiHandler RespSpecImpl GET ApiResp where
handler _ _ = respond (Out "Done")
instance ApiHandler RespSpecImpl GET ApiWithHeaders where
handler _ _ = respondWith status200 (Out "Done") (HOut "header") (COut "cookie")
instance ApiHandler RespSpecImpl GET ApiWithError where
handler _ _ = do
-- raise should short circuit
_ <- (raise status500 (Err "fail") :: IO (Response GET ApiWithError))
-- raiseWith' _ -- (ApiError status500 (Err "fail") Nothing Nothing) -- :: ApiError GET ApiWithError)
-- which means respond will never get called
respond (Out "Done")
instance ApiHandler RespSpecImpl GET TextCType where
handler _ _ = respond "plaintext"
instance ApiHandler RespSpecImpl GET LazyEncoding where
handler _ _ = respond (Out "Done")
data DummyCType
instance Accept DummyCType where
contentType _ = "application" // "dummy"
instance Encode DummyCType a where
encode _ = error "Dummy content type not implemented"
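-- Note: the 'error' in the Encode instance above is deliberate. Since
-- DummyCType is listed before JSON for the "lazyencoding" route, the
-- "Response should get encoded lazily" spec below only succeeds if the
-- encoder is selected from the request's Accept header (application/json)
-- rather than being forced for every listed content type.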
spec :: Spec
spec = withApp $ describe "WebApi response" $ do
context "Simple Response" $ do
it "should be 200 ok" $ do
get "apiresp" `shouldRespondWith` 200
context "Response with response header and cookies" $ do
it "should be 200 ok" $ do
get "apih" `shouldRespondWith` "{\"out\":\"Done\"}" { matchHeaders = [ "hOut" <:> "header"
, "Set-Cookie" <:> "cOut=cookie"
, "Content-Type" <:> "application/json"]
, matchStatus = 200 }
context "Response with api error" $ do
it "should be 500 ok" $ do
get "apierror" `shouldRespondWith` 500
context "Response with text as content type" $ do
it "should be 200 ok" $ do
get "text" `shouldRespondWith` "plaintext" { matchHeaders = ["Content-Type" <:> "text/plain;charset=utf-8"]
, matchStatus = 200 }
context "Response should get encoded lazily" $ do
it "should be 200 ok" $ do
let h = [(hAccept, "application/json")]
request methodGet "lazyencoding" h "" `shouldRespondWith` "{\"out\":\"Done\"}" { matchHeaders = ["Content-Type" <:> "application/json"]
, matchStatus = 200 }
| byteally/webapi | webapi/tests/WebApi/ResponseSpec.hs | bsd-3-clause | 5,073 | 0 | 18 | 1,323 | 1,243 | 663 | 580 | -1 | -1 |
{-# LANGUAGE MultiWayIf #-}
module Games.Cribbage where
import Prelude as P
import qualified Data.Map as M
data Suit
= Hearts
| Clubs
| Diamonds
| Spades
deriving (Eq,Show)
data Rank
= Ace
| Two
| Three
| Four
| Five
| Six
| Seven
| Eight
| Nine
| Ten
| Jack
| Queen
| King
deriving (Eq,Ord,Enum,Show)
data Card = Card
{ rank :: Rank
, suit :: Suit
} deriving (Eq,Show)
points :: [Card] -> Int
points cs = sum
[ runs rm
, pairs rm
-- , fifteens fm
]
where
rm = rankGroups cs
-- Runs {{{
runs :: [(Rank,Int)] -> Int
runs = sum . map runPoints . adjacents
runPoints :: (Rank,Rank,Int) -> Int
runPoints (l,h,w)
| diff >= 2 = (diff + 1) * w
| otherwise = 0
where
diff = hInt - lInt
lInt = fromEnum l
hInt = fromEnum h
adjacents :: [(Rank,Int)] -> [(Rank,Rank,Int)]
adjacents rm = foldr f [] rm
where
f (r,i) = loop (r,i)
loop (r,i) im = case im of
[] -> [(r,r,i)]
(l,h,i') : rest
| r == pred l -> (r,h,i * i') : rest
| r == succ h -> (l,r,i * i') : rest
| otherwise -> (l,h,i) : loop (r,i) rest
-- }}}
-- Pairs {{{
pairs :: [(Rank,Int)] -> Int
pairs = sum . map pairPoints
pairPoints :: (Rank,Int) -> Int
pairPoints (_,i)
| i > 1 = (i `choose` 2) * 2
| otherwise = 0
-- n!
-- -------------
-- k! * (n - k)!
choose :: Int -> Int -> Int
n `choose` k = fac n `div` (fac k * fac (n - k))
fac :: Int -> Int
fac n
| n `elem` [0,1] = 1
| n > 1 = n * fac (n - 1)
| otherwise = error "negative factorial"
-- }}}
-- Fifteens {{{
-- TODO: count fifteens
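-- A sketch of how it could work (not wired into 'points' above): every
-- distinct subset of the hand whose card values sum to 15 scores 2 points;
-- aces count as 1 and face cards as 10.
cardValue :: Card -> Int
cardValue (Card r _) = min 10 (fromEnum r + 1)
fifteens :: [Card] -> Int
fifteens cs = 2 * length [ s | s <- subsets cs, sum (map cardValue s) == 15 ]
  where
    subsets []     = [[]]
    subsets (x:xs) = let rest = subsets xs in rest ++ map (x:) rest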
-- }}}
rankGroups :: [Card] -> [(Rank,Int)]
rankGroups cs = M.toList $ foldr f M.empty cs
where
f (Card r _) = flip M.alter r $ \mi -> case mi of
Just i -> Just (i + 1)
Nothing -> Just 1
testCards :: [Card]
testCards =
[ Card Ace Spades
, Card Two Hearts
, Card Two Diamonds
, Card Three Clubs
, Card Four Spades
]
| kylcarte/games | src/Games/Cribbage.hs | bsd-3-clause | 1,937 | 0 | 14 | 568 | 933 | 515 | 418 | 77 | 2 |
{-# LANGUAGE DeriveDataTypeable, TypeFamilies, PatternGuards, OverloadedStrings #-}
module Data.FieldML.Level1ToLevel2 (loadL2ModelFromURL) where
import qualified Data.FieldML.Level1Structure as L1
import qualified Data.FieldML.Level2Structure as L2
import Network.Curl
import Data.IORef
import System.FilePath
import Control.Monad
import Control.Monad.Error
import Control.Monad.State
import Control.Monad.Reader
import Control.Applicative
import Data.FieldML.Parser
import Data.FieldML.InitialModel
import Data.List
import Data.Typeable
import Data.Data
import Data.Default
import Data.Maybe
import Data.Generics.Uniplate.Data
import qualified Data.Map as M
import Data.Map ((!))
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as BSC
import qualified Data.Set as S
import qualified Data.Traversable as T
import qualified Data.Foldable as T
import Data.Monoid
import qualified Debug.Trace
-- | Loads an L2 model, and all dependencies, or returns an error.
loadL2ModelFromURL :: [String] -> String -> ErrorT String IO (L2.L2Model)
loadL2ModelFromURL incl mpath =
runLookupModel (loadL2ModelFromURL' incl mpath)
-- | The monad in which the overall model load occurs; it stores
-- a list of model URLs already loaded (with their cached L2 models)
-- and allows for model loading errors to occur. It also allows
-- lifted IO monad expressions.
data LookupModel a = LookupModel { unlookupModel :: StateT (M.Map BS.ByteString L2.L2Model) (ErrorT String IO) a }
instance Monad LookupModel where
return = LookupModel . return
(LookupModel a) >>= f = LookupModel (a >>= liftM unlookupModel f)
fail = LookupModel . lift . fail
instance Functor LookupModel where
fmap = liftM
instance Applicative LookupModel where
pure = return
(<*>) = ap
instance MonadIO LookupModel where
liftIO = LookupModel . lift . lift
instance MonadState LookupModel where
type StateType LookupModel = M.Map BS.ByteString L2.L2Model
get = LookupModel get
put = LookupModel . put
-- | Runs a LookupModel in the ErrorT String IO monad.
runLookupModel :: LookupModel a -> ErrorT String IO a
runLookupModel x = evalStateT (unlookupModel x) M.empty
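-- For intuition (an informal sketch, not a compiled test): because the cache is
-- threaded through the whole load, requesting the same model twice within one
-- runLookupModel only fetches and translates it once, e.g.
--
-- > runLookupModel (loadL2ModelFromURL' incl "a.fieldml" >> loadL2ModelFromURL' incl "a.fieldml")
--
-- where incl and "a.fieldml" are placeholder arguments.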
-- | Tries to load a particular model, either from the cache or by fetching
-- it from one of the specified include paths and parsing it. The
-- parse may recursively load further models.
loadL2ModelFromURL' :: [String] -> String -> LookupModel L2.L2Model
loadL2ModelFromURL' incl mpath = do
-- Try to find the model first...
maybe (fail $ "Could not find model " ++ mpath ++ " anywhere in the include paths " ++ show incl) return =<< (
firstJustM $ flip map incl $ \tryIncl -> do
let fullPath = tryIncl ++ mpath
alreadyLoaded <- M.lookup (BSC.pack fullPath) <$> get
case alreadyLoaded of
Just m -> return $ Just m
Nothing -> do
(r, v) <- liftIO $ curlGetString_ (tryIncl ++ mpath) []
if r == CurlOK
then do
nsc <- LookupModel . lift . ErrorT $ return (parseFieldML mpath v)
finalImpMap <- tryResolveAllImports incl nsc
mod <- LookupModel . lift $ translateL1ToL2 mpath nsc finalImpMap
modify (M.insert (BSC.pack fullPath) mod)
return $ Just mod
else
return Nothing)
-- | Attempts to load all imports mentioned in a L1 model.
tryResolveAllImports :: [String] -> L1.L1NamespaceContents -> LookupModel (M.Map BS.ByteString L2.L2Model)
tryResolveAllImports incl nsc = do
forM_ (nub . sort $ [v | L1.L1NSImport { L1.l1nsImportFrom = Just v} <- universeBi nsc]) $ \toImport -> do
lm <- loadL2ModelFromURL' incl (BSC.unpack toImport)
modify $ M.insert toImport lm
get
-- | Attempts to translate a parsed L1 model into L2. This function depends
-- on all imports being loaded already.
translateL1ToL2 :: String -> L1.L1NamespaceContents -> M.Map BS.ByteString L2.L2Model -> ErrorT String IO L2.L2Model
translateL1ToL2 mpath l1ns impmap =
flip runReaderT (handleL1SimpleSyntacticSugar (BSC.pack mpath) l1ns, mpath, impmap) $
flip evalStateT ((def :: ModelTranslationState) { mtsL2ToL1Map = M.singleton nsMain l1ns} ) $ do
-- Create a skeleton model, where all symbols except those that are
-- imported exist, but do not yet have their proper definitions set up
-- yet.
(l1ns, _, _) <- ask
buildSkeletonModel (L1.SrcSpan mpath 0 0 0 0) l1ns nsMain
-- Load all external imports into the model.
processExternalImports nsMain
-- Resolve all internal imports in the model.
processInternalImports nsMain
-- Fill out the model skeleton with the actual functions.
recursivelyTranslateModel nsMain
-- Repair references to temporary IDs for aliases...
fixAliasReferences nsMain
getL2Model
newtype DesugarTmp = DesugarTmp Int
desugarTmpName :: State DesugarTmp BS.ByteString
desugarTmpName = do
DesugarTmp sugId <- get
put (DesugarTmp (sugId + 1))
return $ BSC.pack ("_desugar_" ++ show sugId)
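-- For example (derived from the definition above; replicateM comes from the
-- already imported Control.Monad):
--
-- > evalState (replicateM 3 desugarTmpName) (DesugarTmp 0)
-- > == ["_desugar_0","_desugar_1","_desugar_2"]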
-- | Some L1 constructs can be translated to an equivalent canonical L1 that
-- doesn't use certain features. This function is run to simplify the L1
-- input slightly to make the L1 -> L2 translation slightly easier to write.
-- It makes the following transformations:
-- => import x as y => namespace y where import x
-- => import from x ... | x == own URI => import ...
-- => ensemble {...} as x => namespace x where ensemble {...}
handleL1SimpleSyntacticSugar :: BS.ByteString -> L1.L1NamespaceContents -> L1.L1NamespaceContents
handleL1SimpleSyntacticSugar self =
transformBi (g . f) . (\v -> evalState (transformBiM (
desugarL1Patterns <=<
desugarFCase <=<
desugarComplexLambda
) v) (DesugarTmp 0))
where
f ([email protected] { L1.l1nsSS = ss, L1.l1nsImportAs = Just impas}) =
L1.L1NSNamespace { L1.l1nsSS = ss, L1.l1nsNamespaceName = impas,
L1.l1nsNamespaceContents = L1.L1NamespaceContents [imp{L1.l1nsImportAs = Nothing}]
}
f (ens@(L1.L1NSEnsemble{ L1.l1nsSS = ss, L1.l1nsAs = Just x})) =
L1.L1NSNamespace { L1.l1nsSS = ss, L1.l1nsNamespaceName = x,
L1.l1nsNamespaceContents = L1.L1NamespaceContents [
ens { L1.l1nsAs = Nothing }
]}
f x = x
g ([email protected] { L1.l1nsImportFrom = Just impfrom })
| impfrom == self = imp { L1.l1nsImportFrom = Nothing }
g x = x
desugarFCase :: L1.L1Expression -> State DesugarTmp L1.L1Expression
desugarFCase (L1.L1ExFCase ss isClosed values) = do
tmpScopedID <- L1.L1ScopedID ss <$> desugarTmpName
return $ L1.L1ExLambda ss (L1.L1PatternBind ss tmpScopedID) (L1.L1ExCase ss (L1.L1ExBoundVariable ss tmpScopedID) values)
desugarFCase x = return x
desugarComplexLambda :: L1.L1Expression -> State DesugarTmp L1.L1Expression
desugarComplexLambda simpleLambda@(L1.L1ExLambda _ (L1.L1PatternBind _ _) _) = return simpleLambda
desugarComplexLambda (L1.L1ExLambda ss complexPattern value) =
return $
L1.L1ExFCase ss False (Right [(complexPattern, value)])
desugarComplexLambda x = return x
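-- Taken together, the two rewrites above turn a lambda over a complex pattern
-- into an fcase, and then into a simple lambda over a fresh temporary plus a
-- case on that temporary. Roughly, at the constructor level (a sketch, eliding
-- the State plumbing):
--
-- > L1ExLambda ss complexPat body
-- > ==> L1ExFCase ss False (Right [(complexPat, body)])
-- > ==> L1ExLambda ss (L1PatternBind ss tmp)
-- >       (L1ExCase ss (L1ExBoundVariable ss tmp) (Right [(complexPat, body)]))
--
-- where tmp is a fresh "_desugar_N" scoped identifier.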
pathGlobal :: L1.SrcSpan -> BS.ByteString -> L1.L1RelOrAbsPathPossiblyIntEnd
pathGlobal ss g = L1.L1RelOrAbsPathNoInt ss True (L1.L1RelPath ss [L1.L1Identifier ss g])
makeErrorExpression :: L1.SrcSpan -> BS.ByteString -> L1.L1Expression
makeErrorExpression ss v = (L1.L1ExApply ss
(L1.L1ExReference ss (pathGlobal ss "error"))
(L1.L1ExString ss v)
)
ignoreArgumentLambda :: L1.SrcSpan -> L1.L1Expression -> State DesugarTmp L1.L1Expression
ignoreArgumentLambda ss ex =
L1.L1ExLambda ss
<$> (L1.L1PatternBind ss <$> (L1.L1ScopedID ss <$> desugarTmpName))
<*> (pure ex)
desugarL1Patterns :: L1.L1Expression -> State DesugarTmp L1.L1Expression
desugarL1Patterns ex@(L1.L1ExCase ss expr (Right values)) =
-- We sequentially test each value for a match, and if they match, we then go ahead and try to extract the contents.
let
addPatternToCase otherwiseEx (pat, ifEx) =
L1.L1ExCase ss
<$> (testPatternUsing expr pat)
<*> (
(\x y -> Left [x, y])
<$> ((,) (pathGlobal ss "true") <$>
(ignoreArgumentLambda ss =<< patternToExtractLambdas expr pat ifEx))
<*> ((,) (pathGlobal ss "false") <$>
ignoreArgumentLambda ss otherwiseEx)
)
in
foldM addPatternToCase (makeErrorExpression ss $ "Nothing matched pattern at " <> (BSC.pack . show $ ss)) (reverse values)
desugarL1Patterns ex = return ex
testPatternUsing :: L1.L1Expression -> L1.L1Pattern -> State DesugarTmp L1.L1Expression
testPatternUsing _ (L1.L1PatternIgnore ss) = return $ L1.L1ExReference ss (pathGlobal ss "true")
testPatternUsing _ (L1.L1PatternBind ss _) = return $ L1.L1ExReference ss (pathGlobal ss "true")
testPatternUsing testEx (L1.L1PatternAs ss label pattern) = do
lambdaScoped <- L1.L1ScopedID ss <$> desugarTmpName
subPatternTest <- testPatternUsing (L1.L1ExBoundVariable ss lambdaScoped) pattern
return $ L1.L1ExIsLabel ss label testEx
testPatternUsing testEx (L1.L1PatternProduct ss []) =
return $ (L1.L1ExReference ss (pathGlobal ss "true"))
testPatternUsing testEx (L1.L1PatternProduct ss args) = do
let
testProductPart (label, pat) = testPatternUsing (L1.L1ExApply ss (L1.L1ExProject ss label) testEx) pat
lastTest <- (testProductPart (last args))
foldM (\exOther prodThis -> L1.L1ExApply ss (L1.L1ExApply ss (L1.L1ExReference ss (pathGlobal ss "&&")) exOther)
<$> testProductPart prodThis
) lastTest (init args)
patternToExtractLambdas :: L1.L1Expression -> L1.L1Pattern -> L1.L1Expression -> State DesugarTmp L1.L1Expression
patternToExtractLambdas testEx (L1.L1PatternIgnore ss) ifEx = pure ifEx
patternToExtractLambdas testEx (L1.L1PatternBind ss svar) ifEx =
return $ L1.L1ExApply ss (L1.L1ExLambda ss (L1.L1PatternBind ss svar) ifEx) testEx
patternToExtractLambdas testEx (L1.L1PatternAs ss label pattern) ifEx =
patternToExtractLambdas (L1.L1ExUnmkUnion ss label testEx) pattern ifEx
patternToExtractLambdas _ (L1.L1PatternProduct ss []) ifEx = return ifEx
patternToExtractLambdas testEx (L1.L1PatternProduct ss args) ifEx =
foldM (\oldIfEx (label, pattern) ->
patternToExtractLambdas (L1.L1ExApply ss (L1.L1ExProject ss label) testEx) pattern oldIfEx)
ifEx args
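-- As a concrete illustration (a constructor-level sketch, worked out by hand
-- from the equations above): for a product pattern with a single field lab
-- whose sub-pattern binds the scoped variable v, extraction wraps the
-- right-hand side in an immediately applied lambda,
--
-- > patternToExtractLambdas e (L1PatternProduct ss [(lab, L1PatternBind ss v)]) rhs
-- > ==> L1ExApply ss (L1ExLambda ss (L1PatternBind ss v) rhs)
-- >       (L1ExApply ss (L1ExProject ss lab) e)
--
-- so rhs is evaluated with v bound to the lab field of e.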
-- | The ModelTranslation monad carries all state and reader information needed to
-- translate an L1 model into an L2 model.
type ModelTranslation a = StateT ModelTranslationState
(ReaderT (L1.L1NamespaceContents, String, M.Map BS.ByteString L2.L2Model)
(ErrorT String IO)) a
-- | ModelTranslationState carries all information needed to describe the current state
-- of the model translation from L1 -> L2 in progress.
data ModelTranslationState = ModelTranslationState {
mtsL2Model :: L2.L2Model, -- ^ The L2 model, so far.
-- | Maps L2 namespace IDs to L1 contents.
-- Note: This may be unnecessary in the two pass system.
mtsL2ToL1Map :: M.Map L2.L2NamespaceID L1.L1NamespaceContents,
mtsForeignToLocalNS :: ForeignToLocal L2.L2NamespaceID,
mtsForeignToLocalDomains ::
ForeignToLocal L2.L2DomainID,
mtsForeignToLocalValues ::
ForeignToLocal L2.L2ValueID,
mtsForeignToLocalBaseUnits ::
ForeignToLocal L2.L2BaseUnitsID,
mtsForeignToLocalClass ::
ForeignToLocal L2.L2ClassID,
mtsForeignToLocalScopedValue ::
ForeignToLocal L2.L2ScopedValueID,
mtsForeignToLocalScopedUnit ::
ForeignToLocal L2.L2ScopedUnitID,
mtsForeignToLocalScopedDomain ::
ForeignToLocal L2.L2ScopedDomainID,
mtsForeignToLocalDomainFunction ::
ForeignToLocal L2.L2DomainFunctionID,
mtsForeignToLocalClassValue ::
ForeignToLocal L2.L2ClassValueID,
-- | Temporary IDs are used during translation for aliases, before we are
-- ready to actually process the alias. Since they are used in types
-- that normally carry permanent IDs, they are negative to distinguish
-- them.
mtsNextTempId :: Int,
-- | Maps temporary IDs to a domain type.
mtsTempIDDomainType :: M.Map L2.L2DomainID L2.L2DomainExpression,
-- | Maps temporary IDs to a unit expression.
mtsTempIDUnitEx :: M.Map L2.L2BaseUnitsID L2.L2UnitExpression
}
instance Default ModelTranslationState where
def = ModelTranslationState {
mtsL2Model = initialModel,
mtsL2ToL1Map = M.empty,
mtsForeignToLocalNS = M.empty,
mtsForeignToLocalDomains = M.empty,
mtsForeignToLocalValues = M.empty,
mtsForeignToLocalBaseUnits = M.empty,
mtsForeignToLocalClass = M.empty,
mtsForeignToLocalScopedUnit = M.empty,
mtsForeignToLocalScopedValue = M.empty,
mtsForeignToLocalDomainFunction = M.empty,
mtsForeignToLocalClassValue = M.empty,
mtsForeignToLocalScopedDomain = M.empty,
mtsTempIDDomainType = M.empty,
mtsTempIDUnitEx = M.empty,
mtsNextTempId = -1
}
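-- To make the temporary-ID mechanism concrete (a reading of the surrounding
-- code, not new behaviour): buildSkeletonModel hands each alias-like domain a
-- placeholder of the shape
--
-- > L2.L2DomainReference ss (L2.L2DomainID (-1))
--
-- with mtsNextTempId then moving on to -2, -3, and so on; translateNSContents
-- later records the real expression under that key in mtsTempIDDomainType, and
-- fixAliasReferences rewrites the placeholder references to point at it.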
-- | The type of a map from a foreign URL (model identifier) and ID to a local
-- identifier. Used to keep track of whether a foreign symbol has already
-- been imported.
type ForeignToLocal a = M.Map (L2.Identifier, a) a
-- | Scope information is carried down expression trees, but not across to
-- different branches (unlike the other state).
data ScopeInformation = ScopeInformation {
siValueIDMap :: M.Map L1.L1ScopedID L2.L2ScopedValueID,
siUnitIDMap :: M.Map L1.L1ScopedID L2.L2ScopedUnitID,
siDomainIDMap :: M.Map L1.L1ScopedID L2.L2ScopedDomainID
} deriving (Eq, Ord, Show)
instance Default ScopeInformation where
def = ScopeInformation M.empty M.empty M.empty
-- | Creates placeholder entries for every symbol defined in the given L1
-- namespace contents, so that later passes can refer to them before their
-- definitions have been translated.
buildSkeletonModel :: L1.SrcSpan -> L1.L1NamespaceContents -> L2.L2NamespaceID -> ModelTranslation ()
buildSkeletonModel ss nsc@(L1.L1NamespaceContents c) myNSID = do
let dtRef ss x = L2.L2DomainReference ss x
let dummyDT ss = dtRef ss (L2.L2DomainID 0)
let dummyCVC ss = L2.L2ClassValueContents ss (dummyDT ss)
-- TODO - check for name conflicts.
forM_ c $ \nsel ->
case nsel of
L1.L1NSNamespace { L1.l1nsSS = nsss, L1.l1nsNamespaceName = L1.L1Identifier _ nsname,
L1.l1nsNamespaceContents = newnsc } -> do
newNSID <- registerNewNamespace nsss myNSID newnsc
newLabel <- L2.l2nsNextLabel <$> getNamespaceContents "bsm nextLabel" myNSID
-- Debug.Trace.trace ("Adding Label#" ++ show newLabel ++ " into " ++ show myNSID ++ " for " ++ (show nsname)) (return ())
modifyNamespaceContents myNSID $ \l2nsc -> l2nsc {
L2.l2nsNamespaces = M.insert nsname newNSID (L2.l2nsNamespaces l2nsc),
L2.l2nsLabels = M.insert nsname (L2.L2Label myNSID (fromIntegral newLabel)) (L2.l2nsLabels l2nsc),
L2.l2nsNextLabel = newLabel + 1
}
tmpId <- mtsNextTempId <$> get
modify $ \mts -> mts { mtsNextTempId = tmpId - 1 }
modifyNamespaceContents myNSID $ \l2nsc -> l2nsc {
L2.l2nsDomains = M.insert nsname (dtRef nsss $ L2.L2DomainID tmpId) (L2.l2nsDomains l2nsc)
}
buildSkeletonModel nsss newnsc newNSID
L1.L1NSDomain { L1.l1nsSS = nsss, L1.l1nsDomainName = L1.L1Identifier _ nsname,
L1.l1nsNamespaceContents = newnsc,
L1.l1nsDomainDefinition = dd } -> do
newNSID <- registerNewNamespace nsss myNSID newnsc
newLabel <- L2.l2nsNextLabel <$> getNamespaceContents "bsm domain nextlabel" myNSID
modifyNamespaceContents myNSID $ \l2nsc -> l2nsc {
L2.l2nsNamespaces = M.insert nsname newNSID (L2.l2nsNamespaces l2nsc),
L2.l2nsLabels = M.insert nsname (L2.L2Label myNSID (fromIntegral newLabel)) (L2.l2nsLabels l2nsc),
L2.l2nsNextLabel = newLabel + 1
}
buildSkeletonModel nsss newnsc newNSID
case dd of
L1.L1DomainDefDomainType ddss _ -> do
-- It is an alias, but we aren't ready to process the alias yet,
-- so we allocate a temporary ID...
tmpId <- mtsNextTempId <$> get
modify $ \mts -> mts { mtsNextTempId = tmpId - 1 }
modifyNamespaceContents myNSID $ \l2nsc -> l2nsc {
L2.l2nsDomains = M.insert nsname (dtRef ddss $ L2.L2DomainID tmpId) (L2.l2nsDomains l2nsc)
}
_ -> do
-- It's a clonelike domain, so allocate an actual domain ID...
newDomainID <- L2.l2NextDomain <$> getL2Model
modifyL2Model $ \mod -> mod {
L2.l2NextDomain = (\(L2.L2DomainID i) -> L2.L2DomainID (i + 1)) newDomainID,
L2.l2AllDomains = M.insert newDomainID (L2.L2ClonelikeDomainContents nsss (dummyDT nsss) L2.L2DomainClone)
(L2.l2AllDomains mod)
}
modifyNamespaceContents myNSID $ \l2nsc -> l2nsc {
L2.l2nsDomains = M.insert nsname (dtRef nsss newDomainID) (L2.l2nsDomains l2nsc)
}
L1.L1NSNamedValue { L1.l1nsSS = nsss, L1.l1nsValueName = L1.L1Identifier _ nvname } -> do
newValueID <- L2.l2NextValue <$> getL2Model
modifyL2Model $ \mod -> mod {
L2.l2NextValue = (\(L2.L2ValueID i) -> L2.L2ValueID (i + 1)) newValueID,
L2.l2AllValues = M.insert newValueID (L2.L2ValueContents nsss Nothing)
(L2.l2AllValues mod)
}
modifyNamespaceContents myNSID $ \l2nsc -> l2nsc {
L2.l2nsNamedValues = M.insert nvname newValueID (L2.l2nsNamedValues l2nsc)
}
L1.L1NSClass { L1.l1nsSS = nsss, L1.l1nsClassName = L1.L1Identifier _ clname, L1.l1nsClassDomainFunctions = dfs,
L1.l1nsClassValues = cvs, L1.l1nsClassParameters = p } -> do
l2dfs <- forM dfs $ \(L1.L1Identifier dfss dfname, n) -> do
newDFID <- L2.l2NextDomainFunctionID <$> getL2Model
modifyL2Model $ \mod -> mod {
L2.l2NextDomainFunctionID =
(\(L2.L2DomainFunctionID i) -> L2.L2DomainFunctionID (i + 1)) newDFID,
L2.l2AllDomainFunctions =
M.insert newDFID (L2.L2DomainFunctionContents dfss n) (L2.l2AllDomainFunctions mod)
}
return (dfname, newDFID)
l2cvs <- forM cvs $ \(L1.L1Identifier cvss cvname, _) -> do
newCVID <- L2.l2NextClassValueID <$> getL2Model
modifyL2Model $ \mod -> mod {
L2.l2NextClassValueID =
(\(L2.L2ClassValueID i) -> L2.L2ClassValueID (i + 1)) newCVID,
L2.l2AllClassValues =
M.insert newCVID (dummyCVC cvss) (L2.l2AllClassValues mod)
}
return (cvname, newCVID)
newClassID <- L2.l2NextClassID <$> getL2Model
l2p <- mapM (\(L1.L1ScopedID { L1.l1ScopedIdBS = sdName }, k) ->
(,)
<$> (do
newSDID <- L2.l2NextScopedDomainID <$> getL2Model
modifyL2Model $ \mod -> mod {
L2.l2NextScopedDomainID =
(\(L2.L2ScopedDomainID _ i) -> L2.L2ScopedDomainID "unnamed!" (i + 1))
newSDID
}
return (newSDID { L2.l2SDIDName = sdName })
) <*> pure k) p
modifyL2Model $ \mod -> mod {
L2.l2NextClassID = (\(L2.L2ClassID i) -> L2.L2ClassID (i + 1)) newClassID,
L2.l2AllClasses = M.insert newClassID (L2.L2ClassContents nsss l2p (M.fromList l2dfs) (M.fromList l2cvs))
(L2.l2AllClasses mod)
}
modifyNamespaceContents myNSID $ \l2nsc -> l2nsc {
L2.l2nsClasses = M.insert clname newClassID (L2.l2nsClasses l2nsc),
L2.l2nsDomainFunctions = foldl' (\s (dfname, dfid) -> M.insert dfname dfid s) (L2.l2nsDomainFunctions l2nsc) l2dfs,
L2.l2nsClassValues = foldl' (\s (cvname, cvid) -> M.insert cvname cvid s) (L2.l2nsClassValues l2nsc) l2cvs
}
L1.L1NSEnsemble { L1.l1nsLabels = labs } ->
forM_ labs $ \(L1.L1Identifier labss lab) -> do
modifyNamespaceContents myNSID $ \l2nsc ->
let
newLabel = L2.l2nsNextLabel l2nsc
in
l2nsc { L2.l2nsNextLabel = newLabel + 1,
L2.l2nsLabels = M.insert lab (L2.L2Label myNSID (fromIntegral newLabel)) (L2.l2nsLabels l2nsc) }
L1.L1NSUnit { L1.l1nsSS = nsss, L1.l1nsUnitName = L1.L1Identifier _ uname } -> do
tmpId <- mtsNextTempId <$> get
modify $ \mts -> mts { mtsNextTempId = tmpId - 1 }
modifyNamespaceContents myNSID $ \l2nsc -> l2nsc {
L2.l2nsUnits = M.insert uname (L2.L2UnitExRef nsss (L2.L2BaseUnitsID tmpId))
(L2.l2nsUnits l2nsc)
}
_ -> return ()
-- | Recursively imports symbols into namespaces from other models (leaving local
-- imports alone).
-- ns is the namespace we are requesting be processed now.
-- This imports everything (not just namespaces) required in this namespace from
-- other files, allowing all further processing to focus on the contents of this file.
processExternalImports :: L2.L2NamespaceID -> ModelTranslation ()
processExternalImports ns = do
ModelTranslationState { mtsL2Model = m, mtsL2ToL1Map = nsmap } <- get
let Just (L1.L1NamespaceContents nsc) = M.lookup ns nsmap
forM_ nsc $ \st -> case st of
L1.L1NSImport { L1.l1nsSS = ss,
L1.l1nsImportFrom = Just from,
L1.l1nsImportPath = path,
L1.l1nsImportWhat = what,
L1.l1nsImportHiding = hiding } -> do
-- Find the model we are importing from...
(_, _, impMap) <- ask
impMod <-
maybe (fail $ "Reference to imported model " ++ (BSC.unpack from) ++ " at " ++
(show ss) ++ ", but that model does not appear to have been loaded successfully.")
return
(M.lookup from impMap)
let L1.L1RelOrAbsPath pathSS isAbs rpath = path
when (not isAbs) . fail $ "Import at " ++ (show pathSS) ++
" uses both a from clause and a relative path, which is invalid."
impNS <- findNamespaceInL2UsingL1Path nsMain impMod rpath
symList <- importListFromWhatAndHiding impNS impMod what hiding
mapM_ (recursivelyImportExternalSymbol impMod from impNS ns) symList
L1.L1NSNamespace { L1.l1nsNamespaceName = L1.L1Identifier _ nsid, L1.l1nsNamespaceContents = nsc} -> do
Just childNSID <- findExactNamespace ns nsid
processExternalImports childNSID
L1.L1NSDomain { L1.l1nsDomainName = L1.L1Identifier _ nsid, L1.l1nsNamespaceContents = nsc} -> do
Just childNSID <- findExactNamespace ns nsid
processExternalImports childNSID
_ -> return ()
-- | Pulls in a symbol from another model, along with everything needed for that
-- symbol to be used.
recursivelyImportExternalSymbol :: L2.L2Model -> L2.Identifier -> L2.L2NamespaceID -> L2.L2NamespaceID -> L1.L1Identifier -> ModelTranslation ()
recursivelyImportExternalSymbol foreignMod foreignURL foreignNS localNS ident = do
let foreignNSC = (L2.l2AllNamespaces foreignMod) ! foreignNS
L1.L1Identifier identSS identName = ident
tryLookupImportAndRegister :: (L2.L2NamespaceContents -> M.Map L2.Identifier d) ->
(M.Map L2.Identifier d -> L2.L2NamespaceContents -> L2.L2NamespaceContents) ->
(L2.L2Model -> BS.ByteString -> d ->
ModelTranslation d) -> Maybe (ModelTranslation ())
tryLookupImportAndRegister getMap setMap doImport = do
d <- M.lookup identName (getMap foreignNSC)
return $ do
dl <- doImport foreignMod foreignURL d
modifyNamespaceContents localNS $ \localNSC ->
setMap (M.insert identName dl (getMap localNSC)) localNSC
fromMaybe (fail $ "Internal error: unexpected unknown symbol " ++ (show identName) ++
" from " ++ (show identSS) ++ " in recursivelyImportExternalSymbol") $
msum $ [
tryLookupImportAndRegister L2.l2nsNamespaces (\x mod -> mod{L2.l2nsNamespaces=x}) recursivelyImportExternalNS,
tryLookupImportAndRegister L2.l2nsDomains (\x mod -> mod{L2.l2nsDomains=x}) recursivelyImportExternalDomainExpression,
tryLookupImportAndRegister L2.l2nsNamedValues (\x mod -> mod{L2.l2nsNamedValues=x}) recursivelyImportExternalValue,
tryLookupImportAndRegister L2.l2nsClassValues (\x mod -> mod{L2.l2nsClassValues=x})
recursivelyImportExternalClassValue,
tryLookupImportAndRegister L2.l2nsUnits (\x mod -> mod{L2.l2nsUnits=x}) recursivelyImportExternalUnitExpression,
tryLookupImportAndRegister L2.l2nsClasses (\x mod -> mod{L2.l2nsClasses=x}) recursivelyImportExternalClass,
tryLookupImportAndRegister L2.l2nsDomainFunctions (\x mod -> mod{L2.l2nsDomainFunctions=x})
recursivelyImportExternalDomainFunction,
tryLookupImportAndRegister L2.l2nsLabels (\x mod -> mod{L2.l2nsLabels=x})
recursivelyImportExternalLabel
]
-- | A utility function used when importing external symbols: it checks the cache
-- first, performs the import if the symbol isn't found there, and saves the result in the cache.
cacheWrapExternalImport :: Ord d =>
(ModelTranslationState -> ForeignToLocal d) ->
(ForeignToLocal d -> ModelTranslationState -> ModelTranslationState) ->
(L2.L2Model -> L2.Identifier -> d -> ModelTranslation d) ->
L2.L2Model -> L2.Identifier -> d -> ModelTranslation d
cacheWrapExternalImport getMap setMap f foreignMod foreignURL target = do
st <- get
case M.lookup (foreignURL, target) (getMap st) of
Just localHit -> return localHit
Nothing -> do
r <- f foreignMod foreignURL target
st' <- get
-- Insert into the post-import map from st', so cache entries added during the import are kept.
put (setMap (M.insert (foreignURL, target) r (getMap st')) st')
return r
recursivelyImportExternalNSContents :: L2.L2Model -> L2.Identifier -> L2.L2NamespaceContents -> ModelTranslation L2.L2NamespaceContents
recursivelyImportExternalNSContents foreignMod foreignURL foreignNSC =
L2.L2NamespaceContents (L2.l2nsSrcSpan foreignNSC) <$>
T.mapM (recursivelyImportExternalNS foreignMod foreignURL) (L2.l2nsNamespaces foreignNSC) <*>
T.mapM (recursivelyImportExternalDomainExpression foreignMod foreignURL) (L2.l2nsDomains foreignNSC) <*>
T.mapM (recursivelyImportExternalValue foreignMod foreignURL) (L2.l2nsNamedValues foreignNSC) <*>
T.mapM (recursivelyImportExternalClassValue foreignMod foreignURL) (L2.l2nsClassValues foreignNSC) <*>
T.mapM (recursivelyImportExternalUnitExpression foreignMod foreignURL) (L2.l2nsUnits foreignNSC) <*>
T.mapM (recursivelyImportExternalClass foreignMod foreignURL) (L2.l2nsClasses foreignNSC) <*>
T.mapM (recursivelyImportExternalDomainFunction foreignMod foreignURL) (L2.l2nsDomainFunctions foreignNSC) <*>
T.mapM (recursivelyImportExternalLabel foreignMod foreignURL) (L2.l2nsLabels foreignNSC) <*>
recursivelyImportExternalNS foreignMod foreignURL (L2.l2nsParent foreignNSC) <*>
(pure (L2.l2nsNextLabel foreignNSC))
recursivelyImportExternalNS :: L2.L2Model -> L2.Identifier -> L2.L2NamespaceID -> ModelTranslation L2.L2NamespaceID
recursivelyImportExternalNS =
cacheWrapExternalImport mtsForeignToLocalNS (\x m -> m {mtsForeignToLocalNS=x}) $
\foreignMod foreignURL targetNS ->
if (((\(L2.L2NamespaceID n) -> n) targetNS) < reservedIDs) && targetNS /= nsMain
-- Reserved IDs are a special case: We match on them numerically, and
-- they are globally unique, so just re-use the same numerical ID. We exclude
-- nsMain, however, because that is actually a user-defined namespace.
then return targetNS
else
do
let foreignNSC = (L2.l2AllNamespaces foreignMod) ! targetNS
newNSID <- L2.l2NextNamespace <$> getL2Model
modifyL2Model $ \mod -> mod {
L2.l2NextNamespace = (\(L2.L2NamespaceID nid) -> L2.L2NamespaceID (nid + 1)) newNSID
}
-- The cache wrapper will eventually save the namespace, but recursivelyImportExternalNSContents
-- might need an answer before then to retrieve its parent namespace, so cache it now...
modify (\mts -> mts { mtsForeignToLocalNS = M.insert (foreignURL, targetNS)
newNSID (mtsForeignToLocalNS mts) })
newNSC <- recursivelyImportExternalNSContents foreignMod foreignURL foreignNSC
modifyL2Model $ \mod -> mod {
L2.l2AllNamespaces = M.insert newNSID
newNSC (L2.l2AllNamespaces mod)
}
-- destURL <- (\(_,url,_) -> url) <$> ask
-- Debug.Trace.trace ("Imported namespace " ++ show targetNS ++ " from " ++ BSC.unpack foreignURL ++
-- " into " ++ destURL ++
-- " as " ++ show newNSID) $! return ()
return newNSID
recursivelyImportExternalUnitExpression :: L2.L2Model -> L2.Identifier -> L2.L2UnitExpression -> ModelTranslation L2.L2UnitExpression
recursivelyImportExternalUnitExpression foreignMod foreignURL targetEx =
case targetEx of
[email protected]{} -> return ex
L2.L2UnitExRef ss ref -> L2.L2UnitExRef ss <$> (recursivelyImportExternalBaseUnit foreignMod foreignURL ref)
L2.L2UnitExTimes ss ex1 ex2 -> L2.L2UnitExTimes ss <$> (recursivelyImportExternalUnitExpression foreignMod foreignURL ex1)
<*> (recursivelyImportExternalUnitExpression foreignMod foreignURL ex2)
L2.L2UnitPow ss ex1 power -> L2.L2UnitPow ss <$> (recursivelyImportExternalUnitExpression foreignMod foreignURL ex1)
<*> (pure power)
L2.L2UnitScalarMup ss scal ex -> L2.L2UnitScalarMup ss scal <$> (recursivelyImportExternalUnitExpression foreignMod foreignURL ex)
L2.L2UnitScopedVar ss su -> L2.L2UnitScopedVar ss <$> (recursivelyImportExternalScopedUnit foreignMod foreignURL su)
recursivelyImportExternalScopedUnit :: L2.L2Model -> L2.Identifier -> L2.L2ScopedUnitID -> ModelTranslation L2.L2ScopedUnitID
recursivelyImportExternalScopedUnit m ident (targetSUID@(L2.L2ScopedUnitID suname _)) =
(setScopedUnitName suname) <$>
(cacheWrapExternalImport mtsForeignToLocalScopedUnit (\x m -> m {mtsForeignToLocalScopedUnit = x}) $
\foreignMod foreignURL targetSUID -> do
newSUID <- L2.l2NextScopedUnitID <$> getL2Model
modifyL2Model $ \mod -> mod {
L2.l2NextScopedUnitID = (\(L2.L2ScopedUnitID _ i) -> L2.L2ScopedUnitID "unnamed!" (i + 1)) newSUID
}
return newSUID
) m ident targetSUID
recursivelyImportExternalScopedDomain :: L2.L2Model -> L2.Identifier -> L2.L2ScopedDomainID -> ModelTranslation L2.L2ScopedDomainID
recursivelyImportExternalScopedDomain m ident (targetSUID@(L2.L2ScopedDomainID suname _)) =
(setScopedDomainName suname) <$>
(cacheWrapExternalImport mtsForeignToLocalScopedDomain (\x m -> m {mtsForeignToLocalScopedDomain = x}) $
\foreignMod foreignURL targetSUID -> do
newSUID <- L2.l2NextScopedDomainID <$> getL2Model
modifyL2Model $ \mod -> mod {
L2.l2NextScopedDomainID = (\(L2.L2ScopedDomainID _ i) -> L2.L2ScopedDomainID "unnamed!" (i + 1)) newSUID
}
return newSUID
) m ident targetSUID
recursivelyImportExternalScopedValue :: L2.L2Model -> L2.Identifier -> L2.L2ScopedValueID -> ModelTranslation L2.L2ScopedValueID
recursivelyImportExternalScopedValue = cacheWrapExternalImport mtsForeignToLocalScopedValue (\x m -> m {mtsForeignToLocalScopedValue = x}) $
\foreignMod foreignURL targetSVID -> do
newSVID <- L2.l2NextScopedValueID <$> getL2Model
modifyL2Model $ \mod -> mod {
L2.l2NextScopedValueID = (\(L2.L2ScopedValueID i) -> L2.L2ScopedValueID (i + 1)) newSVID
}
return newSVID
recursivelyImportExternalBaseUnit :: L2.L2Model -> L2.Identifier -> L2.L2BaseUnitsID -> ModelTranslation L2.L2BaseUnitsID
recursivelyImportExternalBaseUnit = cacheWrapExternalImport mtsForeignToLocalBaseUnits (\x m -> m{mtsForeignToLocalBaseUnits=x}) $
\foreignMod foreignURL bu -> do
newBUID <- L2.l2NextBaseUnits <$> getL2Model
modifyL2Model $ \mod -> mod {
L2.l2NextBaseUnits = (\(L2.L2BaseUnitsID i) -> L2.L2BaseUnitsID (i + 1)) newBUID,
-- Note that L2BaseUnitContents only contains a SrcSpan so needs no translation...
L2.l2AllBaseUnits = M.insert newBUID (fromJust . M.lookup bu . L2.l2AllBaseUnits $ foreignMod)
(L2.l2AllBaseUnits mod)
}
return newBUID
recursivelyImportExternalDomainExpression :: L2.L2Model -> L2.Identifier -> L2.L2DomainExpression -> ModelTranslation L2.L2DomainExpression
recursivelyImportExternalDomainExpression foreignMod foreignURL targetEx =
case targetEx of
L2.L2DomainExpressionProduct ss l ->
L2.L2DomainExpressionProduct ss <$>
recursivelyImportExternalLabelledDomains foreignMod foreignURL l
L2.L2DomainExpressionDisjointUnion ss l ->
L2.L2DomainExpressionDisjointUnion ss <$>
recursivelyImportExternalLabelledDomains foreignMod foreignURL l
L2.L2DomainExpressionFieldSignature ss dd dcd ->
L2.L2DomainExpressionFieldSignature ss
<$> recursivelyImportExternalDomainExpression foreignMod foreignURL dd
<*> recursivelyImportExternalDomainExpression foreignMod foreignURL dcd
L2.L2DomainExpressionReal ss u ->
L2.L2DomainExpressionReal ss
<$> recursivelyImportExternalUnitExpression foreignMod foreignURL u
L2.L2DomainExpressionApply ss domArgs unitArgs val ->
L2.L2DomainExpressionApply ss
<$> mapM (\(sdid, domExpr) ->
(,) sdid
<$> recursivelyImportExternalDomainExpression foreignMod foreignURL domExpr)
domArgs
<*> mapM (\(sdid, unitExpr) ->
(,) sdid
<$> recursivelyImportExternalUnitExpression foreignMod foreignURL unitExpr)
unitArgs
<*> recursivelyImportExternalDomainExpression foreignMod foreignURL val
L2.L2DomainFunctionEvaluate ss dfid args ->
L2.L2DomainFunctionEvaluate ss
<$> recursivelyImportExternalDomainFunction foreignMod foreignURL dfid
<*> mapM (recursivelyImportExternalDomainExpression foreignMod foreignURL) args
L2.L2DomainVariableRef ss sdid ->
L2.L2DomainVariableRef ss <$> (recursivelyImportExternalScopedDomain foreignMod foreignURL sdid)
L2.L2DomainExpressionLambda ss scopedDoms scopedUnits uConstraints dEqs dRels dex ->
L2.L2DomainExpressionLambda ss
<$> mapM (\(d, k) ->
(,)
<$> recursivelyImportExternalScopedDomain foreignMod foreignURL d
<*> pure k) scopedDoms
<*> mapM (recursivelyImportExternalScopedUnit foreignMod foreignURL) scopedUnits
<*> mapM (\(uex1, uex2) -> (,)
<$> recursivelyImportExternalUnitExpression foreignMod foreignURL uex1
<*> recursivelyImportExternalUnitExpression foreignMod foreignURL uex2) uConstraints
<*> mapM (\(dex1, dex2) -> (,)
<$> recursivelyImportExternalDomainExpression foreignMod foreignURL dex1
<*> recursivelyImportExternalDomainExpression foreignMod foreignURL dex2) dEqs
<*> mapM (\(cls, dexs) -> (,)
<$> recursivelyImportExternalClassExpression foreignMod foreignURL cls
<*> mapM (recursivelyImportExternalDomainExpression foreignMod foreignURL) dexs
) dRels
<*> recursivelyImportExternalDomainExpression foreignMod foreignURL dex
recursivelyImportExternalClassExpression :: L2.L2Model -> L2.Identifier -> L2.L2ClassExpression -> ModelTranslation L2.L2ClassExpression
recursivelyImportExternalClassExpression foreignMod foreignURL clsex =
case clsex of
L2.L2ClassExpressionReference ss clsid -> L2.L2ClassExpressionReference ss <$>
recursivelyImportExternalClass foreignMod foreignURL clsid
L2.L2ClassExpressionOpenDisjointUnion ss labs ->
L2.L2ClassExpressionOpenDisjointUnion ss <$>
recursivelyImportExternalLabelledDomains foreignMod foreignURL labs
L2.L2ClassExpressionList ss exs ->
L2.L2ClassExpressionList ss <$> mapM (recursivelyImportExternalClassExpression foreignMod foreignURL) exs
recursivelyImportExternalLabelledDomains :: L2.L2Model -> L2.Identifier -> L2.L2LabelledDomains -> ModelTranslation L2.L2LabelledDomains
recursivelyImportExternalLabelledDomains foreignMod foreignURL (L2.L2LabelledDomains ldl) =
L2.L2LabelledDomains <$>
mapM (\(lab, ex) ->
(,) <$> recursivelyImportExternalLabel foreignMod foreignURL lab
<*> recursivelyImportExternalDomainExpression foreignMod foreignURL ex) ldl
recursivelyImportExternalExpression :: L2.L2Model -> L2.Identifier -> L2.L2Expression -> ModelTranslation L2.L2Expression
recursivelyImportExternalExpression foreignMod foreignURL targetEx =
case targetEx of
L2.L2ExApply ss op arg ->
L2.L2ExApply ss
<$> recursivelyImportExternalExpression foreignMod foreignURL op
<*> recursivelyImportExternalExpression foreignMod foreignURL arg
L2.L2ExReferenceLabel ss l ->
L2.L2ExReferenceLabel ss
<$> recursivelyImportExternalLabel foreignMod foreignURL l
L2.L2ExReferenceValue ss valueID ->
L2.L2ExReferenceValue ss
<$> recursivelyImportExternalValue foreignMod foreignURL valueID
L2.L2ExReferenceClassValue ss cval ->
L2.L2ExReferenceClassValue ss
<$> recursivelyImportExternalClassValue foreignMod foreignURL cval
L2.L2ExBoundVariable ss svid ->
L2.L2ExBoundVariable ss
<$> recursivelyImportExternalScopedValue foreignMod foreignURL svid
L2.L2ExLiteralReal ss uex rv ->
L2.L2ExLiteralReal ss
<$> recursivelyImportExternalUnitExpression foreignMod foreignURL uex
<*> (return rv)
L2.L2ExMkProduct ss vals ->
L2.L2ExMkProduct ss
<$> mapM (\(lab, ex) ->
(,)
<$> recursivelyImportExternalLabel foreignMod foreignURL lab
<*> recursivelyImportExternalExpression foreignMod foreignURL ex) vals
L2.L2ExMkUnion ss l ex ->
L2.L2ExMkUnion ss <$> recursivelyImportExternalLabel foreignMod foreignURL l
<*> recursivelyImportExternalExpression foreignMod foreignURL ex
L2.L2ExUnmkUnion ss l ex ->
L2.L2ExUnmkUnion ss <$> recursivelyImportExternalLabel foreignMod foreignURL l
<*> recursivelyImportExternalExpression foreignMod foreignURL ex
L2.L2ExIsLabel ss l ex ->
L2.L2ExIsLabel ss <$> recursivelyImportExternalLabel foreignMod foreignURL l
<*> recursivelyImportExternalExpression foreignMod foreignURL ex
L2.L2ExProject ss l ->
L2.L2ExProject ss <$> recursivelyImportExternalLabel foreignMod foreignURL l
L2.L2ExAppend ss l ->
L2.L2ExAppend ss <$> recursivelyImportExternalLabel foreignMod foreignURL l
L2.L2ExLambda ss bv val ->
L2.L2ExLambda ss <$> recursivelyImportExternalScopedValue foreignMod foreignURL bv
<*> recursivelyImportExternalExpression foreignMod foreignURL val
L2.L2ExCase ss expr values ->
L2.L2ExCase ss <$> recursivelyImportExternalExpression foreignMod foreignURL expr
<*> mapM (\(l, ex) ->
(,)
<$> recursivelyImportExternalLabel foreignMod foreignURL l
<*> recursivelyImportExternalExpression foreignMod foreignURL ex
) values
L2.L2ExLet ss expr closureNS closureExprs ->
L2.L2ExLet ss <$> recursivelyImportExternalExpression foreignMod foreignURL expr
<*> recursivelyImportExternalNS foreignMod foreignURL closureNS
<*> mapM (recursivelyImportExternalExpression foreignMod foreignURL) closureExprs
L2.L2ExString ss bs -> pure $ L2.L2ExString ss bs
L2.L2ExSignature ss ex sig ->
L2.L2ExSignature ss
<$> recursivelyImportExternalExpression foreignMod foreignURL ex
<*> recursivelyImportExternalDomainExpression foreignMod foreignURL sig
recursivelyImportExternalLabel :: L2.L2Model -> L2.Identifier -> L2.L2Label -> ModelTranslation L2.L2Label
recursivelyImportExternalLabel foreignMod foreignURL (L2.L2Label ens val) =
L2.L2Label <$> recursivelyImportExternalNS foreignMod foreignURL ens
<*> pure val
recursivelyImportExternalValue :: L2.L2Model -> L2.Identifier -> L2.L2ValueID -> ModelTranslation L2.L2ValueID
recursivelyImportExternalValue =
cacheWrapExternalImport mtsForeignToLocalValues (\x m -> m{mtsForeignToLocalValues=x}) $
\foreignMod foreignURL foreignValueID -> do
let foreignValueContents = (fromJust . M.lookup foreignValueID . L2.l2AllValues $ foreignMod)
newValueID <- L2.l2NextValue <$> getL2Model
localValueContents <- L2.L2ValueContents (L2.l2ValueSS foreignValueContents) <$>
maybe (return Nothing)
(\fvc -> Just <$>
recursivelyImportExternalDomainExpression foreignMod foreignURL fvc)
(L2.l2ValueType foreignValueContents)
modifyL2Model $ \mod -> mod {
L2.l2NextValue = (\(L2.L2ValueID i) -> L2.L2ValueID (i + 1)) newValueID,
L2.l2AllValues = M.insert newValueID localValueContents
(L2.l2AllValues mod)
}
return newValueID
recursivelyImportExternalClassValue :: L2.L2Model -> L2.Identifier -> L2.L2ClassValueID -> ModelTranslation L2.L2ClassValueID
recursivelyImportExternalClassValue =
cacheWrapExternalImport mtsForeignToLocalClassValue (\x m -> m{mtsForeignToLocalClassValue=x}) $
\foreignMod foreignURL foreignValueID -> do
let Just (L2.L2ClassValueContents ss dt) = M.lookup foreignValueID . L2.l2AllClassValues $ foreignMod
localValueContents <- L2.L2ClassValueContents ss <$>
(recursivelyImportExternalDomainExpression foreignMod foreignURL dt)
newValueID <- L2.l2NextClassValueID <$> getL2Model
modifyL2Model $ \mod -> mod {
L2.l2NextClassValueID = (\(L2.L2ClassValueID i) -> L2.L2ClassValueID (i + 1)) newValueID,
L2.l2AllClassValues = M.insert newValueID localValueContents
(L2.l2AllClassValues mod)
}
return newValueID
recursivelyImportExternalClass :: L2.L2Model -> L2.Identifier -> L2.L2ClassID -> ModelTranslation L2.L2ClassID
recursivelyImportExternalClass =
cacheWrapExternalImport mtsForeignToLocalClass (\x m -> m{mtsForeignToLocalClass=x}) $
\foreignMod foreignURL foreignClassID -> do
newClassID <- L2.l2NextClassID <$> getL2Model
let Just (L2.L2ClassContents ss p df vals) = M.lookup foreignClassID . L2.l2AllClasses $ foreignMod
localClassContents <- L2.L2ClassContents ss
<$> (mapM (\(sdid, k) ->
(,) <$>
recursivelyImportExternalScopedDomain foreignMod foreignURL sdid
<*> pure k) p)
<*> T.mapM (\dfid ->
recursivelyImportExternalDomainFunction foreignMod
foreignURL dfid) df
<*> T.mapM (\valueId ->
recursivelyImportExternalClassValue foreignMod foreignURL valueId) vals
modifyL2Model $ \mod -> mod {
L2.l2NextClassID = (\(L2.L2ClassID i) -> L2.L2ClassID (i + 1)) newClassID,
L2.l2AllClasses = M.insert newClassID localClassContents
(L2.l2AllClasses mod)
}
return newClassID
recursivelyImportExternalDomainFunction :: L2.L2Model -> L2.Identifier -> L2.L2DomainFunctionID -> ModelTranslation L2.L2DomainFunctionID
recursivelyImportExternalDomainFunction =
cacheWrapExternalImport mtsForeignToLocalDomainFunction (\x m -> m{mtsForeignToLocalDomainFunction=x}) $
\foreignMod foreignURL foreignDF -> do
newDFID <- L2.l2NextDomainFunctionID <$> getL2Model
-- No need for any translation...
let Just localDFContents = M.lookup foreignDF (L2.l2AllDomainFunctions foreignMod)
modifyL2Model $ \mod -> mod {
L2.l2NextDomainFunctionID = (\(L2.L2DomainFunctionID i) -> L2.L2DomainFunctionID (i + 1)) newDFID,
L2.l2AllDomainFunctions = M.insert newDFID localDFContents
(L2.l2AllDomainFunctions mod)
}
return newDFID
-- | Process internal imports until either all imports are resolved, or an
-- error such as a cycle or missing identifier is found.
processInternalImports :: L2.L2NamespaceID -> ModelTranslation ()
processInternalImports startFrom = do
allNS <- S.insert startFrom <$> findNamespaceDescendents "pii" startFrom
ModelTranslationState { mtsL2ToL1Map = nsmap } <- get
-- Make a list of namespaces that actually have local imports to process...
let
isFinished nsid = do
Just (L1.L1NamespaceContents l1nsc) <- M.lookup nsid . mtsL2ToL1Map <$> get
return $ not (any isLocalImport l1nsc)
isLocalImport (L1.L1NSImport { L1.l1nsImportFrom = Nothing }) = True
isLocalImport _ = False
(pending, finished) <- (\(a, b) -> (S.fromList a, S.fromList b)) <$> partitionM isFinished allNS
let
processInternalImports'' pending finished =
if S.null pending
then return ()
else do
let chosen = head (S.elems pending)
(pending', finished') <-
processInternalImports' [chosen] (S.delete chosen pending) finished
processInternalImports'' pending' finished'
processInternalImports' :: [L2.L2NamespaceID] -> S.Set L2.L2NamespaceID -> S.Set L2.L2NamespaceID ->
ModelTranslation (S.Set L2.L2NamespaceID, S.Set L2.L2NamespaceID)
processInternalImports' stack@(curNSID:_) pending finished = do
let (Just (L1.L1NamespaceContents l1nsc)) = M.lookup curNSID nsmap
flip (flip foldM (pending, finished)) l1nsc $ \(pending, finished) st -> case st of
L1.L1NSImport { L1.l1nsSS = ss,
L1.l1nsImportFrom = Nothing,
L1.l1nsImportPath = path,
L1.l1nsImportWhat = what,
L1.l1nsImportHiding = hiding } -> do
childNSID <- findScopedSymbolByRAPath ss curNSID path
(\nsc ident -> M.lookup (L1.l1IdBS ident) (L2.l2nsNamespaces nsc))
when (childNSID `elem` stack) $ do
paths <- intercalate ", which is imported by " <$> mapM (nsidToFriendlyName nsMain)
((childNSID:(takeWhile (/= childNSID) stack)) ++ [childNSID])
fail $ "Illegal import cycle found involving namespace " ++
paths ++ ", at " ++ (show ss)
(pending', finished') <-
if (childNSID `S.member` finished)
then return (pending, finished)
else processInternalImports' (childNSID:stack) (S.delete childNSID pending) finished
l2model <- getL2Model
childNSC <- getNamespaceContents "internal imports - child NS" childNSID
importSyms <- importListFromWhatAndHiding childNSID l2model what hiding
forM_ importSyms $ \(L1.L1Identifier _ importSym) -> do
-- TODO - check importSym doesn't already exist in curNSID.
let
importSomething :: (L2.L2NamespaceContents -> M.Map L2.Identifier a) ->
(L2.L2NamespaceContents -> M.Map L2.Identifier a -> L2.L2NamespaceContents) ->
ModelTranslation ()
importSomething getter setter =
case M.lookup importSym (getter childNSC) of
Nothing -> return ()
Just impSomething ->
modifyNamespaceContents curNSID (\nsc -> setter nsc (M.insert importSym impSomething (getter nsc)))
importSomething L2.l2nsNamespaces (\nsc x -> nsc { L2.l2nsNamespaces = x })
importSomething L2.l2nsDomains (\nsc x -> nsc { L2.l2nsDomains = x })
importSomething L2.l2nsNamedValues (\nsc x -> nsc { L2.l2nsNamedValues = x })
importSomething L2.l2nsClassValues (\nsc x -> nsc { L2.l2nsClassValues = x })
importSomething L2.l2nsUnits (\nsc x -> nsc { L2.l2nsUnits = x })
importSomething L2.l2nsClasses (\nsc x -> nsc { L2.l2nsClasses = x })
importSomething L2.l2nsDomainFunctions (\nsc x -> nsc { L2.l2nsDomainFunctions = x })
importSomething L2.l2nsLabels (\nsc x -> nsc { L2.l2nsLabels = x })
return (pending', S.insert curNSID finished')
_ -> return (pending, finished)
processInternalImports'' pending finished
-- | Recursively translates a model, starting from the specified namespace.
recursivelyTranslateModel :: L2.L2NamespaceID -> ModelTranslation ()
recursivelyTranslateModel thisNSID = do
Just (L1.L1NamespaceContents l1nsc) <- (M.lookup thisNSID . mtsL2ToL1Map) <$> get
translateNSContents def thisNSID l1nsc
-- | Updates the contents of a namespace with the actual definitions, replacing
-- the temporary 'skeleton' definitions.
translateNSContents :: ScopeInformation -> L2.L2NamespaceID -> [L1.L1NamespaceStatement] -> ModelTranslation ()
translateNSContents scope thisNSID nsc =
forM_ nsc $ \nss ->
case nss of
-- Note: We assume all imports have already been handled by
-- processExternalImports / processInternalImports, and have no 'as' clause
-- because of handleL1SimpleSyntacticSugar...
L1.L1NSImport {} ->
-- Imports are already done.
return ()
L1.L1NSNamespace { L1.l1nsNamespaceName = nsNameL1@(L1.L1Identifier idss nsName) } -> do
l2nsc <- getNamespaceContents "translateNSContents - namespace" thisNSID
let Just childNSID = (M.lookup nsName . L2.l2nsNamespaces) l2nsc
Just (L1.L1NamespaceContents childNSC) <- (M.lookup childNSID . mtsL2ToL1Map) <$> get
translateNSContents scope childNSID childNSC
Just (L2.L2DomainReference _ tmpId) <-
M.lookup nsName . L2.l2nsDomains <$> getNamespaceContents "translateNSContents-nsdr" thisNSID
labs <- (L2.L2DomainExpressionDisjointUnion idss . L2.L2LabelledDomains .
(\n -> [(L2.L2Label childNSID (fromIntegral i), L2.L2DomainExpressionProduct idss (L2.L2LabelledDomains [])) |
i <- [0..(L2.l2nsNextLabel n)]])) <$>
getNamespaceContents "translateNSContents DU" childNSID
modify $ \mts -> mts { mtsTempIDDomainType = M.insert tmpId labs
(mtsTempIDDomainType mts) }
return ()
L1.L1NSDomain { L1.l1nsSS = ss,
L1.l1nsDomainName = L1.L1Identifier idss ident,
L1.l1nsDomainDefinition = dd } -> do
Just domNSID <- findExactNamespace thisNSID ident
domStatement <- translateDomainDefinition scope ss domNSID dd
Just (L2.L2DomainReference _ tmpId) <-
M.lookup ident . L2.l2nsDomains <$> getNamespaceContents "tnsc domref" thisNSID
modify $ \mts -> mts { mtsTempIDDomainType = M.insert tmpId domStatement (mtsTempIDDomainType mts) }
modifyNamespaceContents thisNSID $ \l2nsc -> l2nsc {
L2.l2nsDomains = M.insert ident domStatement (L2.l2nsDomains l2nsc)
}
L1.L1NSAssertion { L1.l1nsSS = ss, L1.l1nsExpression = ex } -> do
l2ex <- translateExpression scope ss thisNSID ex
modifyL2Model $ \mod -> mod { L2.l2AllAssertions = l2ex:(L2.l2AllAssertions mod) }
L1.L1NSNamedValue { L1.l1nsSS = ss, L1.l1nsValueName = L1.L1Identifier { L1.l1IdBS = n },
L1.l1nsDomainType = mt } -> do
l2t <- maybe (return Nothing) (\t -> Just <$> translateDomainExpression scope ss thisNSID t) mt
Just valueID <- M.lookup n . L2.l2nsNamedValues <$> getNamespaceContents "tnsc nv" thisNSID
modifyL2Model $ \mod -> mod {
L2.l2AllValues = M.insert valueID (L2.L2ValueContents ss l2t) (L2.l2AllValues mod)
}
L1.L1NSClass { L1.l1nsSS = ss, L1.l1nsClassName = L1.L1Identifier _ n,
L1.l1nsClassParameters = p,
L1.l1nsClassDomainFunctions = df, L1.l1nsClassValues = cvs } -> do
Just classID <- M.lookup n . L2.l2nsClasses <$> getNamespaceContents "tnsc c" thisNSID
Just classContents <- (M.lookup classID . L2.l2AllClasses) <$> getL2Model
let scope' = scope { siDomainIDMap = foldl' (\m ((l1id, _), (l2id, _)) ->
M.insert l1id l2id m) (siDomainIDMap scope) (zip p (L2.l2ClassParameters classContents))}
forM_ cvs $ \(L1.L1Identifier cvss cvName, cvtype) -> do
let Just cvID = M.lookup cvName (L2.l2ClassValues classContents)
l2cvtype <- translateDomainExpression scope' ss thisNSID cvtype
modifyL2Model $ \mod -> mod {
L2.l2AllClassValues = M.insert cvID (L2.L2ClassValueContents cvss l2cvtype)
(L2.l2AllClassValues mod)
}
L1.L1NSEnsemble {} -> return ()
L1.L1NSUnit { L1.l1nsSS = ss, L1.l1nsUnitName = L1.L1Identifier uss n, L1.l1nsUnitDefinition = d } -> do
l2uDef <- translateUnitDefinition scope ss thisNSID d
Just (L2.L2UnitExRef _ tmpId) <- M.lookup n . L2.l2nsUnits <$> getNamespaceContents "tnsc u" thisNSID
modify $ \mts -> mts { mtsTempIDUnitEx = M.insert tmpId l2uDef (mtsTempIDUnitEx mts) }
modifyNamespaceContents thisNSID $ \l2nsc -> l2nsc {
L2.l2nsUnits = M.insert n l2uDef (L2.l2nsUnits l2nsc)
}
L1.L1NSInstance { L1.l1nsSS = ss, L1.l1nsInstanceOfClass = cpath,
L1.l1nsClassArguments = args, L1.l1nsInstanceDomainFunctions = dfs,
L1.l1nsInstanceValues = vals } -> do
-- Find the class...
classID <-
(findScopedSymbolByRAPath ss thisNSID cpath $ \nsc className ->
M.lookup (L1.l1IdBS className) (L2.l2nsClasses nsc))
Just class' <- (M.lookup classID . L2.l2AllClasses) <$> getL2Model
l2args <- mapM (translateDomainExpression scope ss thisNSID) args
l2dfs <- mapM (\(L1.L1Identifier ifss instfuncident, dts, dex) ->
(,,) <$>
(case M.lookup instfuncident (L2.l2ClassDomainFunctions class') of
Nothing -> fail $ "Instance refers to domain function " ++
(BSC.unpack instfuncident) ++ " at " ++
(show ifss) ++ ", but that domain function " ++
"could not be found in the class."
Just ifid -> return ifid
) <*>
(mapM (translateDomainExpression scope ss thisNSID) dts) <*>
translateDomainExpression scope ss thisNSID dex
) dfs
l2exprs <- mapM (translateExpression scope ss thisNSID) vals
modifyL2Model $ \mod ->
mod { L2.l2AllInstances =
(L2.L2InstanceContents { L2.l2InstanceSS = ss,
L2.l2InstanceOfClass = classID,
L2.l2InstanceClassArguments = l2args,
L2.l2InstanceDomainFunctions = l2dfs,
L2.l2InstanceValues = l2exprs }):
(L2.l2AllInstances mod) }
return ()
-- | Translates an L1Label to an L2Expression.
translateLabelEx :: ScopeInformation -> L1.SrcSpan -> L2.L2NamespaceID -> L1.L1RelOrAbsPathPossiblyIntEnd -> ModelTranslation L2.L2Expression
translateLabelEx scope _ nsid (L1.L1RelOrAbsPathInt pss ra rp v) = do
nsid' <- findNamespaceByRAPath pss nsid (L1.L1RelOrAbsPath pss ra rp)
when (nsid' /= nsInteger && nsid' /= nsNatural) . fail $
"Literal integer syntax can only be used to refer to labels in the \
\built-in Integer and Natural domains (e.g. Natural:1), so usage at " ++
(show pss) ++ " is invalid."
when (nsid' == nsNatural && v < 0) . fail $
"Attempt to reference a negative natural number, at " ++ (show pss)
return . L2.L2ExReferenceLabel pss $ L2.L2Label nsid' (fromIntegral v)
translateLabelEx scope _ nsid (L1.L1RelOrAbsPathNoInt pss isabs rp) = do
let ra = L1.L1RelOrAbsPath pss isabs rp
findScopedSymbolByRAPath pss nsid ra $ \nsc labelName ->
case M.lookup (L1.l1IdBS labelName) (L2.l2nsLabels nsc) of
Just l -> Just $ L2.L2ExReferenceLabel pss l
Nothing ->
case M.lookup (L1.l1IdBS labelName) (L2.l2nsNamedValues nsc) of
Just nv -> Just $ L2.L2ExReferenceValue pss nv
Nothing -> case M.lookup (L1.l1IdBS labelName) (L2.l2nsClassValues nsc) of
Just cv -> Just $ L2.L2ExReferenceClassValue pss cv
Nothing -> Nothing
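-- For instance (a constructor-level sketch of the first clause above): a label
-- written with a literal integer ending that resolves to the built-in Natural
-- namespace becomes a plain label reference,
--
-- > translateLabelEx scope ss nsid (L1RelOrAbsPathInt pss ra rp 5)
-- > ==> L2ExReferenceLabel pss (L2Label nsNatural 5)
--
-- provided the path rp resolves to nsNatural and the literal is non-negative.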
-- | Translates an L1Label to an L2Label, giving an appropriate error if the
-- label is something other than an ensemble label.
translateLabelOnly :: String -> ScopeInformation -> L1.SrcSpan -> L2.L2NamespaceID -> L1.L1RelOrAbsPathPossiblyIntEnd -> ModelTranslation L2.L2Label
translateLabelOnly context scope ss nsid p = do
r <- translateLabelEx scope ss nsid p
case r of
L2.L2ExReferenceLabel _ l -> return l
_ -> fail $ "Expected a label, not a value, in " ++ context ++ " at " ++ show (L1.l1RelOrAbsPIESS p)
-- | Translates an L1Expression to an L2Expression.
translateExpression :: ScopeInformation -> L1.SrcSpan -> L2.L2NamespaceID -> L1.L1Expression -> ModelTranslation L2.L2Expression
translateExpression scope _ nsid (L1.L1ExApply ss op arg) =
L2.L2ExApply ss <$> translateExpression scope ss nsid op
<*> translateExpression scope ss nsid arg
translateExpression scope _ nsid (L1.L1ExReference ss l) =
translateLabelEx scope ss nsid l
translateExpression scope _ _ (L1.L1ExBoundVariable ss scopedID@(L1.L1ScopedID idss _)) =
L2.L2ExBoundVariable ss <$>
maybe (fail $ "Reference to unknown local variable at " ++ show idss)
return (M.lookup scopedID (siValueIDMap scope))
translateExpression scope _ nsid (L1.L1ExLiteralReal ss units rv) =
L2.L2ExLiteralReal ss
<$> translateUnitExpression scope ss nsid units
<*> (pure rv)
translateExpression scope _ nsid (L1.L1ExMkProduct ss values) =
L2.L2ExMkProduct ss
<$> mapM (\(l, ex) ->
(,) <$> translateLabelOnly "product label" scope ss nsid l
<*> translateExpression scope ss nsid ex
) values
translateExpression scope _ nsid (L1.L1ExMkUnion ss label value) =
L2.L2ExMkUnion ss
<$> translateLabelOnly "union label" scope ss nsid label
<*> translateExpression scope ss nsid value
translateExpression scope _ nsid (L1.L1ExUnmkUnion ss label value) =
L2.L2ExUnmkUnion ss
<$> translateLabelOnly "union label" scope ss nsid label
<*> translateExpression scope ss nsid value
translateExpression scope _ nsid (L1.L1ExIsLabel ss label value) =
L2.L2ExIsLabel ss
<$> translateLabelOnly "union label" scope ss nsid label
<*> translateExpression scope ss nsid value
translateExpression scope _ nsid (L1.L1ExProject ss label) =
L2.L2ExProject ss
<$> translateLabelOnly "projection label" scope ss nsid label
translateExpression scope _ nsid (L1.L1ExAppend ss label) =
L2.L2ExAppend ss
<$> translateLabelOnly "append label" scope ss nsid label
translateExpression scope _ nsid (L1.L1ExLambda ss (L1.L1PatternBind _ bvar) value) = do
newBVar <- L2.l2NextScopedValueID <$> getL2Model
modifyL2Model $ \mod -> mod {
L2.l2NextScopedValueID = (\(L2.L2ScopedValueID n) ->
L2.L2ScopedValueID (n + 1)) newBVar }
L2.L2ExLambda ss newBVar <$>
translateExpression (scope { siValueIDMap = M.insert bvar newBVar (siValueIDMap scope)})
ss nsid value
translateExpression _ _ _ (L1.L1ExLambda ss _ _) = do
fail $ "Internal Compiler Error: Non-simple lambda pattern remains after desugar, at " ++ (show ss)
translateExpression scope _ nsid (L1.L1ExCase ss expr (Left values)) =
L2.L2ExCase ss
<$> translateExpression scope ss nsid expr
<*> mapM (\(l, cex) ->
(,)
<$> translateLabelOnly "case label" scope ss nsid l
<*> translateExpression scope ss nsid cex) values
translateExpression _ _ _ (L1.L1ExFCase ss _ _) =
fail $ "Internal Compiler Error - fcase remains after desugar, at " ++ (show ss)
translateExpression _ _ _ (L1.L1ExCase ss _ (Right _)) =
fail $ "Internal Compiler Error - non-closed case remains after desugar, at " ++ (show ss)
translateExpression scope _ nsid (L1.L1ExLet ss expr closure) = do
letNS <- registerNewNamespace ss nsid closure
let L1.L1NamespaceContents closureList = closure
buildSkeletonModel ss closure letNS
(_, nsAsserts) <- isolateAssertions (translateNSContents scope letNS closureList)
L2.L2ExLet ss
<$> translateExpression scope ss letNS expr
<*> (pure letNS)
<*> (pure nsAsserts)
translateExpression scope _ nsid (L1.L1ExString ss sv) =
return $ L2.L2ExString ss sv
translateExpression scope _ nsid (L1.L1ExSignature ss ex sig) = do
L2.L2ExSignature ss <$> (translateExpression scope ss nsid ex)
<*> (translateDomainExpression scope ss nsid sig)
-- | Translates an L1UnitExpression to an L2UnitExpression.
translateUnitExpression :: ScopeInformation -> L1.SrcSpan -> L2.L2NamespaceID -> L1.L1UnitExpression -> ModelTranslation L2.L2UnitExpression
translateUnitExpression scope _ nsid (L1.L1UnitExDimensionless ss) =
pure $ L2.L2UnitExDimensionless ss
translateUnitExpression scope _ nsid (L1.L1UnitExRef ss refpath) =
findScopedSymbolByRAPath ss nsid refpath $ \nsc unitName -> M.lookup (L1.l1IdBS unitName) (L2.l2nsUnits nsc)
translateUnitExpression scope _ nsid (L1.L1UnitExTimes ss expr1 expr2) =
L2.L2UnitExTimes ss <$> translateUnitExpression scope ss nsid expr1
<*> translateUnitExpression scope ss nsid expr2
translateUnitExpression scope _ nsid (L1.L1UnitPow ss expr1 powerOf) =
L2.L2UnitPow ss <$> (translateUnitExpression scope ss nsid expr1)
<*> (pure powerOf)
translateUnitExpression scope _ nsid (L1.L1UnitScalarMup ss scalv expr1) =
L2.L2UnitScalarMup ss scalv <$> (translateUnitExpression scope ss nsid expr1)
translateUnitExpression scope _ nsid (L1.L1UnitScopedVar ss scopedv) =
L2.L2UnitScopedVar ss <$>
maybe (fail $ "Reference to unknown scoped units name " ++ (BSC.unpack . L1.l1ScopedIdBS $ scopedv) ++ " at " ++ (show ss)) return (M.lookup scopedv (siUnitIDMap scope))
-- | Translates an L1UnitDefinition to an L2UnitExpression, registering a new base unit when the definition introduces one.
translateUnitDefinition :: ScopeInformation -> L1.SrcSpan -> L2.L2NamespaceID -> L1.L1UnitDefinition -> ModelTranslation L2.L2UnitExpression
translateUnitDefinition scope _ nsid (L1.L1UnitDefNewBase ss) = do
buID <- L2.l2NextBaseUnits <$> getL2Model
modifyL2Model $ \mod ->
mod { L2.l2NextBaseUnits = (\(L2.L2BaseUnitsID n) -> L2.L2BaseUnitsID (n + 1)) buID,
L2.l2AllBaseUnits = M.insert buID (L2.L2BaseUnitContents ss) (L2.l2AllBaseUnits mod) }
return $ L2.L2UnitExRef ss buID
translateUnitDefinition scope _ nsid (L1.L1UnitDefUnitExpr ss expr) =
translateUnitExpression scope ss nsid expr
-- | Translates an L1DomainExpression to an L2DomainExpression
translateDomainExpression :: ScopeInformation -> L1.SrcSpan -> L2.L2NamespaceID -> L1.L1DomainExpression -> ModelTranslation L2.L2DomainExpression
translateDomainExpression scope ss nsid ex = fst <$> translateDomainExpressionCrossScope scope ss nsid ex
-- | Translates an L1DomainExpression to an L2DomainExpression, and also returns the scope inside the outermost
-- domain expression.
translateDomainExpressionCrossScope :: ScopeInformation -> L1.SrcSpan -> L2.L2NamespaceID -> L1.L1DomainExpression -> ModelTranslation (L2.L2DomainExpression, ScopeInformation)
translateDomainExpressionCrossScope scope _ nsid (L1.L1DomainExpressionProduct ss labels) =
(,) <$> (L2.L2DomainExpressionProduct ss <$> translateLabelledDomains scope ss nsid labels) <*> (pure scope)
translateDomainExpressionCrossScope scope _ nsid (L1.L1DomainExpressionDisjointUnion ss labels) =
(,) <$> (L2.L2DomainExpressionDisjointUnion ss <$> translateLabelledDomains scope ss nsid labels)
<*> (pure scope)
translateDomainExpressionCrossScope scope _ nsid (L1.L1DomainExpressionFieldSignature ss dom cod) =
(,) <$> (L2.L2DomainExpressionFieldSignature ss <$> translateDomainExpression scope ss nsid dom
<*> translateDomainExpression scope ss nsid cod
) <*> (pure scope)
translateDomainExpressionCrossScope scope _ nsid (L1.L1DomainExpressionReal ss units) =
(,) <$> (L2.L2DomainExpressionReal ss <$> translateUnitExpression scope ss nsid units)
<*> (pure scope)
translateDomainExpressionCrossScope scope _ nsid (L1.L1DomainExpressionApply ss args value) = do
let processOneArg (domArgs, unitArgs) (L1.L1ScopedID _ sdName, Left dex) = do
domArg <- ((,) (L2.L2ScopedDomainID sdName 0)) <$>
translateDomainExpression scope ss nsid dex
return (domArg : domArgs, unitArgs)
processOneArg (domArgs, unitArgs) (L1.L1ScopedID _ sdName, Right uex) = do
unitArg <- (,) (L2.L2ScopedUnitID sdName 0) <$> translateUnitExpression scope ss nsid uex
return (domArgs, unitArg : unitArgs)
(domArgs, unitArgs) <- foldM processOneArg ([], []) args
(,)
<$> (L2.L2DomainExpressionApply ss domArgs unitArgs <$> translateDomainExpression scope ss nsid value)
<*> (pure scope)
translateDomainExpressionCrossScope scope _ nsid (L1.L1DomainFunctionEvaluate ss func args) =
(,) <$>
(L2.L2DomainFunctionEvaluate ss
<$> findScopedSymbolByRAPath ss nsid func
(\nsc dfName ->
M.lookup (L1.l1IdBS dfName) (L2.l2nsDomainFunctions nsc)
)
<*> mapM (translateDomainExpression scope ss nsid) args
) <*> (pure scope)
translateDomainExpressionCrossScope scope _ nsid (L1.L1DomainVariableRef ss sdName) =
case (M.lookup sdName . siDomainIDMap) scope of
Nothing ->
fail ("Cannot find scoped domain variable " ++ BSC.unpack (L1.l1ScopedIdBS sdName) ++ " referenced at " ++
show ss ++ " in scope " ++ show scope)
Just scopedDomain ->
return $ (L2.L2DomainVariableRef ss scopedDomain, scope)
translateDomainExpressionCrossScope scope _ nsid (L1.L1DomainReference ss ([email protected]{})) =
fail $ "Attempt to use a label ending in an integer as a domain name, which is invalid, at " ++ (show ss)
translateDomainExpressionCrossScope scope _ nsid (L1.L1DomainReference ss (L1.L1RelOrAbsPathNoInt _ isabs rp)) = do
domainType <-
findScopedSymbolByRAPath ss nsid (L1.L1RelOrAbsPath ss isabs rp) $
\nsc domainName ->
M.lookup (L1.l1IdBS domainName) (L2.l2nsDomains nsc)
return (domainType, scope)
translateDomainExpressionCrossScope scope _ nsid (L1.L1DomainExpressionLambda ss dh dexpr) = do
let processDomainHeadMember (scope, sd, su) (L1.L1DHMScopedDomain name kind) = do
sdid <- setScopedDomainName (L1.l1ScopedIdBS name) . L2.l2NextScopedDomainID <$> getL2Model
modifyL2Model $ \mod -> mod {
L2.l2NextScopedDomainID = (\(L2.L2ScopedDomainID _ v) -> L2.L2ScopedDomainID "!unknown" (v + 1)) sdid
}
return (scope { siDomainIDMap = M.insert name sdid (siDomainIDMap scope) }, (sdid, kind) : sd, su)
processDomainHeadMember (scope, sd, su) (L1.L1DHMScopedUnit name) = do
suid <- setScopedUnitName (L1.l1ScopedIdBS name) . L2.l2NextScopedUnitID <$> getL2Model
modifyL2Model $ \mod -> mod {
L2.l2NextScopedUnitID = (\(L2.L2ScopedUnitID _ v) -> L2.L2ScopedUnitID "!unknown" (v + 1)) suid
}
return (scope { siUnitIDMap = M.insert name suid (siUnitIDMap scope) }, sd, suid : su)
processDomainHeadMember x _ = return x
(scope', scopedDomains, scopedUnits) <-
foldM processDomainHeadMember (scope, [], []) dh
let processDomainHeadMember' (uc, de, dr) (L1.L1DHMUnitConstraint uex1 uex2) = do
uex1' <- translateUnitExpression scope' ss nsid uex1
uex2' <- translateUnitExpression scope' ss nsid uex2
return ((uex1', uex2'):uc, de, dr)
processDomainHeadMember' (uc, de, dr) (L1.L1DHMEquality dex1 dex2) = do
dex1' <- translateDomainExpression scope' ss nsid dex1
dex2' <- translateDomainExpression scope' ss nsid dex2
return (uc, (dex1', dex2'):de, dr)
processDomainHeadMember' (uc, de, dr) (L1.L1DHMRelation classex args) = do
l2ClassEx <- translateClassExpression scope' ss nsid classex
args' <- mapM (translateDomainExpression scope' ss nsid) args
return (uc, de, (l2ClassEx, args'):dr)
processDomainHeadMember' x _ = return x
(uConstrs, dEqs, dRels) <-
foldM processDomainHeadMember' ([], [], []) dh
dexpr2 <- translateDomainExpression scope' ss nsid dexpr
return $ (L2.L2DomainExpressionLambda ss scopedDomains scopedUnits uConstrs dEqs dRels dexpr2, scope')
-- | Translates an L1ClassExpression into an L2ClassExpression
translateClassExpression :: ScopeInformation -> L1.SrcSpan -> L2.L2NamespaceID -> L1.L1ClassExpression -> ModelTranslation L2.L2ClassExpression
translateClassExpression scope _ nsid (L1.L1ClassExpressionReference ss cpath) = do
classID <-
(findScopedSymbolByRAPath ss nsid cpath $ \nsc className ->
M.lookup (L1.l1IdBS className) (L2.l2nsClasses nsc))
return $ L2.L2ClassExpressionReference ss classID
translateClassExpression scope _ nsid (L1.L1ClassExpressionOpenDisjointUnion ss labels) =
L2.L2ClassExpressionOpenDisjointUnion ss <$> translateLabelledDomains scope ss nsid labels
translateClassExpression scope _ nsid (L1.L1ClassExpressionList ss exlist) =
L2.L2ClassExpressionList ss <$> (mapM (translateClassExpression scope ss nsid) exlist)
-- | Translates an L1LabelledDomains into an L2LabelledDomains.
translateLabelledDomains :: ScopeInformation -> L1.SrcSpan -> L2.L2NamespaceID ->
L1.L1LabelledDomains -> ModelTranslation L2.L2LabelledDomains
translateLabelledDomains scope ss nsid (L1.L1LabelledDomains ls) =
L2.L2LabelledDomains
<$> mapM (\(l, ex) ->
(,) <$> translateLabelOnly "labelled domains" scope ss nsid l
<*> translateDomainExpression scope ss nsid ex
) ls
-- | Produces a L2DomainType linking to a new L2ClonelikeDomainContents.
makeClonelikeDomain :: ScopeInformation -> L1.SrcSpan -> L2.L2NamespaceID ->
L1.L1DomainExpression -> (ScopeInformation -> ModelTranslation L2.L2DomainCloneType) ->
ModelTranslation L2.L2DomainExpression
makeClonelikeDomain scope ss nsid dt ct = do
domID <- L2.l2NextDomain <$> getL2Model
(domEx, scope') <- translateDomainExpressionCrossScope scope ss nsid dt
cldc <- L2.L2ClonelikeDomainContents ss domEx
<$> (ct scope')
modifyL2Model $ \mod -> mod {
L2.l2NextDomain = (\(L2.L2DomainID n) -> L2.L2DomainID (n + 1)) domID,
L2.l2AllDomains = M.insert domID cldc (L2.l2AllDomains mod)
}
return $ L2.L2DomainReference ss domID
-- | Translates an L1DomainDefinition to an L2DomainType, registering new domains
-- as required for the definition.
translateDomainDefinition :: ScopeInformation -> L1.SrcSpan -> L2.L2NamespaceID ->
L1.L1DomainDefinition -> ModelTranslation L2.L2DomainExpression
translateDomainDefinition scope _ nsid
(L1.L1CloneDomain ss dt) =
makeClonelikeDomain scope ss nsid dt (const . return $ L2.L2DomainClone)
translateDomainDefinition scope _ nsid (L1.L1SubsetDomain ss dt ex) =
makeClonelikeDomain scope ss nsid dt
(\scope' -> L2.L2DomainSubset <$> (translateExpression scope' ss nsid ex))
translateDomainDefinition scope _ nsid
(L1.L1ConnectDomain ss dt ex) =
makeClonelikeDomain scope ss nsid dt
(\scope' -> L2.L2DomainConnect <$> (translateExpression scope' ss nsid ex))
translateDomainDefinition scope _ nsid
(L1.L1DomainDefDomainType ss dt) =
translateDomainExpression scope ss nsid dt
-- | Attempts to globally replace all temporary aliases for types with their
-- true values, or fails with an error for the user if a cycle is found.
fixAliasReferences :: L2.L2NamespaceID -> ModelTranslation ()
fixAliasReferences nsid = do
initialMaps@(tmpdts, tmpunitexs) <- (\mts -> (mtsTempIDDomainType mts, mtsTempIDUnitEx mts)) <$> get
let tmpList = map Left (M.keys tmpdts) ++ map Right (M.keys tmpunitexs)
((finaldts, finalunitexs), _) <- flip (flip foldM (initialMaps, S.empty)) tmpList $
\(currentMaps, done) toFix ->
tryResolveOneMapEntry [] currentMaps done toFix
modifyL2Model $ \mod ->
transformBi (replaceTempDomainEx finaldts) .
transformBi (replaceTempUnitEx finalunitexs) $ mod
-- | Applies a substitution map to a domain type.
replaceTempDomainEx :: M.Map L2.L2DomainID L2.L2DomainExpression -> L2.L2DomainExpression -> L2.L2DomainExpression
replaceTempDomainEx mSubst (L2.L2DomainReference ss tmpId)
| Just domainType <- M.lookup tmpId mSubst = domainType
replaceTempDomainEx _ x = x
-- | Replaces a temporary unit ID reference with the expression it resolves to.
replaceTempUnitEx :: M.Map L2.L2BaseUnitsID L2.L2UnitExpression -> L2.L2UnitExpression -> L2.L2UnitExpression
replaceTempUnitEx m (L2.L2UnitExRef ss buid)
| Just uex <- M.lookup buid m = uex
replaceTempUnitEx _ x = x
-- | Attempts to change an entry in temporary maps into its final form, making
-- all prerequisite entries into their final form in the process.
tryResolveOneMapEntry :: [Either L2.L2DomainID L2.L2BaseUnitsID] ->
(M.Map L2.L2DomainID L2.L2DomainExpression, M.Map L2.L2BaseUnitsID L2.L2UnitExpression) ->
S.Set (Either L2.L2DomainID L2.L2BaseUnitsID) ->
Either L2.L2DomainID L2.L2BaseUnitsID ->
ModelTranslation ((M.Map L2.L2DomainID L2.L2DomainExpression,
M.Map L2.L2BaseUnitsID L2.L2UnitExpression),
S.Set (Either L2.L2DomainID L2.L2BaseUnitsID))
tryResolveOneMapEntry stack currentMaps@(dm,um) done toFix
| toFix `S.member` done = return (currentMaps, done)
| toFix `elem` stack = do
let describeMapEntry (Left dt) = "domain type at " ++ (show $ L2.l2DomainExpressionSS (dm!dt))
describeMapEntry (Right uex) = "units expression at " ++ (show $ L2.l2UnitExSS (um!uex))
let paths = intercalate ", which is referenced by "
(map describeMapEntry
((toFix:(takeWhile (/= toFix) stack)) ++ [toFix]))
fail $ "Aliases form a cycle, which is invalid: " ++ paths
| otherwise =
let
initialContents = either (Left . flip M.lookup dm) (Right . flip M.lookup um) toFix
allDeps = [Left domId | L2.L2DomainReference { L2.l2DomainExpressionRef = domId } <-
universeBi initialContents, domId `M.member` dm] ++
[Right buid | L2.L2UnitExRef { L2.l2UnitExRef = buid } <-
universeBi initialContents, buid `M.member` um]
in do
(finalMaps@(dm, um), done') <-
foldM (\(currentMaps, done) dep -> tryResolveOneMapEntry (toFix:stack) currentMaps done dep)
(currentMaps, done) allDeps
let
fixup :: Data a => a -> a
fixup =
transformBi (replaceTempDomainEx dm) . transformBi (replaceTempUnitEx um)
return (either (\l -> (M.update (Just . fixup) l dm, um))
(\r -> (dm, M.update (Just . fixup) r um)) toFix,
               S.insert toFix done')
-- | Maps a namespace ID to a 'friendly name' that can be displayed to the user.
-- The context namespace is used so that when there are several options the
-- most appropriate choice can be shown.
nsidToFriendlyName :: L2.L2NamespaceID -> L2.L2NamespaceID -> ModelTranslation String
nsidToFriendlyName context target
| context == target = return "main namespace"
| otherwise = do
n <- BSC.unpack <$>
(fromMaybe "anonymous namespace" <$> (nsidToFriendlyName' context target))
nsc <- getNamespaceContents "friendlyname" target
return $ n ++ (" at " ++ show (L2.l2nsSrcSpan nsc))
nsidToFriendlyName' context target = do
allNS <- (M.toList . L2.l2nsNamespaces) <$> getNamespaceContents "friendlyname'" context
foldM (\m (nsname, nsid) -> (liftM (mplus m)) (if nsid == target
then return (Just nsname)
else (liftM $ ((nsname <> "::") <>)) <$>
nsidToFriendlyName' nsid target))
Nothing allNS
-- Utility functions for working with L2 models...
-- | Fetch the current L2 model out of the state.
getL2Model :: ModelTranslation L2.L2Model
getL2Model = mtsL2Model <$> get
-- | Modify the L2 model in the state.
modifyL2Model :: (L2.L2Model -> L2.L2Model) -> ModelTranslation ()
modifyL2Model f = modify $ \mts -> mts { mtsL2Model = f (mtsL2Model mts) }
-- | Fetch the contents of a namespace using the namespace ID.
getNamespaceContents :: String -> L2.L2NamespaceID -> ModelTranslation L2.L2NamespaceContents
getNamespaceContents strWhy nsid = do
maybeNS <- (M.lookup nsid . L2.l2AllNamespaces) <$> getL2Model
case maybeNS of
Nothing -> error $ "Internal Compiler Error: Can't find namespace ID " ++ show nsid ++ " in model for "
++ strWhy
Just v -> return v
-- | Modify the contents of a namespace using namespace ID.
modifyNamespaceContents :: L2.L2NamespaceID -> (L2.L2NamespaceContents -> L2.L2NamespaceContents) -> ModelTranslation ()
modifyNamespaceContents ns f =
modifyL2Model $ \mod ->
mod {
L2.l2AllNamespaces = M.update (Just . f) ns (L2.l2AllNamespaces mod)
}
-- | Unconditionally register a new namespace.
registerNewNamespace :: L2.SrcSpan -> L2.L2NamespaceID -> L1.L1NamespaceContents -> ModelTranslation L2.L2NamespaceID
registerNewNamespace ss parent nsc = do
nsID <- L2.l2NextNamespace <$> getL2Model
modify $ \mts ->
(
mts { mtsL2Model = (mtsL2Model mts) {
L2.l2AllNamespaces = M.insert nsID (blankNamespaceContents ss parent) (L2.l2AllNamespaces (mtsL2Model mts)),
L2.l2NextNamespace = (\(L2.L2NamespaceID v) -> L2.L2NamespaceID (v + 1))
(L2.l2NextNamespace (mtsL2Model mts))
},
mtsL2ToL1Map = M.insert nsID nsc (mtsL2ToL1Map mts)
}
)
return nsID
-- | Finds a particular namespace by name in a particular namespace. Note:
-- This does not do scope resolution, i.e. does not search for the
-- namespace in parent namespaces, and so should only be used when you know
-- exactly where the namespace should be.
findExactNamespace :: L2.L2NamespaceID -> BS.ByteString -> ModelTranslation (Maybe L2.L2NamespaceID)
findExactNamespace parentNS nsName = do
m <- mtsL2Model <$> get
let Just pns = M.lookup parentNS (L2.l2AllNamespaces m)
return $ M.lookup nsName (L2.l2nsNamespaces pns)
-- | Takes an optional list of what to import, an optional list of what to hide, and a target namespace and model,
-- and builds a list of symbols to import. Fails with an error if there is a symbol in the what or hiding list which
-- doesn't actually exist.
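-- As a small worked example of the case analysis below (symbol names are
-- illustrative): with symbols a, b and c present in the namespace,
--   what = Nothing,  hiding = Nothing   ==> [a, b, c]
--   what = Just [a], hiding = Nothing   ==> [a]
--   what = Nothing,  hiding = Just [b]  ==> [a, c]
--   what = Just [a], hiding = Just [a]  ==> []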
importListFromWhatAndHiding ::
Monad m => L2.L2NamespaceID -> L2.L2Model -> Maybe [L1.L1Identifier] -> Maybe [L1.L1Identifier] -> m [L1.L1Identifier]
importListFromWhatAndHiding nsID m what hiding = do
let impList = allSymbolNames nsID m
impSet = S.fromList impList
whatSet = S.fromList (fromMaybe [] what)
hidingSet = S.fromList (fromMaybe [] hiding)
missingSyms = (whatSet `S.union` hidingSet) `S.difference` impSet
when (not (S.null missingSyms)) . fail $
"Import statement mentions the following symbols which don't exist: " ++
(intercalate ", " $
map (\(L1.L1Identifier ss n) ->
(BSC.unpack n) ++ " at " ++ (show ss)) $ S.toList missingSyms)
let finalSet = case (what, hiding) of
(Nothing, Nothing) -> impSet
(Just _, Nothing) -> whatSet
(Nothing, Just _) -> impSet `S.difference` hidingSet
(Just _, Just _) -> whatSet `S.difference` hidingSet
return . S.toList $ finalSet
-- | Finds all symbols in a particular namespace, of all types.
allSymbolNames :: L2.L2NamespaceID -> L2.L2Model -> [L1.L1Identifier]
allSymbolNames nsID m =
let
Just nsc = M.lookup nsID (L2.l2AllNamespaces m)
in
map (L1.L1Identifier (L2.l2nsSrcSpan nsc)) $
concatMap (\f -> f nsc)
[M.keys . L2.l2nsNamespaces,
M.keys . L2.l2nsDomains,
M.keys . L2.l2nsNamedValues,
M.keys . L2.l2nsClassValues,
M.keys . L2.l2nsUnits,
M.keys . L2.l2nsClasses,
M.keys . L2.l2nsDomainFunctions,
M.keys . L2.l2nsLabels
]
allocDomainID :: ModelTranslation L2.L2DomainID
allocDomainID = do
newID <- L2.l2NextDomain <$> getL2Model
modifyL2Model $ \mod -> mod { L2.l2NextDomain = (\(L2.L2DomainID n) -> L2.L2DomainID (n + 1)) newID }
return newID
-- | Finds a symbol, using the scoped resolution rules and a custom function to
-- check each namespace for a symbol of the appropriate type. Fails with an error
-- appropriate for the end user if the symbol cannot be found.
findScopedSymbolByRAPath :: L1.SrcSpan -> L2.L2NamespaceID -> L1.L1RelOrAbsPath ->
(L2.L2NamespaceContents -> L1.L1Identifier -> Maybe a) -> ModelTranslation a
findScopedSymbolByRAPath ss startNS ra tryGet = do
(nsra@(L1.L1RelOrAbsPath _ isabs rp), symName) <- maybe (fail $ "Inappropriate use of an empty path, at " ++ show ss)
return $ trySplitRAPathOnLast ra
-- Recursively find the namespace...
nsid <- findNamespaceByRAPath ss startNS nsra
let foldStrategy =
if (not isabs && null (L1.l1RelPathIDs rp))
then flip (flip foldOverNSScopesM Nothing) nsid
else (\x -> x Nothing nsid)
r <- foldStrategy $ \s nsid' -> (mplus s) <$> do
nsc <- (fromJust . M.lookup nsid' . L2.l2AllNamespaces) <$> getL2Model
return $ tryGet nsc symName
maybe (fail $ "Symbol " ++ (BSC.unpack . L1.l1IdBS $ symName) ++
" not found in namespace at " ++ (show . L1.l1IdSS $ symName)) return r
trySplitRAPathOnLast :: L1.L1RelOrAbsPath -> Maybe (L1.L1RelOrAbsPath, L1.L1Identifier)
trySplitRAPathOnLast (L1.L1RelOrAbsPath ss ra p) =
maybe Nothing (\(x, y) -> Just (L1.L1RelOrAbsPath ss ra x, y))
(trySplitRPathOnLast p)
trySplitRPathOnLast (L1.L1RelPath rpss []) = Nothing
trySplitRPathOnLast (L1.L1RelPath rpss l) = Just (L1.L1RelPath rpss (init l), last l)
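-- For instance (illustrative), splitting a path written as A::B::sym yields the
-- namespace path A::B paired with the final identifier sym; an empty relative
-- path yields Nothing.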
-- | Performs a fold over all namespaces where a symbol might be found.
foldOverNSScopesM :: (s -> L2.L2NamespaceID -> ModelTranslation s) -> s -> L2.L2NamespaceID -> ModelTranslation s
foldOverNSScopesM f s0 nsid
| nsid == nsSpecial = return s0
| otherwise = do
s1 <- f s0 nsid
nsc <- (fromJust . M.lookup nsid . L2.l2AllNamespaces) <$> getL2Model
foldOverNSScopesM f s1 (L2.l2nsParent nsc)
-- | Finds a Level 2 namespace using a Level 1 RelOrAbsPath
findNamespaceByRAPath :: L1.SrcSpan -> L2.L2NamespaceID -> L1.L1RelOrAbsPath -> ModelTranslation L2.L2NamespaceID
findNamespaceByRAPath _ thisNSID (L1.L1RelOrAbsPath _ _ (L1.L1RelPath _ [])) = return thisNSID
findNamespaceByRAPath ss thisNSID rapath = findScopedSymbolByRAPath ss thisNSID rapath $ \nsc ident ->
M.lookup (L1.l1IdBS ident) (L2.l2nsNamespaces nsc)
-- | Runs a model translation such that any new assertions that are added do not
-- end up in the global assertion list, but instead becomes part of the result.
isolateAssertions :: ModelTranslation a -> ModelTranslation (a, [L2.L2Expression])
isolateAssertions f = do
origAssertions <- L2.l2AllAssertions <$> getL2Model
modifyL2Model $ \mod -> mod { L2.l2AllAssertions = [] }
r <- f
newAssertions <- L2.l2AllAssertions <$> getL2Model
modifyL2Model $ \mod -> mod { L2.l2AllAssertions = origAssertions }
return (r, newAssertions)
setScopedUnitName :: BS.ByteString -> L2.L2ScopedUnitID -> L2.L2ScopedUnitID
setScopedUnitName bs (L2.L2ScopedUnitID _ suid) = L2.L2ScopedUnitID bs suid
setScopedDomainName :: BS.ByteString -> L2.L2ScopedDomainID -> L2.L2ScopedDomainID
setScopedDomainName bs (L2.L2ScopedDomainID _ suid) = L2.L2ScopedDomainID bs suid
-- | Find all distinct namespace IDs that are children of a given namespace.
-- Note that namespaces that are children by virtue of an import are excluded.
findNamespaceDescendents :: String -> L2.L2NamespaceID -> ModelTranslation (S.Set L2.L2NamespaceID)
findNamespaceDescendents why nsid = do
nsc <- getNamespaceContents "getdescendents - self" nsid
trueChildren <-
filterM (\childNS -> ((==nsid) . L2.l2nsParent) <$> getNamespaceContents ("descendents - child of " ++ show nsid ++ " - " ++ why) childNS) $
M.elems (L2.l2nsNamespaces nsc)
descs1 <- S.unions <$> mapM (findNamespaceDescendents (why++"(C)")) trueChildren
return $ descs1 `S.union` (S.fromList trueChildren)
-- | Finds a namespace in a Level 2 model, treating a particular namespace as root and applying a level 1 namespace path. Fails with an error if the
-- namespace cannot be found. Does not look up the tree from the starting namespace for other matches if it is not found at the root.
findNamespaceInL2UsingL1Path :: Monad m => L2.L2NamespaceID -> L2.L2Model -> L1.L1RelPath -> m L2.L2NamespaceID
findNamespaceInL2UsingL1Path ns0 _ (L1.L1RelPath _ []) = return ns0
findNamespaceInL2UsingL1Path ns0 l2mod (L1.L1RelPath _ ((L1.L1Identifier ss l1id):l1ids)) = do
let Just nsc0 = M.lookup ns0 (L2.l2AllNamespaces l2mod)
case M.lookup l1id (L2.l2nsNamespaces nsc0) of
Nothing ->
fail $ "Attempt to import namespace " ++ (BSC.unpack l1id) ++ " which doesn't exist, at " ++ show ss
Just ns1 ->
findNamespaceInL2UsingL1Path ns1 l2mod (L1.L1RelPath ss l1ids)
-- Utility functions that are needed in this file, but don't use any special data structures from here.
-- | Finds the first 'Just' value returned from a list of monadic expressions.
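-- A quick illustrative example (in IO, say):
--   firstJustM [return Nothing, return (Just 2), return (Just 3)]
-- yields Just 2; actions after the first 'Just' are not run.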
firstJustM :: Monad m => [m (Maybe a)] -> m (Maybe a)
firstJustM (mh:t) = do
h <- mh
maybe (firstJustM t) (return . Just) h
firstJustM [] = return Nothing
-- | Like mapMaybe, but runs in a monad.
mapMaybeM :: Monad m => (a -> m (Maybe b)) -> [a] -> m [b]
mapMaybeM f l = catMaybes `liftM` sequence (map f l)
-- | Like concatMap, but runs in a monad.
concatMapM :: Monad m => (a -> m [b]) -> [a] -> m [b]
concatMapM f l = concat `liftM` sequence (map f l)
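-- Illustrative uses of the two helpers above (values are hypothetical):
--   mapMaybeM (\x -> return (if even x then Just x else Nothing)) [1..4]  -- => [2,4]
--   concatMapM (\x -> return [x, x]) [1, 2]                               -- => [1,1,2,2]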
partitionM :: (T.Foldable t, Monad f) => (a -> f Bool) -> t a -> f ([a], [a])
partitionM which cont = T.foldrM (\c (a, b) -> do
cwhich <- which c
return (if cwhich then (a, (c:b)) else ((c:a), b))
) ([], []) cont
| A1kmm/declarative-fieldml-prototype | src/Data/FieldML/Level1ToLevel2.hs | bsd-3-clause | 88,654 | 0 | 36 | 21,629 | 23,073 | 11,655 | 11,418 | 1,288 | 17 |
{-
Author: Andrey Mokhov, Newcastle University
Date: 24 December 2012
Contact: andrey.mokhov@{ncl.ac.uk, gmail.com}
Description: Testing Haskell interface to BDDs.
-}
import Predicate
import BDD
import Prelude hiding (not, (&&), (||))
x = variable 0
y = variable 1
z = (variable 2) :: Node Int
x' = not x
y' = not y
z' = not z
test :: Bool -> IO ()
test True = putStrLn "OK"
test False = putStrLn "FAIL"
main = do
test $ (true :: Node Int) /= false
test $ x == not x'
test $ x' == ite x false true
test $ (x && y) == (not (x' || y'))
test $ (x && z || y && z') == (x && y || x && z || y && z')
setCacheSize 65536
test $ ((x `xor` y) `xor` (x `xor` y')) == true
test $ (x && y) < x
test $ x' < (x' || y')
test $ iteTrue (x && x') false true
putStrLn "GC..."
runGC
putStrLn "Clearing..."
clear
putStrLn "Done"
| tuura/compote | src/haskell/test/test-Predicate.hs | bsd-3-clause | 877 | 3 | 13 | 241 | 392 | 195 | 197 | 28 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module Stack.Script
( scriptCmd
) where
import Control.Exception (assert)
import Control.Exception.Safe (throwM)
import Control.Monad (unless, forM)
import Control.Monad.IO.Class (liftIO)
import Control.Monad.Logger
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as S8
import qualified Data.Conduit.List as CL
import Data.Foldable (fold)
import Data.List.Split (splitWhen)
import qualified Data.Map.Strict as Map
import Data.Maybe (fromMaybe, mapMaybe)
import Data.Monoid
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Store.VersionTagged (versionedDecodeOrLoad)
import qualified Data.Text as T
import Data.Text.Encoding (encodeUtf8)
import Path
import Path.IO
import qualified Stack.Build
import Stack.BuildPlan (loadBuildPlan)
import Stack.Exec
import Stack.GhcPkg (ghcPkgExeName)
import Stack.Options.ScriptParser
import Stack.Runners
import Stack.Types.BuildPlan
import Stack.Types.Compiler
import Stack.Types.Config
import Stack.Types.PackageName
import Stack.Types.Resolver
import Stack.Types.StackT
import Stack.Types.StringError
import System.FilePath (dropExtension, replaceExtension)
import System.Process.Read
-- | Run a Stack Script
scriptCmd :: ScriptOpts -> GlobalOpts -> IO ()
scriptCmd opts go' = do
let go = go'
{ globalConfigMonoid = (globalConfigMonoid go')
{ configMonoidInstallGHC = First $ Just True
}
, globalStackYaml = SYLNoConfig
}
withBuildConfigAndLock go $ \lk -> do
-- Some warnings in case the user somehow tries to set a
    -- stack.yaml location. Note that in this function we use
-- logError instead of logWarn because, when using the
-- interpreter mode, only error messages are shown. See:
-- https://github.com/commercialhaskell/stack/issues/3007
case globalStackYaml go' of
SYLOverride fp -> $logError $ T.pack
$ "Ignoring override stack.yaml file for script command: " ++ fp
SYLDefault -> return ()
SYLNoConfig -> assert False (return ())
config <- view configL
menv <- liftIO $ configEnvOverride config defaultEnvSettings
wc <- view $ actualCompilerVersionL.whichCompilerL
(targetsSet, coresSet) <-
case soPackages opts of
[] -> do
$logError "No packages provided, using experimental import parser"
getPackagesFromImports (globalResolver go) (soFile opts)
packages -> do
let targets = concatMap wordsComma packages
targets' <- mapM parsePackageNameFromString targets
return (Set.fromList targets', Set.empty)
unless (Set.null targetsSet) $ do
-- Optimization: use the relatively cheap ghc-pkg list
-- --simple-output to check which packages are installed
-- already. If all needed packages are available, we can
-- skip the (rather expensive) build call below.
bss <- sinkProcessStdout
Nothing menv (ghcPkgExeName wc)
["list", "--simple-output"] CL.consume -- FIXME use the package info from envConfigPackages, or is that crazy?
let installed = Set.fromList
$ map toPackageName
$ words
$ S8.unpack
$ S8.concat bss
if Set.null $ Set.difference (Set.map packageNameString targetsSet) installed
then $logDebug "All packages already installed"
else do
$logDebug "Missing packages, performing installation"
Stack.Build.build (const $ return ()) lk defaultBuildOptsCLI
{ boptsCLITargets = map packageNameText $ Set.toList targetsSet
}
let ghcArgs = concat
[ ["-hide-all-packages"]
, map (\x -> "-package" ++ x)
$ Set.toList
$ Set.insert "base"
$ Set.map packageNameString (Set.union targetsSet coresSet)
, case soCompile opts of
SEInterpret -> []
SECompile -> []
SEOptimize -> ["-O2"]
]
munlockFile lk -- Unlock before transferring control away.
case soCompile opts of
SEInterpret -> exec menv ("run" ++ compilerExeName wc)
(ghcArgs ++ soFile opts : soArgs opts)
_ -> do
file <- resolveFile' $ soFile opts
let dir = parent file
-- use sinkProcessStdout to ensure a ProcessFailed
-- exception is generated for better error messages
sinkProcessStdout
(Just dir)
menv
(compilerExeName wc)
(ghcArgs ++ [soFile opts])
CL.sinkNull
exec menv (toExeName $ toFilePath file) (soArgs opts)
where
toPackageName = reverse . drop 1 . dropWhile (/= '-') . reverse
-- Like words, but splits on both commas and spaces
wordsComma = splitWhen (\c -> c == ' ' || c == ',')
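    -- e.g. (illustrative) wordsComma "aeson,text" == ["aeson","text"]
    --      and            wordsComma "aeson text" == ["aeson","text"]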
toExeName fp =
if isWindows
then replaceExtension fp "exe"
else dropExtension fp
isWindows :: Bool
#ifdef WINDOWS
isWindows = True
#else
isWindows = False
#endif
-- | Returns packages that need to be installed, and all of the core
-- packages. Reason for the core packages:
-- Ideally we'd have the list of modules per core package listed in
-- the build plan, but that doesn't exist yet. Next best would be to
-- list the modules available at runtime, but that gets tricky when we
-- install GHC. Instead, we'll just list all core packages.
getPackagesFromImports :: Maybe AbstractResolver
-> FilePath
-> StackT EnvConfig IO (Set PackageName, Set PackageName)
getPackagesFromImports Nothing _ = throwM NoResolverWhenUsingNoLocalConfig
getPackagesFromImports (Just (ARResolver (ResolverSnapshot name))) scriptFP = do
(pns1, mns) <- liftIO $ parseImports <$> S8.readFile scriptFP
mi <- loadModuleInfo name
pns2 <-
if Set.null mns
then return Set.empty
else do
pns <- forM (Set.toList mns) $ \mn ->
case Map.lookup mn $ miModules mi of
Just pns ->
case Set.toList pns of
[] -> assert False $ return Set.empty
[pn] -> return $ Set.singleton pn
pns' -> throwString $ concat
[ "Module "
, S8.unpack $ unModuleName mn
, " appears in multiple packages: "
, unwords $ map packageNameString pns'
]
Nothing -> return Set.empty
return $ Set.unions pns `Set.difference` blacklist
return (Set.union pns1 pns2, modifyForWindows $ miCorePackages mi)
where
modifyForWindows
| isWindows = Set.insert $(mkPackageName "Win32") . Set.delete $(mkPackageName "unix")
| otherwise = id
getPackagesFromImports (Just (ARResolver (ResolverCompiler _))) _ = return (Set.empty, Set.empty)
getPackagesFromImports (Just aresolver) _ = throwM $ InvalidResolverForNoLocalConfig $ show aresolver
-- | The Stackage project introduced the concept of hidden packages,
-- to deal with conflicting module names. However, this is a
-- relatively recent addition (at time of writing). See:
-- http://www.snoyman.com/blog/2017/01/conflicting-module-names. To
-- kick this thing off a bit better, we've included a blacklist of
-- packages that should never be auto-parsed in.
blacklist :: Set PackageName
blacklist = Set.fromList
[ $(mkPackageName "async-dejafu")
, $(mkPackageName "monads-tf")
, $(mkPackageName "crypto-api")
, $(mkPackageName "fay-base")
, $(mkPackageName "hashmap")
, $(mkPackageName "hxt-unicode")
, $(mkPackageName "hledger-web")
, $(mkPackageName "plot-gtk3")
, $(mkPackageName "gtk3")
, $(mkPackageName "regex-pcre-builtin")
, $(mkPackageName "regex-compat-tdfa")
, $(mkPackageName "log")
, $(mkPackageName "zip")
, $(mkPackageName "monad-extras")
, $(mkPackageName "control-monad-free")
, $(mkPackageName "prompt")
, $(mkPackageName "kawhi")
, $(mkPackageName "language-c")
, $(mkPackageName "gl")
, $(mkPackageName "svg-tree")
, $(mkPackageName "Glob")
, $(mkPackageName "nanospec")
, $(mkPackageName "HTF")
, $(mkPackageName "courier")
, $(mkPackageName "newtype-generics")
, $(mkPackageName "objective")
, $(mkPackageName "binary-ieee754")
, $(mkPackageName "rerebase")
, $(mkPackageName "cipher-aes")
, $(mkPackageName "cipher-blowfish")
, $(mkPackageName "cipher-camellia")
, $(mkPackageName "cipher-des")
, $(mkPackageName "cipher-rc4")
, $(mkPackageName "crypto-cipher-types")
, $(mkPackageName "crypto-numbers")
, $(mkPackageName "crypto-pubkey")
, $(mkPackageName "crypto-random")
, $(mkPackageName "cryptohash")
, $(mkPackageName "cryptohash-conduit")
, $(mkPackageName "cryptohash-md5")
, $(mkPackageName "cryptohash-sha1")
, $(mkPackageName "cryptohash-sha256")
]
toModuleInfo :: BuildPlan -> ModuleInfo
toModuleInfo bp = ModuleInfo
{ miCorePackages = Map.keysSet $ siCorePackages $ bpSystemInfo bp
, miModules =
Map.unionsWith Set.union
$ map ((\(pn, mns) ->
Map.fromList
$ map (\mn -> (ModuleName $ encodeUtf8 mn, Set.singleton pn))
$ Set.toList mns) . fmap (sdModules . ppDesc))
$ filter (\(pn, pp) ->
not (pcHide $ ppConstraints pp) &&
pn `Set.notMember` blacklist)
$ Map.toList (bpPackages bp)
}
-- | Where to store module info caches
moduleInfoCache :: SnapName -> StackT EnvConfig IO (Path Abs File)
moduleInfoCache name = do
root <- view stackRootL
platform <- platformGhcVerOnlyRelDir
name' <- parseRelDir $ T.unpack $ renderSnapName name
-- These probably can't vary at all based on platform, even in the
-- future, so it's safe to call this unnecessarily paranoid.
return (root </> $(mkRelDir "script") </> name' </> platform </> $(mkRelFile "module-info.cache"))
loadModuleInfo :: SnapName -> StackT EnvConfig IO ModuleInfo
loadModuleInfo name = do
path <- moduleInfoCache name
$(versionedDecodeOrLoad moduleInfoVC) path $ toModuleInfo <$> loadBuildPlan name
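-- The import scanner below is a deliberately rough, line-based approximation.
-- As an illustration (package and module names here are arbitrary examples):
--   "import qualified \"text\" Data.Text as T"  contributes the package name  text
--   "import Data.Maybe (mapMaybe)"              contributes the module name   Data.Maybe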
parseImports :: ByteString -> (Set PackageName, Set ModuleName)
parseImports =
fold . mapMaybe parseLine . S8.lines
where
stripPrefix x y
| x `S8.isPrefixOf` y = Just $ S8.drop (S8.length x) y
| otherwise = Nothing
parseLine bs0 = do
bs1 <- stripPrefix "import " bs0
let bs2 = S8.dropWhile (== ' ') bs1
bs3 = fromMaybe bs2 $ stripPrefix "qualified " bs2
case stripPrefix "\"" bs3 of
Just bs4 -> do
pn <- parsePackageNameFromString $ S8.unpack $ S8.takeWhile (/= '"') bs4
Just (Set.singleton pn, Set.empty)
Nothing -> Just
( Set.empty
, Set.singleton
$ ModuleName
$ S8.takeWhile (\c -> c /= ' ' && c /= '(') bs3
)
| Fuuzetsu/stack | src/Stack/Script.hs | bsd-3-clause | 12,242 | 0 | 24 | 4,024 | 2,683 | 1,371 | 1,312 | 227 | 9 |
module Races (predictions, predictionsExts) where
import Ultra
import Converters
import Data.Maybe
import PredictionExts
import Interval
import Text.Printf
data Race =
Normal { name :: String
, distance :: Double } |
TimeBased { name :: String
, time :: Double } |
Trail { name :: String
, distance :: Double
, hardness :: Double } |
Interval { name :: String
, reps :: Integer
, delay :: Double
, legDistance :: Double
}
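-- Example values of each flavour (figures purely illustrative):
--   Normal "5000 m" 5000
--   TimeBased "Cooper" (12 * 60.0)
--   Interval "2 x 200 m / 2 min" 2 (2 * 60.0) 200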
oneHour :: Double
oneHour = 3600.0
races :: [Race]
races =
[ Normal "800 m" 800
, Normal "1500 m" 1500
, Normal "3000 m" 3000
, Normal "5000 m" 5000
, TimeBased "Cooper" (12 * 60.0)
, TimeBased "6 h rata" (6 * oneHour)
, TimeBased "12 h rata" (12 * oneHour)
, TimeBased "24 h rata" (24 * oneHour)
, TimeBased "48 h rata" (48 * oneHour)
, Normal "Kymppi" 10000
, Normal "Puolimaraton" 21098
, Normal "Maraton" 42195
, Normal "100 km" 100000
, Normal "100 mailia" 161000
, Normal "200 km" 200000
, Trail "Vaajakosken maastoultra" 60000 66500
, Trail "Vaarojen ultra" 84000 114320
, Trail "Mongolian Sunrise to Sunset" 100000 125100
, Trail "Spartathlon" 246000 280000
, Interval "Tonnit, 5 x 1 km / 6 min" 5 (6 * 60.0) 1000
, Interval "Yassot, 10 x 800 m / 6 min" 10 (6 * 60.0) 800
, Interval "4 x 400 m / 5 min" 4 (5 * 60.0) 400
]
getRaceTime :: Predictor -> Race -> Double
getRaceTime predictor race = case race of
Normal _ distance -> timeByDistance predictor distance
Trail _ _ hardness -> timeByDistance predictor hardness
TimeBased _ time -> time
Interval _ repetitions delay legDistance -> getIntervalTimePrediction predictor repetitions delay legDistance
getRaceDistance :: Predictor -> Race -> Double
getRaceDistance predictor race = case race of
Normal _ distance -> distance
Trail _ distance _ -> distance
TimeBased _ time -> distanceByTime predictor time
Interval _ repetitions _ distance -> (fromIntegral repetitions) * distance
formatSpeed :: Double -> Double -> String
formatSpeed time distance =
let timeticks = 0.001 + (fromIntegral $ round time)::Double
minutes = timeticks/60.0
kilometers = distance/1000
pace = minutes / kilometers
paceminutes = (fromIntegral $ (floor pace))::Double
paceseconds = ((pace - paceminutes) * 60.0)::Double
in
printf "%02.f:%02.f/km" paceminutes paceseconds
timeDecorator :: Double -> Race -> String
timeDecorator raceTime (Interval _ reps _ _) =
raceTimeString
++
" ("
++
(fromJust $ fromTime $ raceTime / fromIntegral reps)
++
")"
where
raceTimeString = fromJust $ fromTime raceTime
timeDecorator raceTime race = fromJust $ fromTime raceTime
getPrediction :: Predictor -> Extensions -> Race -> [(String, String)]
getPrediction predictor pexts race =
let raceTimeGetter = getRaceTime predictor
raceDistance = getRaceDistance predictor
raceTime = raceTimeGetter race in
[ ("name", name race)
, ("time", timeDecorator raceTime race)
, ("distance", fromDistance $ raceDistance race)
, ("speed", formatSpeed raceTime (raceDistance race))
]
++
predictionExts predictor pexts raceTime
predictions :: Predictor -> [[(String, String)]]
predictions predictor = map (getPrediction predictor (extensions 0.0 0.0 0.0)) races
predictionsExts :: Predictor -> Extensions -> [[(String, String)]]
predictionsExts predictor pexts = map (getPrediction predictor pexts) races
| jrosti/ultra | src/Races.hs | bsd-3-clause | 3,734 | 11 | 12 | 1,020 | 1,063 | 565 | 498 | 92 | 4 |
import Graphics.Rendering.Chart
import Data.Colour
import Data.Colour.Names
import Data.Default.Class
import Graphics.Rendering.Chart.Backend.Cairo
import Control.Lens
import System.Environment(getArgs)
setLinesBlue :: PlotLines a b -> PlotLines a b
setLinesBlue = plot_lines_style . line_color .~ opaque blue
chart args = toRenderable layout
where
am :: Double -> Double
am x = (sin (x*3.14159/45) + 1) / 2 * (sin (x*3.14159/5))
sinusoid1 = plot_lines_values .~ [[ (x,(am x)) | x <- [0,(0.5)..400]]]
$ plot_lines_style . line_color .~ opaque blue
$ plot_lines_title .~ "am"
$ def
sinusoid2 = plot_points_style .~ filledCircles 2 (opaque green)
$ plot_points_values .~ [ (x,(am x)) | x <- [0,7..400]]
$ plot_points_title .~ "am points"
$ def
layout = layout_title .~ (head args)
$ layout_plots .~ [toPlot sinusoid1,
toPlot sinusoid2]
$ def
--main1 :: [String] -> IO ()
main' :: [String] -> IO (PickFn ())
main' args = renderableToFile def "example11_big.png" $ chart args
main = do
args <- getArgs
main' args
| visood/bioalgo | src/Chapters/Replication/Patterns/runningCount.hs | bsd-3-clause | 1,179 | 0 | 17 | 318 | 404 | 214 | 190 | -1 | -1 |
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Rash.Runtime.Process where
import qualified Control.Concurrent.Async as Async
import Control.Exception (throw, try)
import Control.Monad.IO.Class (liftIO)
import qualified Control.Monad.Trans.State as State
import qualified Data.Map.Strict as Map
import qualified GHC.IO.Exception as IOE
import qualified GHC.IO.Handle as Handle
import qualified System.Process as Proc
import Rash.IR.AST
import qualified Rash.Debug as Debug
import qualified Rash.Runtime.Runtime as RT
import Rash.Runtime.Types
import Rash.Util
die :: String -> Value -> r
die = Debug.die "proc"
handleForString :: String -> IO Handle.Handle
handleForString s = do
(r, w) <- Proc.createPipe
  -- Write the string to the write end and hand back the read end; close the
  -- write end so readers see EOF once the string has been consumed.
  Handle.hPutStr w s
  Handle.hClose w
  return r
evalPipe :: [(String, [Value])] -> Handle.Handle -> EvalExprFn -> WithState Value
evalPipe commands stdin evalProgram = do
-- TODO: when you call a pipe, what do you do with the "output"? Obviously,
-- you stream it to the parent. And occasionally the parent will be stdout. So
-- clearly, we need to pass - implicitly - the handle from the calling
-- function.
-- However, that breaks our metaphor of "returning" a packet with the streams in it...
-- TODO: we need to handle stderr too.
-- TODO support exit codes
stdout <- RT.getStdout
stderr <- RT.getStderr
do
pipes :: [(Handle.Handle, Handle.Handle)] <- liftIO $ mapM (const Proc.createPipe) commands
let pipes1 :: [(Handle.Handle, Handle.Handle)] = tail pipes
let pipes2 :: [Handle.Handle] = foldl (\c (a,b) -> c ++ [a, b]) [] pipes1
let pipes3 :: [Handle.Handle] = [stdin] ++ pipes2 ++ [stdout]
let joiner = (\case
(a:b:cs) -> [Handles a b stderr] ++ (joiner cs)
[_] -> error "shouldn't happen"
[] -> [])
let pipes4 :: [Handles] = joiner pipes3
let entirity = zip pipes4 commands
procs <- mapM buildSegment entirity
-- TODO: if an exit code is fail, stop further processes
codes <- liftIO $ mapM waitForProcess procs
let rv = last codes
return $ VPacket rv
where
buildSegment :: (Handles, (String, [Value])) -> WithState (Process)
buildSegment (handles, (cmd, args)) = do
ft <- RT.getFuncTable
let func = Map.lookup cmd ft
procHandle <- case func of
Just f -> createFuncThread f args handles evalProgram
Nothing -> liftIO $ createBackgroundProc cmd args handles
return procHandle
data Process = FuncProc (Async.Async RetVal)
| ProcProc Proc.ProcessHandle
| DeferredException IOE.IOException
waitForProcess :: Process -> IO RetVal
waitForProcess (FuncProc asyncid) = do
e <- Async.waitCatch asyncid
either throw return e
waitForProcess (ProcProc handle) = do
rv <- Proc.waitForProcess handle
return $ VResult rv
waitForProcess (DeferredException
e@(IOE.IOError { IOE.ioe_type = IOE.NoSuchThing })) = do
-- TODO: this should be put in stderr for that process
putStrLn $ "Tried to run executable " ++ (IOE.ioe_location e) ++ ", but it wasn't found. Maybe a PATH problem?"
return $ vfail (-1)
waitForProcess (DeferredException e) = do
putStrLn "Exception thrown in process:"
print e
return $ vfail (-1)
value2ProcString :: Value -> String
value2ProcString (VString s) = s
value2ProcString (VInt i) = show i
value2ProcString x = die "valueToProcString" x
createBackgroundProc :: String -> [Value] -> Handles -> IO Process
createBackgroundProc cmd args (Handles stdin stdout stderr) = do
let p = (Proc.proc cmd (map value2ProcString args)) {
Proc.std_in = Proc.UseHandle stdin
, Proc.std_out = Proc.UseHandle stdout
, Proc.std_err = Proc.UseHandle stderr
, Proc.close_fds = True }
result <- try $ liftIO $ Proc.createProcess_ cmd p
case result of
Right (_, _, _, proc) -> return $ ProcProc proc
Left e -> return $ DeferredException e
createFuncThread :: Function -> [Value] -> Handles -> EvalExprFn -> WithState Process
createFuncThread func args handles evalExpr = do
state <- RT.getState
asyncid <- do liftIO $ Async.async $ do
runFunction func args handles state evalExpr
return $ FuncProc asyncid
runFunction :: Function -> [Value] -> Handles -> IState -> EvalExprFn -> IO RetVal
runFunction (UserDefined (FuncDef _ params body))
args handles state evalExpr = do
-- new stack frame, with args TODO: copy the "globals"
let st = foldr (\((FunctionParameter param), arg)
table
-> Map.insert param arg table)
Map.empty
(zip params args)
let newState = state { frame_ = Frame st handles }
val <- State.evalStateT (mapM evalExpr body) newState
return $ b2rv . isTruthy $ (last val)
runFunction (Builtin fn) args handles state _ = do
let newState = state { frame_ = Frame Map.empty handles }
State.evalStateT (fn args) $ newState
| pbiggar/rash | src/Rash/Runtime/Process.hs | bsd-3-clause | 5,103 | 0 | 18 | 1,260 | 1,542 | 793 | 749 | 103 | 4 |
{-# LANGUAGE Rank2Types, DefaultSignatures #-}
import Prelude hiding (sum)
import Data.Monoid
import Data.Void
import Control.Applicative
import Control.Monad.Cont
newtype Explore a =
Explore { unExplore :: forall m. Monoid m => Cont m a }
runExplore :: Monoid m => Explore a -> (a -> m) -> m
runExplore = runCont . unExplore
mkExplore :: (forall m. Monoid m => (a -> m) -> m) -> Explore a
mkExplore f = Explore $ cont f
instance Functor Explore where
fmap f e = mkExplore $ \g -> runExplore e (g . f)
instance Applicative Explore where
pure x = Explore $ pure x
mf <*> mx = Explore $ unExplore mf <*> unExplore mx
instance Monad Explore where
return = pure
m >>= f = Explore $ unExplore m >>= unExplore . f
instance Alternative Explore where
empty = mkExplore mempty
mx <|> my = mkExplore (runExplore mx `mappend` runExplore my)
instance MonadPlus Explore where
mzero = empty
mplus = (<|>)
(<.|.>) :: Alternative f => f a -> f b -> f (Either a b)
x <.|.> y = Left <$> x <|> Right <$> y
boolToNum :: Num n => Bool -> n
boolToNum False = 0
boolToNum True = 1
class Summable a where
sum :: Num n => (a -> n) -> n
default sum :: (Explorable a, Num n) => (a -> n) -> n
sum f = getSum $ explore (Sum . f)
count :: Num n => (a -> Bool) -> n
count f = sum (boolToNum . f)
class Summable a => Explorable a where
exploration :: Explore a
exploration = mkExplore explore
explore :: Monoid m => (a -> m) -> m
explore = runExplore exploration
exploreWith :: Monoid m => (m -> r) -> (r -> m) -> (a -> r) -> r
exploreWith proj inj f = proj $ explore (inj . f)
product :: Num n => (a -> n) -> n
product = exploreWith getProduct Product
withEndo :: Monoid m => (a -> m) -> m
withEndo f = exploreWith appEndo Endo (\x -> (f x <>)) mempty
fAll :: (Applicative f, Monoid (f a)) => f a
fAll = explore pure
listAll :: [a]
listAll = fAll
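{-
Illustrative GHCI session (results follow from the instances defined later in
this module):
GHCI> listAll :: [Bool]
[False,True]
GHCI> listAll :: [(Bool,Bool)]
[(False,False),(False,True),(True,False),(True,True)]
-}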
-- refactor
diffAll :: (Applicative f, Monoid (f a)) => f a
diffAll = withEndo pure
-- refactor
diffListAll :: [a]
diffListAll = diffAll
findFirst :: (a -> Maybe b) -> Maybe b
findFirst = exploreWith getFirst First
findLast :: (a -> Maybe b) -> Maybe b
findLast = exploreWith getLast Last
any :: (a -> Bool) -> Bool
any = exploreWith getAny Any
all :: (a -> Bool) -> Bool
all = exploreWith getAll All
{-
newtype EndoExploration a = EndoExploration { getEndoExploration :: a }
instance Explorable a => Explorable (EndoExploration a) where
explore f = ...
instance Explorable a => Summable (EndoExploration a)
-}
filter :: (a -> Bool) -> Explore a -> Explore a
filter p e = mkExplore $ \f -> runExplore e $ \x -> if p x then f x else mempty
{-
GHCI> runExplore (Main.filter (uncurry (==)) exploration) pure :: [(Bool,Bool)]
[(False,False),(True,True)]
-}
instance Explorable Void where exploration = empty
instance Summable Void
instance Explorable () where exploration = pure ()
instance Summable ()
instance Explorable Bool where exploration = pure False <|> pure True
instance Summable Bool
instance (Explorable a, Explorable b) => Explorable (a, b) where
exploration = liftM2 (,) exploration exploration
instance (Summable a, Summable b) => Summable (a, b) where
sum = runCont $ liftM2 (,) (cont sum) (cont sum)
instance (Explorable a, Explorable b) => Explorable (Either a b) where
exploration = exploration <.|.> exploration
instance (Summable a, Summable b) => Summable (Either a b) where
sum f = sum (f . Left) + sum (f . Right)
instance Explorable a => Explorable (Maybe a) where
exploration = pure Nothing <|> Just <$> exploration
instance Summable a => Summable (Maybe a) where
sum f = f Nothing + sum (f . Just)
-- This instance only make sense for non-strict explorations
instance Explorable a => Explorable [a] where
explore f = f [] <> explore (\(x,xs) -> f (x:xs))
-- Apart from a non-strict Num instance this will be undefined
instance Summable a => Summable [a] where
sum f = f [] + sum (\(x,xs) -> f (x:xs))
count_uniq_prop :: (Eq a, Explorable a) => a -> Bool
count_uniq_prop x = count (==x) == 1
-- {-
--data H = H
--h :: a
--h = h
--import Debug.Trace
--tr f s = trace (f s) s
-- -}
-- -}
-- -}
-- -}
-- -}
| crypto-agda/explore | explore.hs | bsd-3-clause | 4,205 | 0 | 12 | 933 | 1,630 | 847 | 783 | 90 | 2 |
module Parse.Module (moduleDecl, elmModule) where
import qualified Control.Applicative
import Data.Map.Strict hiding (foldl, map)
import Elm.Utils ((|>))
import Text.Parsec hiding (newline, spaces)
import Parse.Helpers
import Parse.Declaration as Decl
import qualified AST.Declaration
import qualified AST.Module as Module
import qualified AST.Variable as Var
import AST.V0_16
import Parse.IParser
import Parse.Whitespace
elmModule :: IParser Module.Module
elmModule =
do preModule <- option [] freshLine
h <- moduleDecl
preDocsComments <- option [] freshLine
(docs, postDocsComments) <-
choice
[ (,) <$> addLocation (Just <$> docCommentAsMarkdown) <*> freshLine
, (,) <$> addLocation (return Nothing) <*> return []
]
(preImportComments, imports', postImportComments) <- imports
decls <- declarations
trailingComments <-
(++)
<$> option [] freshLine
<*> option [] spaces
eof
return $
Module.Module
preModule
h
docs
(preDocsComments ++ postDocsComments ++ preImportComments, imports')
((map AST.Declaration.BodyComment postImportComments) ++ decls ++ (map AST.Declaration.BodyComment trailingComments))
declarations :: IParser [AST.Declaration.Decl]
declarations =
(++) <$> ((\x -> [x]) <$> Decl.declaration)
<*> (concat <$> many freshDef)
freshDef :: IParser [AST.Declaration.Decl]
freshDef =
commitIf (freshLine >> (letter <|> char '_')) $
do comments <- freshLine
decl <- Decl.declaration
return $ (map AST.Declaration.BodyComment comments) ++ [decl]
moduleDecl :: IParser Module.Header
moduleDecl =
choice
[ try moduleDecl_0_16
, moduleDecl_0_17
, return $
Module.Header
Module.Normal
(Commented [] [UppercaseIdentifier "Main"] [])
Nothing
(KeywordCommented [] [] $ Var.OpenListing $ Commented [] () [])
]
moduleDecl_0_16 :: IParser Module.Header
moduleDecl_0_16 =
expecting "a module declaration" $
do try (reserved "module")
preName <- whitespace
names <- dotSep1 capVar <?> "the name of this module"
postName <- whitespace
exports <- option (Var.OpenListing (Commented [] () [])) (listing detailedListing)
preWhere <- whitespace
reserved "where"
return $
Module.Header
Module.Normal
(Commented preName names postName)
Nothing
(KeywordCommented preWhere [] exports)
moduleDecl_0_17 :: IParser Module.Header
moduleDecl_0_17 =
expecting "a module declaration" $
do
srcTag <-
try $
choice
[ Module.Port <$> (reserved "port" *> whitespace)
, Module.Effect <$> (reserved "effect" *> whitespace)
, return Module.Normal
]
<* reserved "module"
preName <- whitespace
names <- dotSep1 capVar <?> "the name of this module"
whereClause <-
optionMaybe $
commentedKeyword "where" $
brackets $ (\f pre post _ -> f pre post) <$> commaSep1 (keyValue equals lowVar capVar)
exports <-
commentedKeyword "exposing" $
listing detailedListing
return $
Module.Header
srcTag
(Commented preName names [])
whereClause
exports
mergePreCommented :: (a -> a -> a) -> PreCommented a -> PreCommented a -> PreCommented a
mergePreCommented merge (pre1, left) (pre2, right) =
(pre1 ++ pre2, merge left right)
mergeDetailedListing :: Module.DetailedListing -> Module.DetailedListing -> Module.DetailedListing
mergeDetailedListing left right =
Module.DetailedListing
(mergeCommentedMap (\() () -> ()) (Module.values left) (Module.values right))
(mergeCommentedMap (\() () -> ()) (Module.operators left) (Module.operators right))
(mergeCommentedMap (mergePreCommented $ mergeListing $ mergeCommentedMap (\() () -> ())) (Module.types left) (Module.types right))
imports :: IParser (Comments, Map [UppercaseIdentifier] (Comments, Module.ImportMethod), Comments)
imports =
let
merge :: PreCommented Module.ImportMethod -> PreCommented Module.ImportMethod -> PreCommented Module.ImportMethod
merge (comments1, import1) (comments2, import2) =
( comments1 ++ comments2
, Module.ImportMethod
(Module.alias import1 Control.Applicative.<|> Module.alias import2)
(mergePreCommented (mergePreCommented $ mergeListing mergeDetailedListing) (Module.exposedVars import1) (Module.exposedVars import2))
)
step (comments, m, finalComments) (((pre, name), method), post) =
( comments ++ finalComments
, insertWith merge name (pre, method) m
, post
)
done :: [(Module.UserImport, Comments)] -> (Comments, Map [UppercaseIdentifier] (Comments, Module.ImportMethod), Comments)
done results =
foldl step ([], empty, []) results
in
done <$> many ((,) <$> import' <*> freshLine)
import' :: IParser Module.UserImport
import' =
expecting "an import" $
do try (reserved "import")
preName <- whitespace
names <- dotSep1 capVar
method' <- method names
return ((,) preName names, method')
where
method :: [UppercaseIdentifier] -> IParser Module.ImportMethod
method originalName =
Module.ImportMethod
<$> option Nothing (Just <$> as' originalName)
<*> option ([], ([], Var.ClosedListing)) exposing
as' :: [UppercaseIdentifier] -> IParser (Comments, PreCommented UppercaseIdentifier)
as' moduleName =
do preAs <- try (whitespace <* reserved "as")
postAs <- whitespace
(,) preAs <$> (,) postAs <$> capVar <?> ("an alias for module `" ++ show moduleName ++ "`") -- TODO: do something correct instead of show
exposing :: IParser (Comments, PreCommented (Var.Listing Module.DetailedListing))
exposing =
do preExposing <- try (whitespace <* reserved "exposing")
postExposing <- whitespace
imports <- listing detailedListing
return (preExposing, (postExposing, imports))
listing :: IParser (Comments -> Comments -> a) -> IParser (Var.Listing a)
listing explicit =
expecting "a listing of values and types to expose, like (..)" $
do _ <- try (char '(')
pushNewlineContext
pre <- whitespace
listing <-
choice
[ (\_ pre post _ -> (Var.OpenListing (Commented pre () post))) <$> string ".."
, (\x pre post sawNewline ->
(Var.ExplicitListing (x pre post) sawNewline))
<$> explicit
]
post <- whitespace
sawNewline <- popNewlineContext
_ <- char ')'
return $ listing pre post sawNewline
commentedSet :: Ord a => IParser a -> IParser (Comments -> Comments -> Var.CommentedMap a ())
commentedSet item =
commaSep1Set' ((\x -> (x, ())) <$> item) (\() () -> ())
detailedListing :: IParser (Comments -> Comments -> Module.DetailedListing)
detailedListing =
do
values <- commaSep1' value
return $ \pre post -> toDetailedListing $ values pre post
mergeCommentedMap :: Ord k => (v -> v -> v) -> Var.CommentedMap k v -> Var.CommentedMap k v -> Var.CommentedMap k v
mergeCommentedMap merge left right =
let
merge' (Commented pre1 a post1) (Commented pre2 b post2) =
Commented (pre1 ++ pre2) (merge a b) (post1 ++ post2)
in
unionWith merge' left right
mergeListing :: (a -> a -> a) -> Var.Listing a -> Var.Listing a -> Var.Listing a
mergeListing merge left right =
case (left, right) of
(Var.ClosedListing, Var.ClosedListing) -> Var.ClosedListing
(Var.ClosedListing, Var.OpenListing comments) -> Var.OpenListing comments
(Var.OpenListing comments, Var.ClosedListing) -> Var.OpenListing comments
(Var.OpenListing (Commented pre1 () post1), Var.OpenListing (Commented pre2 () post2)) -> Var.OpenListing (Commented (pre1 ++ pre2) () (post1 ++ post2))
(Var.ClosedListing, Var.ExplicitListing a multiline) -> Var.ExplicitListing a multiline
(Var.ExplicitListing a multiline, Var.ClosedListing) -> Var.ExplicitListing a multiline
(Var.OpenListing comments, Var.ExplicitListing a multiline) -> Var.OpenListing comments
(Var.ExplicitListing a multiline, Var.OpenListing comments) -> Var.OpenListing comments
(Var.ExplicitListing a multiline1, Var.ExplicitListing b multiline2) -> Var.ExplicitListing (merge a b) (multiline1 || multiline2)
toDetailedListing :: [Commented Var.Value] -> Module.DetailedListing
toDetailedListing values =
let
merge
(Commented pre1 (inner1, tags1) post1)
(Commented pre2 (inner2, tags2) post2)
=
Commented
(pre1 ++ pre2)
( inner1 ++ inner2
, mergeListing (mergeCommentedMap (\() () -> ())) tags1 tags2
)
(post1 ++ post2)
step (vs, os, ts) (Commented pre val post) =
case val of
Var.Value name ->
(insert name (Commented pre () post) vs, os, ts)
Var.OpValue name ->
(vs, insert name (Commented pre () post) os, ts)
Var.Union (name, inner) tags ->
(vs, os, insertWith merge name (Commented pre (inner, tags) post) ts)
done (vs, os, ts) =
Module.DetailedListing vs os ts
in
foldl step (empty, empty, empty) values
|> done
value :: IParser Var.Value
value =
val <|> tipe <?> "a value or type to expose"
where
val =
(Var.Value <$> lowVar) <|> (Var.OpValue <$> parens' symOp)
tipe =
do name <- capVar
maybeCtors <- optionMaybe (try $ (,) <$> whitespace <*> listing (commentedSet capVar))
case maybeCtors of
Nothing -> return $ Var.Union (name, []) Var.ClosedListing
Just (pre, ctors) -> return (Var.Union (name, pre) ctors)
| nukisman/elm-format-short | parser/src/Parse/Module.hs | bsd-3-clause | 10,194 | 0 | 18 | 2,788 | 3,281 | 1,679 | 1,602 | 226 | 9 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE DataKinds #-}
module ManageMyTime.Docs.Undecidable where
import Servant.Docs (ToSample, ToCapture(..), DocCapture(..), singleSample, toSamples)
import Servant.API (Capture)
import Database.Persist.Sql (ToBackendKey, SqlBackend, Key, toSqlKey)
instance (ToBackendKey SqlBackend a) => ToSample (Key a) where
toSamples _ = singleSample $ toSqlKey 1
instance (ToBackendKey SqlBackend a) => ToCapture (Capture "id" (Key a)) where
toCapture _ =
DocCapture "id"
"id (integer) of the resource to access"
| berdario/managemytime | src/ManageMyTime/Docs/Undecidable.hs | bsd-3-clause | 628 | 0 | 9 | 116 | 156 | 90 | 66 | -1 | -1 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Hasbitica.Settings where
import Control.Applicative ((<|>))
import Data.Maybe(listToMaybe,maybeToList)
import Data.Aeson (FromJSON, decode)
import qualified Data.ByteString.Lazy.Char8 as B
import GHC.Generics
import Hasbitica.Api
import System.Directory (getHomeDirectory)
import System.FilePath.Posix ((</>))
data Settings = Settings {address,user,key::String}
deriving (Show,Generic)
instance FromJSON Settings
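-- With the derived instance above, ~/.habitica is expected to hold JSON of
-- roughly this shape (field values below are placeholders, not real defaults):
--   {"address": "https://habitica.com/", "user": "<user-id>", "key": "<api-key>"}
-- readSettings parses a single such object; readMultiSettings parses an array of them.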
readSettings :: IO (Maybe Settings)
readSettings = fmap (</> ".habitica") getHomeDirectory >>= fmap decode . B.readFile
readMultiSettings :: IO [Settings]
readMultiSettings = fmap (</> ".habitica") getHomeDirectory >>= fmap (concat . decode) . B.readFile
settingsToKey :: Settings -> HabiticaApiKey
settingsToKey Settings{..} = HabiticaApiKey user key
getApiFromSettings :: IO (Maybe HabiticaApiKey)
getApiFromSettings = do
sing <- readSettings
many <- readMultiSettings
return $ settingsToKey <$> (sing <|> listToMaybe many)
getAllApisFromSettings :: IO [HabiticaApiKey]
getAllApisFromSettings = do
sing <- readSettings
many <- readMultiSettings
return $ settingsToKey <$> (maybeToList sing ++ many)
| kobeyu/hasbitica | src/Hasbitica/Settings.hs | bsd-3-clause | 1,364 | 0 | 10 | 278 | 346 | 192 | 154 | 31 | 1 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree. An additional grant
-- of patent rights can be found in the PATENTS file in the same directory.
-----------------------------------------------------------------
-- Auto-generated by regenClassifiers
--
-- DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
-- @generated
-----------------------------------------------------------------
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Ranking.Classifiers.AR (classifiers) where
import Prelude
import Duckling.Ranking.Types
import qualified Data.HashMap.Strict as HashMap
import Data.String
classifiers :: Classifiers
classifiers = HashMap.fromList [] | rfranek/duckling | Duckling/Ranking/Classifiers/AR.hs | bsd-3-clause | 825 | 0 | 6 | 105 | 66 | 47 | 19 | 8 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE ImplicitParams #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE ViewPatterns #-}
module Language.Granule.Checker.Checker where
import Control.Monad (unless)
import Control.Monad.State.Strict
import Control.Monad.Except (throwError)
import Data.List (genericLength)
import Data.List.NonEmpty (NonEmpty(..))
import qualified Data.List.NonEmpty as NonEmpty (toList)
import Data.Maybe
import qualified Data.Text as T
import Language.Granule.Checker.Constraints.Compile
import Language.Granule.Checker.Coeffects
import Language.Granule.Checker.Effects
import Language.Granule.Checker.Constraints
import Language.Granule.Checker.Kinds
import Language.Granule.Checker.KindsImplicit
import Language.Granule.Checker.Exhaustivity
import Language.Granule.Checker.Monad
import Language.Granule.Checker.NameClash
import Language.Granule.Checker.Patterns
import Language.Granule.Checker.Predicates
import qualified Language.Granule.Checker.Primitives as Primitives
import Language.Granule.Checker.Simplifier
import Language.Granule.Checker.SubstitutionContexts
import Language.Granule.Checker.Substitution
import Language.Granule.Checker.Types
import Language.Granule.Checker.Variables
import Language.Granule.Context
import Language.Granule.Syntax.Identifiers
import Language.Granule.Syntax.Helpers (freeVars, hasHole)
import Language.Granule.Syntax.Def
import Language.Granule.Syntax.Expr
import Language.Granule.Syntax.Pretty
import Language.Granule.Syntax.Span
import Language.Granule.Syntax.Type
import Language.Granule.Utils
--import Debug.Trace
-- Checking (top-level)
check :: (?globals :: Globals)
=> AST () ()
-> IO (Either (NonEmpty CheckerError) (AST () Type))
check ast@(AST dataDecls defs imports hidden name) =
evalChecker (initState { allHiddenNames = hidden }) $ (do
_ <- checkNameClashes ast
_ <- runAll checkTyCon (Primitives.dataTypes ++ dataDecls)
_ <- runAll checkDataCons (Primitives.dataTypes ++ dataDecls)
defs <- runAll kindCheckDef defs
let defCtxt = map (\(Def _ name _ tys) -> (name, tys)) defs
defs <- runAll (checkDef defCtxt) defs
pure $ AST dataDecls defs imports hidden name)
-- Synthing the type of a single expression in the context of an AST
synthExprInIsolation :: (?globals :: Globals)
=> AST () ()
-> Expr () ()
-> IO (Either (NonEmpty CheckerError) (Either TypeScheme Kind))
synthExprInIsolation ast@(AST dataDecls defs imports hidden name) expr =
evalChecker (initState { allHiddenNames = hidden }) $ (do
_ <- checkNameClashes ast
_ <- runAll checkTyCon (Primitives.dataTypes ++ dataDecls)
_ <- runAll checkDataCons (Primitives.dataTypes ++ dataDecls)
defs <- runAll kindCheckDef defs
let defCtxt = map (\(Def _ name _ tys) -> (name, tys)) defs
-- Since we need to return a type scheme, have a look first
-- for top-level identifiers with their schemes
case expr of
-- Lookup in data constructors
(Val s _ (Constr _ c [])) -> do
mConstructor <- lookupDataConstructor s c
case mConstructor of
Just (tySch, _) -> return $ Left tySch
Nothing -> do
st <- get
-- Or see if this is a kind constructors
case lookup c (Primitives.typeConstructors <> (typeConstructors st)) of
Just (k, _, _) -> return $ Right k
Nothing -> throw UnboundDataConstructor{ errLoc = s, errId = c }
-- Lookup in definitions
(Val s _ (Var _ x)) -> do
case lookup x (defCtxt <> Primitives.builtins) of
Just tyScheme -> return $ Left tyScheme
Nothing -> throw UnboundVariableError{ errLoc = s, errId = x }
-- Otherwise, do synth
_ -> do
(ty, _, _, _) <- synthExpr defCtxt [] Positive expr
return $ Left $ Forall nullSpanNoFile [] [] ty)
-- TODO: we are checking for name clashes again here. Where is the best place
-- to do this check?
checkTyCon :: DataDecl -> Checker ()
checkTyCon d@(DataDecl sp name tyVars kindAnn ds)
= lookup name <$> gets typeConstructors >>= \case
Just _ -> throw TypeConstructorNameClash{ errLoc = sp, errId = name }
Nothing -> modify' $ \st ->
st{ typeConstructors = (name, (tyConKind, cardin, isIndexedDataType d)) : typeConstructors st }
where
cardin = (Just . genericLength) ds -- the number of data constructors
tyConKind = mkKind (map snd tyVars)
mkKind [] = case kindAnn of Just k -> k; Nothing -> KType -- default to `Type`
mkKind (v:vs) = KFun v (mkKind vs)
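-- Illustration of `mkKind` (a sketch derived from the definition above): for
-- `tyVars = [(a, KType), (b, KType)]` and no kind annotation, the constructor
-- kind is `KFun KType (KFun KType KType)`; with no variables and an explicit
-- annotation `Just k`, the kind is just `k`.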
checkDataCons :: (?globals :: Globals) => DataDecl -> Checker ()
checkDataCons (DataDecl sp name tyVars k dataConstrs) = do
st <- get
let kind = case lookup name (typeConstructors st) of
Just (kind,_,_) -> kind
Nothing -> error $ "Internal error. Trying to lookup data constructor " <> pretty name
modify' $ \st -> st { tyVarContext = [(v, (k, ForallQ)) | (v, k) <- tyVars] }
mapM_ (checkDataCon name kind tyVars) dataConstrs
checkDataCon :: (?globals :: Globals)
=> Id -- ^ The type constructor and associated type to check against
-> Kind -- ^ The kind of the type constructor
-> Ctxt Kind -- ^ The type variables
-> DataConstr -- ^ The data constructor to check
  -> Checker ()    -- ^ Succeeds with @()@, or throws a checker error on failure
checkDataCon
tName
kind
tyVarsT
d@(DataConstrIndexed sp dName tySch@(Forall s tyVarsD constraints ty)) = do
case map fst $ intersectCtxts tyVarsT tyVarsD of
[] -> do -- no clashes
-- Only relevant type variables get included
let tyVars = relevantSubCtxt (freeVars ty) (tyVarsT <> tyVarsD)
let tyVars_justD = relevantSubCtxt (freeVars ty) tyVarsD
-- Add the type variables from the data constructor into the environment
modify $ \st -> st { tyVarContext =
[(v, (k, ForallQ)) | (v, k) <- tyVars_justD] ++ tyVarContext st }
tySchKind <- inferKindOfTypeInContext sp tyVars ty
        -- Freshen the data type constructor's type
(ty, tyVarsFreshD, _, constraints, []) <-
freshPolymorphicInstance ForallQ False (Forall s tyVars constraints ty) []
-- Create a version of the data constructor that matches the data type head
-- but with a list of coercions
(ty', coercions, tyVarsNewAndOld) <- checkAndGenerateSubstitution sp tName ty (indexKinds kind)
-- Reconstruct the data constructor's new type scheme
let tyVarsD' = tyVarsFreshD <> tyVarsNewAndOld
let tySch = Forall sp tyVarsD' constraints ty'
case tySchKind of
KType ->
registerDataConstructor tySch coercions
KPromote (TyCon k) | internalName k == "Protocol" ->
registerDataConstructor tySch coercions
_ -> throw KindMismatch{ errLoc = sp, tyActualK = Just ty, kExpected = KType, kActual = kind }
(v:vs) -> (throwError . fmap mkTyVarNameClashErr) (v:|vs)
where
indexKinds (KFun k1 k2) = k1 : indexKinds k2
indexKinds k = []
registerDataConstructor dataConstrTy subst = do
st <- get
case extend (dataConstructors st) dName (dataConstrTy, subst) of
Just ds -> put st { dataConstructors = ds, tyVarContext = [] }
Nothing -> throw DataConstructorNameClashError{ errLoc = sp, errId = dName }
mkTyVarNameClashErr v = DataConstructorTypeVariableNameClash
{ errLoc = sp
, errDataConstructorId = dName
, errTypeConstructor = tName
, errVar = v
}
checkDataCon tName kind tyVars d@DataConstrNonIndexed{}
= checkDataCon tName kind tyVars
$ nonIndexedToIndexedDataConstr tName tyVars d
{-
    Checks whether the type constructor name matches the return constraint
    of the data constructor, and at the same time generates coercions for
    every parameter of the result type's type constructor; it then generates
    fresh variables for those parameters, with coercions that are either
    trivial (variable-to-variable) or map to concrete types.
e.g.
checkAndGenerateSubstitution Maybe (a' -> Maybe a') [Type]
> (a' -> Maybe a, [a |-> a'], [a : Type])
checkAndGenerateSubstitution Other (a' -> Maybe a') [Type]
> *** fails
checkAndGenerateSubstitution Vec (Vec 0 t') [Nat, Type]
      > (Vec n t, [n |-> Subst 0, t |-> t'], [n : Nat, t : Type])
checkAndGenerateSubstitution Vec (t' -> Vec n' t' -> Vec (n'+1) t') [Nat, Type]
> (t' -> Vec n' t' -> Vec n t, [n |-> Subst (n'+1), t |-> t'], [])
checkAndGenerateSubstitution Foo (Int -> Foo Int) [Type]
> (Int -> Foo t1, [t1 |-> Subst Int], [t1 : Type])
-}
checkAndGenerateSubstitution ::
Span -- ^ Location of this application
-> Id -- ^ Name of the type constructor
-> Type -- ^ Type of the data constructor
-> [Kind] -- ^ Types of the remaining data type indices
-> Checker (Type, Substitution, Ctxt Kind)
checkAndGenerateSubstitution sp tName ty ixkinds =
checkAndGenerateSubstitution' sp tName ty (reverse ixkinds)
where
checkAndGenerateSubstitution' sp tName (TyCon tC) []
| tC == tName = return (TyCon tC, [], [])
| otherwise = throw UnexpectedTypeConstructor
{ errLoc = sp, tyConActual = tC, tyConExpected = tName }
checkAndGenerateSubstitution' sp tName (FunTy arg res) kinds = do
(res', subst, tyVarsNew) <- checkAndGenerateSubstitution' sp tName res kinds
return (FunTy arg res', subst, tyVarsNew)
checkAndGenerateSubstitution' sp tName (TyApp fun arg) (kind:kinds) = do
varSymb <- freshIdentifierBase "t"
let var = mkId varSymb
(fun', subst, tyVarsNew) <- checkAndGenerateSubstitution' sp tName fun kinds
return (TyApp fun' (TyVar var), (var, SubstT arg) : subst, (var, kind) : tyVarsNew)
checkAndGenerateSubstitution' sp _ t _ =
throw InvalidTypeDefinition { errLoc = sp, errTy = t }
checkDef :: (?globals :: Globals)
=> Ctxt TypeScheme -- context of top-level definitions
-> Def () () -- definition
-> Checker (Def () Type)
checkDef defCtxt (Def s defName equations tys@(Forall s_t foralls constraints ty)) = do
-- duplicate forall bindings
case duplicates (map (sourceName . fst) foralls) of
[] -> pure ()
(d:ds) -> throwError $ fmap (DuplicateBindingError s_t) (d :| ds)
-- Clean up knowledge shared between equations of a definition
modify (\st -> st { guardPredicates = [[]]
, patternConsumption = initialisePatternConsumptions equations } )
elaboratedEquations :: [Equation () Type] <- forM equations $ \equation -> do -- Checker [Maybe (Equation () Type)]
-- Erase the solver predicate between equations
modify' $ \st -> st
{ predicateStack = []
, tyVarContext = []
, guardContexts = []
}
elaboratedEq <- checkEquation defCtxt defName equation tys
-- Solve the generated constraints
checkerState <- get
let predicate = Conj $ predicateStack checkerState
solveConstraints predicate (getSpan equation) defName
pure elaboratedEq
checkGuardsForImpossibility s defName
checkGuardsForExhaustivity s defName ty equations
pure $ Def s defName elaboratedEquations tys
checkEquation :: (?globals :: Globals) =>
Ctxt TypeScheme -- context of top-level definitions
-> Id -- Name of the definition
-> Equation () () -- Equation
-> TypeScheme -- Type scheme
-> Checker (Equation () Type)
checkEquation defCtxt _ (Equation s () pats expr) tys@(Forall _ foralls constraints ty) = do
-- Check that the lhs doesn't introduce any duplicate binders
duplicateBinderCheck s pats
-- Freshen the type context
modify (\st -> st { tyVarContext = map (\(n, c) -> (n, (c, ForallQ))) foralls})
-- Create conjunct to capture the pattern constraints
newConjunct
mapM_ (\ty -> do
pred <- compileTypeConstraintToConstraint s ty
addPredicate pred) constraints
-- Build the binding context for the branch pattern
st <- get
(patternGam, tau, localVars, subst, elaborated_pats, consumptions) <-
ctxtFromTypedPatterns s ty pats (patternConsumption st)
-- Update the consumption information
modify (\st -> st { patternConsumption =
zipWith joinConsumption consumptions (patternConsumption st) } )
-- Create conjunct to capture the body expression constraints
newConjunct
-- Specialise the return type by the pattern generated substitution
debugM "eqn" $ "### -- patternGam = " <> show patternGam
debugM "eqn" $ "### -- localVars = " <> show localVars
debugM "eqn" $ "### -- tau = " <> show tau
tau' <- substitute subst tau
debugM "eqn" $ "### -- tau' = " <> show tau'
patternGam <- substitute subst patternGam
-- Check the body
(localGam, subst', elaboratedExpr) <-
checkExpr defCtxt patternGam Positive True tau' expr
case checkLinearity patternGam localGam of
[] -> do
localGam <- substitute subst localGam
      -- Check that our consumption context approximates the binding
ctxtApprox s localGam patternGam
-- Conclude the implication
concludeImplication s localVars
-- Create elaborated equation
subst'' <- combineSubstitutions s subst subst'
let elab = Equation s ty elaborated_pats elaboratedExpr
elab' <- substitute subst'' elab
return elab'
-- Anything that was bound in the pattern but not used up
(p:ps) -> illLinearityMismatch s (p:|ps)
-- Polarities are used to understand when a type is
-- `expected` vs. `actual` (i.e., for error messages)
data Polarity = Positive | Negative deriving Show
flipPol :: Polarity -> Polarity
flipPol Positive = Negative
flipPol Negative = Positive
-- Type check an expression
-- `checkExpr defs gam pol topLevel t expr` checks that `expr` has type `t`
-- in context `gam`, returning the post-computation context `delta` for `expr`
-- (which explains the exact coeffect demands), together with a substitution
-- and the elaborated expression; a type error is thrown if checking fails.
checkExpr :: (?globals :: Globals)
=> Ctxt TypeScheme -- context of top-level definitions
-> Ctxt Assumption -- local typing context
-> Polarity -- polarity of <= constraints
-> Bool -- whether we are top-level or not
-> Type -- type
-> Expr () () -- expression
-> Checker (Ctxt Assumption, Substitution, Expr () Type)
-- Hit an unfilled hole
checkExpr _ ctxt _ _ t (Hole s _) = do
st <- get
let varContext = relevantSubCtxt (concatMap (freeVars . snd) ctxt ++ (freeVars t)) (tyVarContext st)
throw $ HoleMessage s (Just t) ctxt varContext
-- Checking of constants
checkExpr _ [] _ _ ty@(TyCon c) (Val s _ (NumInt n)) | internalName c == "Int" = do
let elaborated = Val s ty (NumInt n)
return ([], [], elaborated)
checkExpr _ [] _ _ ty@(TyCon c) (Val s _ (NumFloat n)) | internalName c == "Float" = do
let elaborated = Val s ty (NumFloat n)
return ([], [], elaborated)
checkExpr defs gam pol _ ty@(FunTy sig tau) (Val s _ (Abs _ p t e)) = do
  -- If an explicit signature on the lambda was given, then check
  -- that it conforms to the type being checked here
(tau', subst1) <- case t of
Nothing -> return (tau, [])
Just t' -> do
(eqT, unifiedType, subst) <- equalTypes s sig t'
unless eqT $ throw TypeError{ errLoc = s, tyExpected = sig, tyActual = t' }
return (tau, subst)
newConjunct
(bindings, localVars, subst, elaboratedP, _) <- ctxtFromTypedPattern s sig p NotFull
debugM "binding from lam" $ pretty bindings
pIrrefutable <- isIrrefutable s sig p
if pIrrefutable then do
-- Check the body in the extended context
tau'' <- substitute subst tau'
newConjunct
(gam', subst2, elaboratedE) <- checkExpr defs (bindings <> gam) pol False tau'' e
-- Check linearity of locally bound variables
case checkLinearity bindings gam' of
[] -> do
subst <- combineSubstitutions s subst1 subst2
-- Locally we should have this property (as we are under a binder)
ctxtApprox s (gam' `intersectCtxts` bindings) bindings
concludeImplication s localVars
let elaborated = Val s ty (Abs ty elaboratedP t elaboratedE)
return (gam' `subtractCtxt` bindings, subst, elaborated)
(p:ps) -> illLinearityMismatch s (p:|ps)
else throw RefutablePatternError{ errLoc = s, errPat = p }
-- Application special case for built-in 'scale'
-- TODO: needs more thought
{- checkExpr defs gam pol topLevel tau
(App s _ (App _ _ (Val _ _ (Var _ v)) (Val _ _ (NumFloat _ x))) e) | internalName v == "scale" = do
equalTypes s (TyCon $ mkId "Float") tau
checkExpr defs gam pol topLevel (Box (CFloat (toRational x)) (TyCon $ mkId "Float")) e
-}
-- Application checking
checkExpr defs gam pol topLevel tau (App s _ e1 e2) = do
(argTy, gam2, subst2, elaboratedR) <- synthExpr defs gam pol e2
funTy <- substitute subst2 (FunTy argTy tau)
(gam1, subst1, elaboratedL) <- checkExpr defs gam pol topLevel funTy e1
gam <- ctxtPlus s gam1 gam2
subst <- combineSubstitutions s subst1 subst2
let elaborated = App s tau elaboratedL elaboratedR
return (gam, subst, elaborated)
{-
[G] |- e : t
---------------------
[G]*r |- [e] : []_r t
-}
-- Promotion
checkExpr defs gam pol _ ty@(Box demand tau) (Val s _ (Promote _ e)) = do
let vars =
if hasHole e
          -- If we are promoting something with a hole, then put all free variables in scope
then map fst gam
-- Otherwise we need to discharge only things that get used
else freeVars e
gamF <- discToFreshVarsIn s vars gam demand
(gam', subst, elaboratedE) <- checkExpr defs gamF pol False tau e
  -- Causes a promotion of any typing assumptions that came from variables
  -- inside a guard of an enclosing case and that have kind Level
-- This prevents control-flow attacks and is a special case for Level
-- (the guard contexts come from a special context in the solver)
guardGam <- allGuardContexts
guardGam' <- filterM isLevelKinded guardGam
gam'' <- multAll s (vars <> map fst guardGam') demand (gam' <> guardGam')
let elaborated = Val s ty (Promote tau elaboratedE)
return (gam'', subst, elaborated)
where
-- Calculate whether a type assumption is level kinded
isLevelKinded (_, as) = do
ty <- inferCoeffectTypeAssumption s as
return $ case ty of
Just (TyCon (internalName -> "Level"))
-> True
Just (TyApp (TyCon (internalName -> "Interval"))
(TyCon (internalName -> "Level")))
-> True
_ -> False
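-- A concrete instance of the promotion rule above (informal sketch, using a
-- natural-number grade): checking `[x]` against `Int [2]` in a context
-- containing `x : Int` first checks `x` against `Int` (discharging `x`), and
-- then multiplies the resulting context by the demand 2, so the outgoing
-- context records `x : Int [2]`.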
-- Check a case expression
checkExpr defs gam pol True tau (Case s _ guardExpr cases) = do
-- Synthesise the type of the guardExpr
(guardTy, guardGam, substG, elaboratedGuard) <- synthExpr defs gam pol guardExpr
pushGuardContext guardGam
-- Dependent / GADT pattern matches not allowed in a case
ixed <- isIndexedType guardTy
when ixed (throw $ CaseOnIndexedType s guardTy)
newCaseFrame
-- Check each of the branches
branchCtxtsAndSubst <-
forM cases $ \(pat_i, e_i) -> do
-- Build the binding context for the branch pattern
newConjunct
(patternGam, eVars, subst, elaborated_pat_i, _) <- ctxtFromTypedPattern s guardTy pat_i NotFull
newConjunct
-- Checking the case body
(localGam, subst', elaborated_i) <- checkExpr defs (patternGam <> gam) pol False tau e_i
-- Check that the use of locally bound variables matches their bound type
ctxtApprox s (localGam `intersectCtxts` patternGam) patternGam
-- Conclude the implication
concludeImplication (getSpan pat_i) eVars
-- Check linear use in anything Linear
gamSoFar <- ctxtPlus s guardGam localGam
case checkLinearity patternGam gamSoFar of
-- Return the resulting computed context, without any of
        -- the variables bound in the pattern of this branch
[] -> do
return (localGam `subtractCtxt` patternGam
, subst'
, (elaborated_pat_i, elaborated_i))
-- Anything that was bound in the pattern but not used correctly
p:ps -> illLinearityMismatch s (p:|ps)
-- All branches must be possible
checkGuardsForImpossibility s $ mkId "case"
-- Pop from stacks related to case
_ <- popGuardContext
popCaseFrame
-- Find the upper-bound of the contexts
let (branchCtxts, substs, elaboratedCases) = unzip3 branchCtxtsAndSubst
(branchesGam, tyVars) <- foldM (\(ctxt, vars) ctxt' -> do
(ctxt'', vars') <- joinCtxts s ctxt ctxt'
return (ctxt'', vars ++ vars')) (head branchCtxts, []) (tail branchCtxts)
-- Contract the outgoing context of the guard and the branches (joined)
g <- ctxtPlus s branchesGam guardGam
subst <- combineManySubstitutions s (substG : substs)
  -- Existentially quantify any type variables generated by joining contexts
mapM_ (uncurry existential) tyVars
let elaborated = Case s tau elaboratedGuard elaboratedCases
return (g, subst, elaborated)
-- All other expressions must be checked using synthesis
checkExpr defs gam pol topLevel tau e = do
(tau', gam', subst', elaboratedE) <- synthExpr defs gam pol e
-- Now to do a type equality on check type `tau` and synth type `tau'`
(tyEq, _, subst) <-
if topLevel
-- If we are checking a top-level, then don't allow overapproximation
then do
debugM "** Compare for equality " $ pretty tau' <> " = " <> pretty tau
equalTypesWithPolarity (getSpan e) SndIsSpec tau' tau
else do
debugM "** Compare for equality " $ pretty tau' <> " :> " <> pretty tau
lEqualTypesWithPolarity (getSpan e) SndIsSpec tau' tau
if tyEq
then do
substFinal <- combineSubstitutions (getSpan e) subst subst'
return (gam', substFinal, elaboratedE)
else do
case pol of
Positive -> throw TypeError{ errLoc = getSpan e, tyExpected = tau , tyActual = tau' }
Negative -> throw TypeError{ errLoc = getSpan e, tyExpected = tau', tyActual = tau }
-- | Synthesise the 'Type' of expressions.
-- See <https://en.wikipedia.org/w/index.php?title=Bidirectional_type_checking&redirect=no>
synthExpr :: (?globals :: Globals)
=> Ctxt TypeScheme -- ^ Context of top-level definitions
-> Ctxt Assumption -- ^ Local typing context
-> Polarity -- ^ Polarity of subgrading
-> Expr () () -- ^ Expression
-> Checker (Type, Ctxt Assumption, Substitution, Expr () Type)
-- Hit an unfilled hole
synthExpr _ ctxt _ (Hole s _) = do
st <- get
let varContext = relevantSubCtxt (concatMap (freeVars . snd) ctxt) (tyVarContext st)
throw $ HoleMessage s Nothing ctxt varContext
-- Literals can have their type easily synthesised
synthExpr _ _ _ (Val s _ (NumInt n)) = do
let t = TyCon $ mkId "Int"
return (t, [], [], Val s t (NumInt n))
synthExpr _ _ _ (Val s _ (NumFloat n)) = do
let t = TyCon $ mkId "Float"
return (t, [], [], Val s t (NumFloat n))
synthExpr _ _ _ (Val s _ (CharLiteral c)) = do
let t = TyCon $ mkId "Char"
return (t, [], [], Val s t (CharLiteral c))
synthExpr _ _ _ (Val s _ (StringLiteral c)) = do
let t = TyCon $ mkId "String"
return (t, [], [], Val s t (StringLiteral c))
-- Secret syntactic weakening
synthExpr defs gam pol
(App s _ (Val _ _ (Var _ (sourceName -> "weak__"))) v@(Val _ _ (Var _ x))) = do
(t, _, subst, elabE) <- synthExpr defs gam pol v
return (t, [(x, Discharged t (CZero (TyCon $ mkId "Level")))], subst, elabE)
-- Constructors
synthExpr _ gam _ (Val s _ (Constr _ c [])) = do
  -- Should be provided in the type checker's environment
st <- get
mConstructor <- lookupDataConstructor s c
case mConstructor of
Just (tySch, coercions) -> do
-- Freshen the constructor
-- (discarding any fresh type variables, info not needed here)
(ty, _, _, constraints, coercions') <- freshPolymorphicInstance InstanceQ False tySch coercions
mapM_ (\ty -> do
pred <- compileTypeConstraintToConstraint s ty
addPredicate pred) constraints
-- Apply coercions
ty <- substitute coercions' ty
let elaborated = Val s ty (Constr ty c [])
return (ty, [], [], elaborated)
Nothing -> throw UnboundDataConstructor{ errLoc = s, errId = c }
-- Case synthesis
synthExpr defs gam pol (Case s _ guardExpr cases) = do
-- Synthesise the type of the guardExpr
(guardTy, guardGam, substG, elaboratedGuard) <- synthExpr defs gam pol guardExpr
-- then synthesise the types of the branches
-- Dependent / GADT pattern matches not allowed in a case
ixed <- isIndexedType guardTy
when ixed (throw $ CaseOnIndexedType s guardTy)
newCaseFrame
branchTysAndCtxtsAndSubsts <-
forM cases $ \(pati, ei) -> do
-- Build the binding context for the branch pattern
newConjunct
(patternGam, eVars, subst, elaborated_pat_i, _) <- ctxtFromTypedPattern s guardTy pati NotFull
newConjunct
-- Synth the case body
(tyCase, localGam, subst', elaborated_i) <- synthExpr defs (patternGam <> gam) pol ei
-- Check that the use of locally bound variables matches their bound type
ctxtApprox s (localGam `intersectCtxts` patternGam) patternGam
-- Conclude
concludeImplication (getSpan pati) eVars
-- Check linear use in this branch
gamSoFar <- ctxtPlus s guardGam localGam
case checkLinearity patternGam gamSoFar of
-- Return the resulting computed context, without any of
        -- the variables bound in the pattern of this branch
[] -> return (tyCase
, (localGam `subtractCtxt` patternGam, subst')
, (elaborated_pat_i, elaborated_i))
p:ps -> illLinearityMismatch s (p:|ps)
-- All branches must be possible
checkGuardsForImpossibility s $ mkId "case"
popCaseFrame
let (branchTys, branchCtxtsAndSubsts, elaboratedCases) = unzip3 branchTysAndCtxtsAndSubsts
let (branchCtxts, branchSubsts) = unzip branchCtxtsAndSubsts
let branchTysAndSpans = zip branchTys (map (getSpan . snd) cases)
-- Finds the upper-bound return type between all branches
branchType <- foldM (\ty2 (ty1, sp) -> joinTypes sp ty1 ty2)
(head branchTys)
(tail branchTysAndSpans)
-- Find the upper-bound type on the return contexts
(branchesGam, tyVars) <- foldM (\(ctxt, vars) ctxt' -> do
(ctxt'', vars') <- joinCtxts s ctxt ctxt'
return (ctxt'', vars ++ vars')) (head branchCtxts, []) (tail branchCtxts)
-- Contract the outgoing context of the guard and the branches (joined)
gamNew <- ctxtPlus s branchesGam guardGam
subst <- combineManySubstitutions s (substG : branchSubsts)
  -- Existentially quantify any type variables generated by joining contexts
mapM_ (uncurry existential) tyVars
let elaborated = Case s branchType elaboratedGuard elaboratedCases
return (branchType, gamNew, subst, elaborated)
-- Diamond cut
-- let [[p]] <- [[e1 : sig]] in [[e2 : tau]]
synthExpr defs gam pol (LetDiamond s _ p optionalTySig e1 e2) = do
(sig, gam1, subst1, elaborated1) <- synthExpr defs gam pol e1
-- Check that a graded possibility type was inferred
(ef1, ty1) <- case sig of
Diamond ef1 ty1 -> return (ef1, ty1)
t -> throw ExpectedEffectType{ errLoc = s, errTy = t }
-- Type body of the let...
-- ...in the context of the binders from the pattern
(binders, _, substP, elaboratedP, _) <- ctxtFromTypedPattern s ty1 p NotFull
pIrrefutable <- isIrrefutable s ty1 p
unless pIrrefutable $ throw RefutablePatternError{ errLoc = s, errPat = p }
(tau, gam2, subst2, elaborated2) <- synthExpr defs (binders <> gam) pol e2
-- Check that a graded possibility type was inferred
(ef2, ty2) <- case tau of
Diamond ef2 ty2 -> return (ef2, ty2)
t -> throw ExpectedEffectType{ errLoc = s, errTy = t }
optionalSigEquality s optionalTySig ty1
-- Check that usage matches the binding grades/linearity
-- (performs the linearity check)
ctxtEquals s (gam2 `intersectCtxts` binders) binders
gamNew <- ctxtPlus s (gam2 `subtractCtxt` binders) gam1
(efTy, u) <- twoEqualEffectTypes s ef1 ef2
-- Multiply the effects
ef <- effectMult s efTy ef1 ef2
let t = Diamond ef ty2
subst <- combineManySubstitutions s [substP, subst1, subst2, u]
-- Synth subst
t' <- substitute substP t
let elaborated = LetDiamond s t elaboratedP optionalTySig elaborated1 elaborated2
return (t, gamNew, subst, elaborated)
-- Variables
synthExpr defs gam _ (Val s _ (Var _ x)) =
-- Try the local context
case lookup x gam of
Nothing ->
-- Try definitions in scope
case lookup x (defs <> Primitives.builtins) of
Just tyScheme -> do
(ty', _, _, constraints, []) <- freshPolymorphicInstance InstanceQ False tyScheme [] -- discard list of fresh type variables
mapM_ (\ty -> do
pred <- compileTypeConstraintToConstraint s ty
addPredicate pred) constraints
let elaborated = Val s ty' (Var ty' x)
return (ty', [], [], elaborated)
-- Couldn't find it
Nothing -> throw UnboundVariableError{ errLoc = s, errId = x }
-- In the local context
Just (Linear ty) -> do
let elaborated = Val s ty (Var ty x)
return (ty, [(x, Linear ty)], [], elaborated)
Just (Discharged ty c) -> do
k <- inferCoeffectType s c
let elaborated = Val s ty (Var ty x)
return (ty, [(x, Discharged ty (COne k))], [], elaborated)
-- Specialised application for scale
{-
TODO: needs thought
synthExpr defs gam pol
(App _ _ (Val _ _ (Var _ v)) (Val _ _ (NumFloat _ r))) | internalName v == "scale" = do
let float = TyCon $ mkId "Float"
return (FunTy (Box (CFloat (toRational r)) float) float, [])
-}
-- Application
synthExpr defs gam pol (App s _ e e') = do
(fTy, gam1, subst1, elaboratedL) <- synthExpr defs gam pol e
case fTy of
-- Got a function type for the left-hand side of application
(FunTy sig tau) -> do
liftIO $ debugM "FunTy sig" $ pretty sig
(gam2, subst2, elaboratedR) <- checkExpr defs gam (flipPol pol) False sig e'
gamNew <- ctxtPlus s gam1 gam2
subst <- combineSubstitutions s subst1 subst2
-- Synth subst
tau <- substitute subst2 tau
let elaborated = App s tau elaboratedL elaboratedR
return (tau, gamNew, subst, elaborated)
-- Not a function type
t -> throw LhsOfApplicationNotAFunction{ errLoc = s, errTy = t }
{- Promotion
[G] |- e : t
---------------------
[G]*r |- [e] : []_r t
-}
synthExpr defs gam pol (Val s _ (Promote _ e)) = do
debugM "Synthing a promotion of " $ pretty e
-- Create a fresh kind variable for this coeffect
vark <- freshIdentifierBase $ "kprom_[" <> pretty (startPos s) <> "]"
-- remember this new kind variable in the kind environment
modify (\st -> st { tyVarContext = (mkId vark, (KCoeffect, InstanceQ)) : tyVarContext st })
-- Create a fresh coeffect variable for the coeffect of the promoted expression
var <- freshTyVarInContext (mkId $ "prom_[" <> pretty (startPos s) <> "]") (KPromote $ TyVar $ mkId vark)
gamF <- discToFreshVarsIn s (freeVars e) gam (CVar var)
(t, gam', subst, elaboratedE) <- synthExpr defs gamF pol e
let finalTy = Box (CVar var) t
let elaborated = Val s finalTy (Promote t elaboratedE)
gam'' <- multAll s (freeVars e) (CVar var) gam'
return (finalTy, gam'', subst, elaborated)
-- BinOp
synthExpr defs gam pol (Binop s _ op e1 e2) = do
(t1, gam1, subst1, elaboratedL) <- synthExpr defs gam pol e1
(t2, gam2, subst2, elaboratedR) <- synthExpr defs gam pol e2
-- Look through the list of operators (of which there might be
-- multiple matching operators)
returnType <-
selectFirstByType t1 t2
. NonEmpty.toList
. Primitives.binaryOperators
$ op
gamOut <- ctxtPlus s gam1 gam2
subst <- combineSubstitutions s subst1 subst2
let elaborated = Binop s returnType op elaboratedL elaboratedR
return (returnType, gamOut, subst, elaborated)
where
    -- No matching operator type was found (meaning there is a type error)
selectFirstByType t1 t2 [] = throw FailedOperatorResolution
{ errLoc = s, errOp = op, errTy = t1 .-> t2 .-> var "..." }
selectFirstByType t1 t2 ((FunTy opt1 (FunTy opt2 resultTy)):ops) = do
-- Attempt to use this typing
(result, local) <- peekChecker $ do
(eq1, _, _) <- equalTypes s t1 opt1
(eq2, _, _) <- equalTypes s t2 opt2
return (eq1 && eq2)
-- If successful then return this local computation
case result of
Right True -> local >> return resultTy
_ -> selectFirstByType t1 t2 ops
selectFirstByType t1 t2 (_:ops) = selectFirstByType t1 t2 ops
-- Abstraction: we can only synthesise the type of a
-- lambda in Church style (explicit argument type)
synthExpr defs gam pol (Val s _ (Abs _ p (Just sig) e)) = do
newConjunct
(bindings, localVars, substP, elaboratedP, _) <- ctxtFromTypedPattern s sig p NotFull
newConjunct
pIrrefutable <- isIrrefutable s sig p
if pIrrefutable then do
(tau, gam'', subst, elaboratedE) <- synthExpr defs (bindings <> gam) pol e
-- Locally we should have this property (as we are under a binder)
ctxtApprox s (gam'' `intersectCtxts` bindings) bindings
let finalTy = FunTy sig tau
let elaborated = Val s finalTy (Abs finalTy elaboratedP (Just sig) elaboratedE)
substFinal <- combineSubstitutions s substP subst
finalTy' <- substitute substP finalTy
concludeImplication s localVars
return (finalTy', gam'' `subtractCtxt` bindings, substFinal, elaborated)
else throw RefutablePatternError{ errLoc = s, errPat = p }
-- Abstraction where the argument type is not given explicitly:
-- invent a fresh type variable for the argument and synthesise from there
synthExpr defs gam pol (Val s _ (Abs _ p Nothing e)) = do
newConjunct
tyVar <- freshTyVarInContext (mkId "t") KType
let sig = (TyVar tyVar)
(bindings, localVars, substP, elaboratedP, _) <- ctxtFromTypedPattern s sig p NotFull
newConjunct
pIrrefutable <- isIrrefutable s sig p
if pIrrefutable then do
(tau, gam'', subst, elaboratedE) <- synthExpr defs (bindings <> gam) pol e
-- Locally we should have this property (as we are under a binder)
ctxtApprox s (gam'' `intersectCtxts` bindings) bindings
let finalTy = FunTy sig tau
let elaborated = Val s finalTy (Abs finalTy elaboratedP (Just sig) elaboratedE)
finalTy' <- substitute substP finalTy
concludeImplication s localVars
subst <- combineSubstitutions s substP subst
return (finalTy', gam'' `subtractCtxt` bindings, subst, elaborated)
else throw RefutablePatternError{ errLoc = s, errPat = p }
synthExpr _ _ _ e =
throw NeedTypeSignature{ errLoc = getSpan e, errExpr = e }
-- Check an optional type signature for equality against a type
optionalSigEquality :: (?globals :: Globals) => Span -> Maybe Type -> Type -> Checker ()
optionalSigEquality _ Nothing _ = pure ()
optionalSigEquality s (Just t) t' = do
_ <- equalTypes s t' t
pure ()
solveConstraints :: (?globals :: Globals) => Pred -> Span -> Id -> Checker ()
solveConstraints predicate s name = do
-- Get the coeffect kind context and constraints
checkerState <- get
let ctxtCk = tyVarContext checkerState
coeffectVars <- justCoeffectTypesConverted s ctxtCk
  -- remove any variables bound already in the predicate
coeffectVars <- return (coeffectVars `deleteVars` boundVars predicate)
debugM "tyVarContext" (pretty $ tyVarContext checkerState)
debugM "context into the solver" (pretty $ coeffectVars)
debugM "Solver predicate" $ pretty predicate
result <- liftIO $ provePredicate predicate coeffectVars
case result of
QED -> return ()
NotValid msg -> do
msg' <- rewriteMessage msg
simplPred <- simplifyPred predicate
-- try trivial unsats again
let unsats' = trivialUnsatisfiableConstraints simplPred
if not (null unsats')
then mapM_ (\c -> throw GradingError{ errLoc = getSpan c, errConstraint = Neg c }) unsats'
else
if msg' == "is Falsifiable\n"
then throw SolverErrorFalsifiableTheorem
{ errLoc = s, errDefId = name, errPred = simplPred }
else throw SolverErrorCounterExample
{ errLoc = s, errDefId = name, errPred = simplPred }
NotValidTrivial unsats ->
mapM_ (\c -> throw GradingError{ errLoc = getSpan c, errConstraint = Neg c }) unsats
Timeout ->
throw SolverTimeout{ errLoc = s, errSolverTimeoutMillis = solverTimeoutMillis, errDefId = name, errContext = "grading", errPred = predicate }
OtherSolverError msg -> throw SolverError{ errLoc = s, errMsg = msg }
SolverProofError msg -> error msg
-- Rewrite an error message coming from the solver
rewriteMessage :: String -> Checker String
rewriteMessage msg = do
st <- get
let tyVars = tyVarContext st
let msgLines = T.lines $ T.pack msg
-- Rewrite internal names to source names
let msgLines' = map (\line -> foldl convertLine line tyVars) msgLines
return $ T.unpack (T.unlines msgLines')
where
convertLine line (v, (k, _)) =
-- Try to replace line variables in the line
let line' = T.replace (T.pack (internalName v)) (T.pack (sourceName v)) line
-- If this succeeds we might want to do some other replacements
line'' =
if line /= line' then
case k of
KPromote (TyCon (internalName -> "Level")) ->
T.replace (T.pack $ show privateRepresentation) (T.pack "Private")
(T.replace (T.pack $ show publicRepresentation) (T.pack "Public")
(T.replace (T.pack "Integer") (T.pack "Level") line'))
_ -> line'
else line'
in line''
justCoeffectTypesConverted :: (?globals::Globals)
=> Span -> [(a, (Kind, b))] -> Checker [(a, (Type, b))]
justCoeffectTypesConverted s xs = mapM convert xs >>= (return . catMaybes)
where
convert (var, (KPromote t, q)) = do
k <- inferKindOfType s t
if isCoeffectKind k
then return $ Just (var, (t, q))
else return Nothing
convert (var, (KVar v, q)) = do
k <- inferKindOfType s (TyVar v)
if isCoeffectKind k
then return $ Just (var, (TyVar v, q))
else return Nothing
convert _ = return Nothing
justCoeffectTypesConvertedVars :: (?globals::Globals)
=> Span -> [(Id, Kind)] -> Checker (Ctxt Type)
justCoeffectTypesConvertedVars s env = do
let implicitUniversalMadeExplicit = map (\(var, k) -> (var, (k, ForallQ))) env
env' <- justCoeffectTypesConverted s implicitUniversalMadeExplicit
return $ stripQuantifiers env'
-- | `ctxtApprox s ctxt1 ctxt2` checks that `ctxt1` is approximated by `ctxt2`;
-- the typical pattern is that `ctxt2` represents a specification
-- (i.e. input to checking) and `ctxt1` represents the actual usage
ctxtApprox :: (?globals :: Globals) =>
Span -> Ctxt Assumption -> Ctxt Assumption -> Checker ()
ctxtApprox s ctxt1 ctxt2 = do
  -- intersection contains those ids from ctxt1 which appear in ctxt2
intersection <-
-- For everything in the right context
-- (which should come as an input to checking)
forM ctxt2 $ \(id, ass2) ->
-- See if it appears in the left context...
case lookup id ctxt1 of
-- ... if so equate
Just ass1 -> do
relateByAssumption s ApproximatedBy (id, ass1) (id, ass2)
return id
-- ... if not check to see if the missing variable is linear
Nothing ->
case ass2 of
-- Linear gets instantly reported
Linear t -> illLinearityMismatch s . pure $ LinearNotUsed id
-- Else, this could be due to weakening so see if this is allowed
Discharged t c -> do
kind <- inferCoeffectType s c
relateByAssumption s ApproximatedBy (id, Discharged t (CZero kind)) (id, ass2)
return id
-- Last we sanity check, if there is anything in ctxt1 that is not in ctxt2
-- then we have an issue!
forM_ ctxt1 $ \(id, ass1) ->
if (id `elem` intersection)
then return ()
else throw UnboundVariableError{ errLoc = s, errId = id }
-- | `ctxtEquals s ctxt1 ctxt2` checks if two contexts are equal;
-- the typical pattern is that `ctxt2` represents a specification
-- (i.e. input to checking) and `ctxt1` represents the actual usage
ctxtEquals :: (?globals :: Globals) =>
Span -> Ctxt Assumption -> Ctxt Assumption -> Checker ()
ctxtEquals s ctxt1 ctxt2 = do
  -- intersection contains those ids from ctxt1 which appear in ctxt2
intersection <-
-- For everything in the right context
-- (which should come as an input to checking)
forM ctxt2 $ \(id, ass2) ->
-- See if it appears in the left context...
case lookup id ctxt1 of
-- ... if so equate
Just ass1 -> do
relateByAssumption s Eq (id, ass1) (id, ass2)
return id
-- ... if not check to see if the missing variable is linear
Nothing ->
case ass2 of
-- Linear gets instantly reported
Linear t -> illLinearityMismatch s . pure $ LinearNotUsed id
-- Else, this could be due to weakening so see if this is allowed
Discharged t c -> do
kind <- inferCoeffectType s c
relateByAssumption s Eq (id, Discharged t (CZero kind)) (id, ass2)
return id
-- Last we sanity check, if there is anything in ctxt1 that is not in ctxt2
-- then we have an issue!
forM_ ctxt1 $ \(id, ass1) ->
if (id `elem` intersection)
then return ()
else throw UnboundVariableError{ errLoc = s, errId = id }
{- | Take the least-upper bound of two contexts.
If one context contains a linear variable that is not present in
the other, then the resulting context will not have this linear variable.
   Also returns a list of the new type variables created to do the join. -}
joinCtxts :: (?globals :: Globals) => Span -> Ctxt Assumption -> Ctxt Assumption
-> Checker (Ctxt Assumption, Ctxt Kind)
joinCtxts s ctxt1 ctxt2 = do
-- All the type assumptions from ctxt1 whose variables appear in ctxt2
-- and weaken all others
ctxt <- intersectCtxtsWithWeaken s ctxt1 ctxt2
-- All the type assumptions from ctxt2 whose variables appear in ctxt1
-- and weaken all others
ctxt' <- intersectCtxtsWithWeaken s ctxt2 ctxt1
-- Make an context with fresh coeffect variables for all
-- the variables which are in both ctxt1 and ctxt2...
(varCtxt, tyVars) <- freshVarsIn s (map fst ctxt) ctxt
-- ... and make these fresh coeffects the upper-bound of the coeffects
-- in ctxt and ctxt'
_ <- zipWith3M_ (relateByLUB s) ctxt ctxt' varCtxt
-- Return the common upper-bound context of ctxt1 and ctxt2
return (varCtxt, tyVars)
where
zipWith3M_ :: Monad m => (a -> b -> c -> m d) -> [a] -> [b] -> [c] -> m [d]
zipWith3M_ f _ _ [] = return []
zipWith3M_ f _ [] _ = return []
zipWith3M_ f [] _ _ = return []
zipWith3M_ f (x:xs) (y:ys) (z:zs) = do
w <- f x y z
ws <- zipWith3M_ f xs ys zs
return $ w : ws
{- | Intersect contexts and weaken anything that does not appear in both
relative to the left context (this is not commutative) -}
intersectCtxtsWithWeaken
:: (?globals :: Globals)
=> Span
-> Ctxt Assumption
-> Ctxt Assumption
-> Checker (Ctxt Assumption)
intersectCtxtsWithWeaken s a b = do
let intersected = intersectCtxts a b
-- All the things that were not shared
let remaining = b `subtractCtxt` intersected
let leftRemaining = a `subtractCtxt` intersected
weakenedRemaining <- mapM weaken remaining
let newCtxt = intersected <> filter isNonLinearAssumption (weakenedRemaining <> leftRemaining)
return . normaliseCtxt $ newCtxt
where
isNonLinearAssumption :: (Id, Assumption) -> Bool
isNonLinearAssumption (_, Discharged _ _) = True
isNonLinearAssumption _ = False
weaken :: (Id, Assumption) -> Checker (Id, Assumption)
weaken (var, Linear t) =
return (var, Linear t)
weaken (var, Discharged t c) = do
kind <- inferCoeffectType s c
return (var, Discharged t (CZero kind))
{- | Given an input context and output context, check the usage of
variables in the output, returning a list of usage mismatch
information if, e.g., a variable is bound linearly in the input but is not
used in the output, or is discharged in the output -}
checkLinearity :: Ctxt Assumption -> Ctxt Assumption -> [LinearityMismatch]
checkLinearity [] _ = []
checkLinearity ((v, Linear _):inCtxt) outCtxt =
case lookup v outCtxt of
-- Good: linear variable was used
Just Linear{} -> checkLinearity inCtxt outCtxt
-- Bad: linear variable was discharged (boxed var but binder not unboxed)
Just Discharged{} -> LinearUsedNonLinearly v : checkLinearity inCtxt outCtxt
Nothing -> LinearNotUsed v : checkLinearity inCtxt outCtxt
checkLinearity ((_, Discharged{}):inCtxt) outCtxt =
-- Discharged things can be discarded, so it doesn't matter what
-- happens with them
checkLinearity inCtxt outCtxt
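-- Illustration (sketch; `x`, `t`, `c` are placeholders, not real identifiers):
--   checkLinearity [(x, Linear t)] []                       ~> [LinearNotUsed x]
--   checkLinearity [(x, Linear t)] [(x, Discharged t c)]    ~> [LinearUsedNonLinearly x]
--   checkLinearity [(x, Discharged t c)] []                 ~> []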
-- Assumes that the two assumptions are for the same variable
relateByAssumption :: (?globals :: Globals)
=> Span
-> (Span -> Coeffect -> Coeffect -> Type -> Constraint)
-> (Id, Assumption)
-> (Id, Assumption)
-> Checker ()
-- Linear assumptions ignored
relateByAssumption _ _ (_, Linear _) (_, Linear _) = return ()
-- Discharged coeffect assumptions
relateByAssumption s rel (_, Discharged _ c1) (_, Discharged _ c2) = do
(kind, (inj1, inj2)) <- mguCoeffectTypesFromCoeffects s c1 c2
addConstraint (rel s (inj1 c1) (inj2 c2) kind)
-- Linear binding and a graded binding (likely from a promotion)
relateByAssumption s _ (idX, _) (idY, _) =
if idX == idY
then throw UnifyGradedLinear{ errLoc = s, errLinearOrGraded = idX }
else error $ "Internal bug: " <> pretty idX <> " does not match " <> pretty idY
-- Relate 3 assumptions by the least-upper bound relation, i.e.,
-- `relateByLUB s c1 c2 c3` means `c3` is the lub of `c1` and `c2`
-- Assumes that the three assumptions are for the same variable
relateByLUB :: (?globals :: Globals)
=> Span
-> (Id, Assumption)
-> (Id, Assumption)
-> (Id, Assumption)
-> Checker ()
-- Linear assumptions ignored
relateByLUB _ (_, Linear _) (_, Linear _) (_, Linear _) = return ()
-- Discharged coeffect assumptions
relateByLUB s (_, Discharged _ c1) (_, Discharged _ c2) (_, Discharged _ c3) = do
(kind, (inj1, inj2)) <- mguCoeffectTypesFromCoeffects s c1 c2
addConstraint (Lub s (inj1 c1) (inj2 c2) c3 kind)
-- Linear binding and a graded binding (likely from a promotion)
relateByLUB s (idX, _) (idY, _) (_, _) =
if idX == idY
then throw UnifyGradedLinear{ errLoc = s, errLinearOrGraded = idX }
else error $ "Internal bug: " <> pretty idX <> " does not match " <> pretty idY
-- Restrict the context to the given variables and prepare it for promotion:
-- discharged assumptions get their coeffect annotated (via CSig) with the
-- type of the demand, and linear assumptions are derelicted to grade 1
discToFreshVarsIn :: (?globals :: Globals) => Span -> [Id] -> Ctxt Assumption -> Coeffect
-> Checker (Ctxt Assumption)
discToFreshVarsIn s vars ctxt coeffect = mapM toFreshVar (relevantSubCtxt vars ctxt)
where
toFreshVar (var, Discharged t c) = do
(coeffTy, _) <- mguCoeffectTypesFromCoeffects s c coeffect
return (var, Discharged t (CSig c coeffTy))
toFreshVar (var, Linear t) = do
coeffTy <- inferCoeffectType s coeffect
return (var, Discharged t (COne coeffTy))
-- `freshVarsIn s vars ctxt` creates a new context in which every variable of
-- `ctxt` that appears in `vars` and is discharged is turned into a discharged
-- coeffect assumption annotated with a fresh coeffect variable (variables not
-- in `vars` are deleted).
-- Returns also the list of newly generated type variables
-- e.g.
-- `freshVarsIn ["x", "y"] [("x", Discharged (2, Int),
-- ("y", Linear Int),
-- ("z", Discharged (3, Int)]
-- -> ([("x", Discharged (c5 :: Nat, Int),
-- ("y", Linear Int)]
-- , [c5 :: Nat])
freshVarsIn :: (?globals :: Globals) => Span -> [Id] -> (Ctxt Assumption)
-> Checker (Ctxt Assumption, Ctxt Kind)
freshVarsIn s vars ctxt = do
newCtxtAndTyVars <- mapM toFreshVar (relevantSubCtxt vars ctxt)
let (newCtxt, tyVars) = unzip newCtxtAndTyVars
return (newCtxt, catMaybes tyVars)
where
toFreshVar :: (Id, Assumption) -> Checker ((Id, Assumption), Maybe (Id, Kind))
toFreshVar (var, Discharged t c) = do
ctype <- inferCoeffectType s c
-- Create a fresh variable
freshName <- freshIdentifierBase (internalName var)
let cvar = mkId freshName
-- Update the coeffect kind context
modify (\s -> s { tyVarContext = (cvar, (promoteTypeToKind ctype, InstanceQ)) : tyVarContext s })
-- Return the freshened var-type mapping
-- and the new type variable
return ((var, Discharged t (CVar cvar)), Just (cvar, promoteTypeToKind ctype))
toFreshVar (var, Linear t) = return ((var, Linear t), Nothing)
-- Combine two contexts
ctxtPlus :: (?globals :: Globals) => Span -> Ctxt Assumption -> Ctxt Assumption
-> Checker (Ctxt Assumption)
ctxtPlus _ [] ctxt2 = return ctxt2
ctxtPlus s ((i, v) : ctxt1) ctxt2 = do
ctxt' <- extCtxt s ctxt2 i v
ctxtPlus s ctxt1 ctxt'
-- Extend the context with a variable and its assumption, combining usages
extCtxt :: (?globals :: Globals) => Span -> Ctxt Assumption -> Id -> Assumption
-> Checker (Ctxt Assumption)
extCtxt s ctxt var (Linear t) = do
case lookup var ctxt of
Just (Linear t') ->
if t == t'
then throw LinearityError{ errLoc = s, linearityMismatch = LinearUsedMoreThanOnce var }
else throw TypeVariableMismatch{ errLoc = s, errVar = var, errTy1 = t, errTy2 = t' }
Just (Discharged t' c) ->
if t == t'
then do
k <- inferCoeffectType s c
return $ replace ctxt var (Discharged t (c `CPlus` COne k))
else throw TypeVariableMismatch{ errLoc = s, errVar = var, errTy1 = t, errTy2 = t' }
Nothing -> return $ (var, Linear t) : ctxt
extCtxt s ctxt var (Discharged t c) = do
case lookup var ctxt of
Just (Discharged t' c') ->
if t == t'
then return $ replace ctxt var (Discharged t' (c `CPlus` c'))
else throw TypeVariableMismatch{ errLoc = s, errVar = var, errTy1 = t, errTy2 = t' }
Just (Linear t') ->
if t == t'
then do
k <- inferCoeffectType s c
return $ replace ctxt var (Discharged t (c `CPlus` COne k))
else throw TypeVariableMismatch{ errLoc = s, errVar = var, errTy1 = t, errTy2 = t' }
Nothing -> return $ (var, Discharged t c) : ctxt
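-- Behaviour sketch for `extCtxt` (and hence `ctxtPlus`); variable and grade
-- names are placeholders:
--   {x : Linear t}       + {x : Linear t}       ~> linearity error (used twice)
--   {x : Discharged t c} + {x : Discharged t d} ~> {x : Discharged t (c + d)}
--   {x : Linear t}       + {x : Discharged t c} ~> {x : Discharged t (c + 1)}
--   disjoint variables are simply appended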
-- Helper, foldM on a list with at least one element
fold1M :: Monad m => (a -> a -> m a) -> [a] -> m a
fold1M _ [] = error "Must have at least one case"
fold1M f (x:xs) = foldM f x xs
justLinear :: [(a, Assumption)] -> [(a, Assumption)]
justLinear [] = []
justLinear ((x, Linear t) : xs) = (x, Linear t) : justLinear xs
justLinear ((x, _) : xs) = justLinear xs
checkGuardsForExhaustivity :: (?globals :: Globals)
=> Span -> Id -> Type -> [Equation () ()] -> Checker ()
checkGuardsForExhaustivity s name ty eqs = do
debugM "Guard exhaustivity" "todo"
return ()
checkGuardsForImpossibility :: (?globals :: Globals) => Span -> Id -> Checker ()
checkGuardsForImpossibility s name = do
-- Get top of guard predicate stack
st <- get
let ps = head $ guardPredicates st
-- Convert all universal variables to existential
let tyVarContextExistential =
mapMaybe (\(v, (k, q)) ->
case q of
BoundQ -> Nothing
_ -> Just (v, (k, InstanceQ))) (tyVarContext st)
tyVars <- justCoeffectTypesConverted s tyVarContextExistential
-- For each guard predicate
forM_ ps $ \((ctxt, p), s) -> do
    -- Existentially quantify those variables occurring in the pattern in scope
let thm = foldr (uncurry Exists) p ctxt
debugM "impossibility" $ "about to try" <> pretty thm
-- Try to prove the theorem
result <- liftIO $ provePredicate thm tyVars
p <- simplifyPred thm
case result of
QED -> return ()
-- Various kinds of error
-- TODO make errors better
NotValid msg -> throw ImpossiblePatternMatch
{ errLoc = s
, errId = name
, errPred = p
}
NotValidTrivial unsats -> throw ImpossiblePatternMatchTrivial
{ errLoc = s
, errId = name
, errUnsats = unsats
}
Timeout -> throw SolverTimeout
{ errLoc = s
, errDefId = name
, errSolverTimeoutMillis = solverTimeoutMillis
, errContext = "pattern match of an equation"
, errPred = p
}
OtherSolverError msg -> throw ImpossiblePatternMatch
{ errLoc = s
, errId = name
, errPred = p
}
SolverProofError msg -> error msg
| dorchard/gram_lang | frontend/src/Language/Granule/Checker/Checker.hs | bsd-3-clause | 53,808 | 0 | 25 | 13,421 | 14,602 | 7,552 | 7,050 | 841 | 14 |
-- !!! Bug # 7600.
-- See file T7600 for main description.
{-# LANGUAGE CPP #-}
module T7600_A (test_run) where
import Control.Monad.ST
import Data.Array.Unsafe( castSTUArray )
import Data.Array.ST hiding( castSTUArray )
import Data.Char
import Data.Word
import Numeric
import GHC.Float
#include "ghcconfig.h"
-- Test run
test_run :: Float -> Double -> IO ()
test_run float_number double_number = do
print $ dToStr double_number
  -- XXX: The commented-out line below is the problematic code: its result changes with optimisation.
-- print $ dToStr (widen $ narrow double_number)
print $ dToStr (widen' $ narrow' double_number)
-- use standard Haskell functions for type conversion... which are kind of
-- insane (see ticket # 3676) [these fail when -O0 is used...]
narrow :: Double -> Float
{-# NOINLINE narrow #-}
narrow = realToFrac
widen :: Float -> Double
{-# NOINLINE widen #-}
widen = realToFrac
-- use GHC specific functions which work as expected [work for both -O0 and -O]
narrow' :: Double -> Float
{-# NOINLINE narrow' #-}
narrow' = double2Float
widen' :: Float -> Double
{-# NOINLINE widen' #-}
widen' = float2Double
doubleToBytes :: Double -> [Int]
doubleToBytes d
= runST (do
arr <- newArray_ ((0::Int),7)
writeArray arr 0 d
arr <- castDoubleToWord8Array arr
i0 <- readArray arr 0
i1 <- readArray arr 1
i2 <- readArray arr 2
i3 <- readArray arr 3
i4 <- readArray arr 4
i5 <- readArray arr 5
i6 <- readArray arr 6
i7 <- readArray arr 7
return (map fromIntegral [i0,i1,i2,i3,i4,i5,i6,i7])
)
castFloatToWord8Array :: STUArray s Int Float -> ST s (STUArray s Int Word8)
castFloatToWord8Array = castSTUArray
castDoubleToWord8Array :: STUArray s Int Double -> ST s (STUArray s Int Word8)
castDoubleToWord8Array = castSTUArray
dToStr :: Double -> String
dToStr d
= let bs = doubleToBytes d
hex d' = case showHex d' "" of
[] -> error "dToStr: too few hex digits for float"
[x] -> ['0',x]
[x,y] -> [x,y]
_ -> error "dToStr: too many hex digits for float"
str = map toUpper $ concat . fixEndian . (map hex) $ bs
in "0x" ++ str
fixEndian :: [a] -> [a]
#ifdef WORDS_BIGENDIAN
fixEndian = id
#else
fixEndian = reverse
#endif
| gcampax/ghc | testsuite/tests/codeGen/should_run/T7600_A.hs | bsd-3-clause | 2,346 | 0 | 13 | 609 | 616 | 325 | 291 | 56 | 4 |
{-# LANGUAGE TupleSections #-}
import Control.Monad (void, forM, when, forever)
import Control.Monad.IO.Class
import Control.Concurrent
import Control.Applicative
import Data.Version
import Data.Char (isSpace)
import Data.Maybe (listToMaybe)
import Data.Ord (comparing)
import Data.List (sortBy,isPrefixOf)
import System.Directory
import System.Exit
import System.Process
import System.FilePath
import System.IO
import qualified Data.Set as S
import Data.Either (partitionEithers)
import Control.Error
import Options.Applicative
import qualified Data.ByteString.Char8 as BS
import System.IO.Temp
import Distribution.Verbosity (Verbosity, normal, verbose)
import Distribution.Simple.Compiler (Compiler (compilerId), compilerFlavor)
import Distribution.Simple.GHC
import Distribution.Simple.Program (defaultProgramConfiguration)
import Distribution.Simple.Compiler (PackageDB (..), PackageDBStack)
import Distribution.Simple.PackageIndex ( PackageIndex, lookupPackageName
, reverseTopologicalOrder
, searchByNameSubstring
)
import Distribution.System (buildPlatform)
import Distribution.InstalledPackageInfo (InstalledPackageInfo_ (..), InstalledPackageInfo)
import Distribution.Package (PackageId, PackageName (..), PackageIdentifier (..))
import qualified Distribution.Simple.InstallDirs as IDirs
import qualified Paths_hoogle_index
import Data.Version
import Hoogle (defaultDatabaseLocation)
-- | Various configuration
data Config = Config { verbosity :: Verbosity
, installTextBase :: Bool
, useLocalDocs :: Bool
, ignoreExistingTextBases :: Bool
, otherPackageDbs :: [PackageDB]
, hoogleCall :: Maybe HoogleCall
}
-- | Calling Hoogle
data HoogleCall = HoogleCall [String]
opts =
Config <$> flag normal verbose
( short 'v' <> long "verbose"
<> help "Enable verbose output"
)
<*> switch
( short 'i' <> long "install"
<> help "Install generated textbases for future use"
)
<*> switch
( short 'l' <> long "local"
<> help "Use local Haddock documentation when available"
)
<*> switch
( long "ignore-existing"
<> help "Always regenerate textbases even if one already exists"
)
<*> many (option (SpecificPackageDB <$> str)
( short 'f' <> long "package-db"
<> help "Add an addition package database (e.g. a Cabal sandbox)"
))
<*> optional
( subparser
(command "hoogle" (info (HoogleCall <$> (some (argument str (metavar "ARGS..."))))
(progDesc"Run Hoogle with proper sandboxing options" <> noIntersperse))
)
)
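-- Example invocations accepted by this parser (a sketch; the package-db path
-- is made up):
--
-- > hoogle-index -v -i -l
-- > hoogle-index -f /path/to/sandbox/packages.conf.d --ignore-existing
-- > hoogle-index hoogle search fmap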
-- | An unpacked Cabal project
newtype PackageTree = PkgTree FilePath
deriving (Show)
-- | Call a process
callProcessE :: Config -> FilePath -> [String] -> ExceptT String IO ()
callProcessE ver = callProcessIn ver "."
-- | Call a process from the given directory
callProcessIn :: Config
-> FilePath -- ^ directory
-> FilePath -- ^ executable
-> [String] -- ^ arguments
-> ExceptT String IO ()
callProcessIn cfg dir exec args = do
let ver = verbosity cfg
stream
| verbose < ver = CreatePipe
| otherwise = Inherit
let cp = (proc exec args) { cwd = Just dir
, std_err = stream
, std_out = stream
}
(_, hOut, hErr, ph) <- tryIO' $ createProcess cp
let handleStream (Just h) = liftIO $ void $ forkIO $ forever $ hGetLine h
handleStream Nothing = return ()
handleStream hOut
handleStream hErr
code <- liftIO $ waitForProcess ph
case code of
ExitSuccess -> return ()
ExitFailure e -> throwE $ "Process "++exec++" failed with error "++show e
-- | Unpack a package
unpack :: Config -> PackageId -> ExceptT String IO PackageTree
unpack cfg (PackageIdentifier (PackageName pkg) ver) = do
tmpDir <- liftIO getTemporaryDirectory
dir <- liftIO $ createTempDirectory tmpDir "hoogle-index.pkg"
callProcessIn cfg dir "cabal" ["unpack", pkg++"=="++showVersion ver]
return $ PkgTree $ dir </> pkg++"-"++showVersion ver
-- | Remove an unpacked tree
removeTree :: PackageTree -> IO ()
removeTree (PkgTree dir) =
removeDirectoryRecursive $ takeDirectory dir
-- | A Haddock textbase
newtype TextBase = TextBase BS.ByteString
-- | Write a Haddock textbase to a file
writeTextBase :: TextBase -> FilePath -> ExceptT String IO ()
writeTextBase (TextBase content) path = liftIO $ BS.writeFile path content
-- | A file containing a Haddock textbase
type TextBaseFile = FilePath
-- | Find a pre-installed textbase corresponding to a package (if one exists)
findTextBase :: InstalledPackageInfo -> IO (Maybe TextBaseFile)
findTextBase ipkg = do
listToMaybe . catMaybes <$> mapM checkDir (haddockHTMLs ipkg)
where
PackageName name = pkgName $ sourcePackageId ipkg
checkDir root = do
let path = root </> name++".txt"
putStrLn $ "Looking for "++path
exists <- doesFileExist path
if exists
then return $ Just path
else return Nothing
-- | Build a textbase from source
buildTextBase :: Config -> PackageName -> PackageTree -> ExceptT String IO TextBase
buildTextBase cfg (PackageName pkg) (PkgTree dir) = do
callProcessIn cfg dir "cabal" ["configure"]
callProcessIn cfg dir "cabal" ["haddock", "--hoogle"]
let path = dir </> "dist" </> "doc" </> "html" </> pkg </> (pkg++".txt")
TextBase <$> liftIO (BS.readFile path)
placeTextBase :: Config -> InstalledPackageInfo -> TextBase -> ExceptT String IO ()
placeTextBase cfg ipkg tb
| Just docRoot <- listToMaybe $ haddockHTMLs ipkg = tryIO' $ do
let TextBase content = tb
tbPath = docRoot </> name++".txt"
when (verbosity cfg > verbose)
$ liftIO $ putStrLn $ "Installing textbase to "++tbPath
createDirectoryIfMissing True docRoot
BS.writeFile tbPath content
| otherwise =
liftIO $ putStrLn $ "Can't install textbase due to missing documentation directory"
where
pkg = sourcePackageId ipkg
PackageName name = pkgName pkg
getTextBase :: Config -> InstalledPackageInfo -> ExceptT String IO TextBase
getTextBase cfg ipkg = do
existing <- if ignoreExistingTextBases cfg
then return Nothing
else liftIO $ findTextBase ipkg
case existing of
Just path -> TextBase <$> liftIO (BS.readFile path)
Nothing -> do
pkgTree <- unpack cfg pkg
tb <- buildTextBase cfg (pkgName pkg) pkgTree
liftIO $ removeTree pkgTree
when (installTextBase cfg) $ do
-- It's not the end of the world if this fails
result <- liftIO $ runExceptT $ placeTextBase cfg ipkg tb
case result of
Left e -> liftIO $ putStrLn $ name++": Failed to install textbase: "++e
Right _ -> return ()
return tb
where
pkg = sourcePackageId ipkg
PackageName name = pkgName pkg
-- | A Hoogle database
newtype Database = DB FilePath
deriving (Show)
-- | Delete a database file
removeDB :: Database -> IO ()
removeDB (DB path) = removeFile path
-- | Convert a textbase to a Hoogle database
convert :: Config -> TextBaseFile
-> Maybe FilePath -- ^ documentation root
-> [Database]
-> ExceptT String IO Database
convert cfg tbf docRoot merge = do
let docRoot' = maybe [] (\d->["--haddock", "--doc="++d]) docRoot
(tb,h) <- liftIO $ openTempFile "/tmp" "db.hoo"
liftIO $ hClose h
let args = ["convert", tbf, tb] ++ docRoot'
++ map (\(DB db)->"--merge="++db) merge
callProcessE cfg "hoogle" args
return $ DB tb
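-- The external command assembled above looks roughly like this (a sketch;
-- file names and the doc path are illustrative):
--
-- > hoogle convert base.txt /tmp/db1234.hoo --haddock --doc=/usr/share/doc/base --merge=/tmp/other.hoo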
-- | Generate a Hoogle database for an installed package
indexPackage :: Config -> InstalledPackageInfo -> ExceptT String IO Database
indexPackage cfg ipkg
| pkgName pkg `S.member` downloadPackages = do
tmpDir <- liftIO getTemporaryDirectory
callProcessE cfg "hoogle" ["data", "--datadir="++tmpDir, name]
return $ DB $ tmpDir </> name++".hoo"
| pkgName pkg `S.member` ignorePackages =
throwE $ "Can't build documentation for "++name
| otherwise = do
tb <- getTextBase cfg ipkg
docRoot <- case haddockHTMLs ipkg of
docRoot:_ | useLocalDocs cfg -> return $ Just docRoot
[] | useLocalDocs cfg -> do
liftIO $ putStrLn $ "No local documentation for "++show pkg
return Nothing
_ -> return Nothing
(tbf,h) <- liftIO $ openTempFile "/tmp" "textbase.txt"
liftIO $ hClose h
writeTextBase tb tbf
db <- convert cfg tbf docRoot []
tryIO' $ removeFile tbf
return db
where
downloadPackages = S.fromList $ map PackageName
["base"]
ignorePackages = S.fromList $ map PackageName
[ "rts", "ghc-prim", "integer-gmp", "bin-package-db"
, "ghc", "haskell98", "haskell2010"]
pkg = sourcePackageId ipkg
PackageName name = pkgName pkg
-- | Combine Hoogle databases
combineDBs :: Config -> [Database] -> ExceptT String IO Database
combineDBs cfg dbs = do
tmpDir <- tryIO' $ getTemporaryDirectory
(out, h) <- tryIO' $ openTempFile tmpDir "combined.hoo"
tryIO' $ hClose h
let args = ["combine", "--outfile="++out] ++ map (\(DB db)->db) dbs
callProcessE cfg "hoogle" args
return (DB out)
-- | Install a database in Hoogle's database directory
installDB :: Database -> SandboxPath -> ExceptT String IO ()
installDB (DB db) sp = do
dbDir <- liftIO $ getDatabaseLocation sp
let destDir = dbDir </> "databases"
tryIO' $ createDirectoryIfMissing True destDir
let dest = destDir </> "default.hoo"
liftIO $ copyFile db dest
liftIO $ putStrLn $ "Installed Hoogle index to "++dest
main :: IO ()
main = do
cfg <- execParser
$ info (helper <*> opts)
( fullDesc
<> progDesc "Generate Hoogle indexes for locally install packages"
<> header "hoogle-index - Painless local Hoogle indexing"
<> footer ("hoogle-index version "++showVersion Paths_hoogle_index.version)
)
sandboxPath <- getSandboxPath
case hoogleCall cfg of
Nothing -> do
(compiler, _, progCfg) <- configure (verbosity cfg)
Nothing Nothing
defaultProgramConfiguration
let pkgDbs = getPackageDBs cfg sandboxPath
pkgIdx <- getInstalledPackages (verbosity cfg) pkgDbs progCfg
let pkgs = reverseTopologicalOrder pkgIdx
maybeIndex :: InstalledPackageInfo -> IO (Either (PackageId, String) Database)
maybeIndex pkg = do
putStrLn ""
result <- runExceptT $ indexPackage cfg pkg
case result of
Left e -> do
let pkgId = sourcePackageId pkg
PackageName name = pkgName pkgId
putStrLn $ "Error while indexing "++name++": "++e
return $ Left (pkgId, e)
Right r -> return $ Right r
(failed, idxs) <- fmap partitionEithers $ mapM maybeIndex pkgs
when (not $ null failed) $ do
putStrLn "Failed to build the following indexes:"
let failedMsg (pkgId, reason) = " "++show (pkgName pkgId)++"\t"++reason
putStrLn $ unlines $ map failedMsg failed
res <- runExceptT $ do
combined <- fmapLT ("Error while combining databases: "++)
$ combineDBs cfg idxs
installDB combined sandboxPath
either putStrLn return res
mapM_ removeDB idxs
Just (HoogleCall args) -> do
res <- runExceptT $
callProcessE cfg "hoogle" $ getHoogleArgs sandboxPath args
either putStrLn return res
tryIO' :: IO a -> ExceptT String IO a
tryIO' = fmapLT show . tryIO
------------------------- Sandbox management --------------------------
-- | The sandbox package database location
type SandboxPath = Maybe FilePath
-- | The location of the Hoogle database
getDatabaseLocation :: SandboxPath -> IO FilePath
getDatabaseLocation Nothing = defaultDatabaseLocation
getDatabaseLocation (Just path) = return $ (takeDirectory path) </> "hoogle"
-- | Get the path to the sandbox database if any
getSandboxPath :: IO SandboxPath
getSandboxPath = do
dir <- getCurrentDirectory
let f = dir </> "cabal.sandbox.config"
ex <- doesFileExist f
if ex
then
(listToMaybe .
map (dropWhile isSpace . tail . dropWhile (/= ':')) .
filter (isPrefixOf "package-db") .
lines) <$> readFile f
else return Nothing
-- | Get the package database stack
getPackageDBs :: Config -> SandboxPath -> PackageDBStack
getPackageDBs cfg Nothing = [GlobalPackageDB, UserPackageDB] ++ otherPackageDbs cfg
getPackageDBs cfg (Just sp) = [GlobalPackageDB, SpecificPackageDB sp] ++ otherPackageDbs cfg
-- | Get the Hoogle arguments, using the sandbox databases if any
getHoogleArgs :: SandboxPath -> [String] -> [String]
getHoogleArgs Nothing args = args
getHoogleArgs (Just path) args = args ++ ["-d", (takeDirectory path) </> "hoogle" </> "databases"]
| bgamari/hoogle-index | Main.hs | bsd-3-clause | 13,831 | 0 | 25 | 3,993 | 3,629 | 1,807 | 1,822 | 278 | 4 |
{-# LANGUAGE OverloadedStrings #-}
-- | An unframed snappy stream.
--
-- Example encoding of the string "foobar\\n":
--
-- > 00000000 07 18 66 6f 6f 62 61 72 0a |..foobar.|
--
-- Reference: https://github.com/google/snappy/blob/master/format_description.txt
module Codec.Compression.Snappy.Framed.NoFraming
( parseBlock
) where
import qualified Codec.Compression.Snappy as Snappy
import Data.Attoparsec.ByteString (Parser)
import qualified Data.Attoparsec.ByteString as AP
import Data.ByteString (ByteString)
-- The snappy format itself doesn't have a concept of a header.
-- parseHeader :: Parser ()
-- | Parse a single block of the compressed bytestream, returning a segment
-- of the uncompressed stream.
parseBlock :: Parser ByteString
parseBlock =
Snappy.decompress <$> AP.takeByteString
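-- Usage sketch (illustrative, not part of the original module; it assumes
-- attoparsec's 'parseOnly', which runs a 'Parser' over a strict 'ByteString'):
--
-- > decodeNoFraming :: ByteString -> Either String ByteString
-- > decodeNoFraming = AP.parseOnly parseBlock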
| asayers/snappy-framed | src/Codec/Compression/Snappy/Framed/NoFraming.hs | bsd-3-clause | 833 | 0 | 6 | 142 | 86 | 60 | 26 | 10 | 1 |
{-# LANGUAGE Safe #-}
module Data.FSA.Internal.NodeState (
NodeState,
newState,
terminal,
nonTerminal,
isTerminal,
confluence,
nonConfluence,
isConfluence
) where
import Data.Word (Word8)
import Data.Bits (setBit, testBit, clearBit)
type NodeState = Word8
terminalBit :: Int
terminalBit = 0
confluenceBit :: Int
confluenceBit = 1
-- | Create a default NodeState (no flags set).
newState :: NodeState
newState = 0
-- | Mark a NodeState as terminal.
terminal :: NodeState -> NodeState
terminal nodeState = setBit nodeState terminalBit
-- | Mark a NodeState as non-terminal.
nonTerminal :: NodeState -> NodeState
nonTerminal nodeState = clearBit nodeState terminalBit
-- | Check if a NodeState is terminal.
isTerminal :: NodeState -> Bool
isTerminal nodeState = testBit nodeState terminalBit
-- | Mark a NodeState as confluence.
confluence :: NodeState -> NodeState
confluence nodeState = setBit nodeState confluenceBit
-- | Mark a NodeState as non-confluence.
nonConfluence :: NodeState -> NodeState
nonConfluence nodeState = clearBit nodeState confluenceBit
-- | Check if a NodeState is a confluence NodeState.
isConfluence :: NodeState -> Bool
isConfluence nodeState = testBit nodeState confluenceBit
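-- Usage sketch (illustrative, not part of the original module):
--
-- >>> isTerminal (terminal newState)
-- True
-- >>> isConfluence (nonConfluence (confluence newState))
-- False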
| jahaynes/fsa | src/Data/FSA/Internal/NodeState.hs | bsd-3-clause | 1,249 | 0 | 5 | 216 | 236 | 135 | 101 | 31 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE RecordWildCards #-}
import Control.Monad (unless)
import Data.Monoid
import Data.Version (showVersion)
import Options.Applicative
import System.Exit (ExitCode (ExitSuccess), exitWith)
import System.Process (rawSystem)
import AddHandler (addHandler)
import Devel (DevelOpts (..), devel, DevelTermOpt(..))
import Keter (keter)
import Options (injectDefaults)
import qualified Paths_yesod_bin
import Scaffolding.Scaffolder (scaffold, backendOptions)
import HsFile (mkHsFile)
#ifndef WINDOWS
import Build (touch)
touch' :: IO ()
touch' = touch
windowsWarning :: String
windowsWarning = ""
#else
touch' :: IO ()
touch' = return ()
windowsWarning :: String
windowsWarning = " (does not work on Windows)"
#endif
data CabalPgm = Cabal | CabalDev deriving (Show, Eq)
data Options = Options
{ optCabalPgm :: CabalPgm
, optVerbose :: Bool
, optCommand :: Command
}
deriving (Show, Eq)
data Command = Init { _initBare :: Bool, _initName :: Maybe String, _initDatabase :: Maybe String }
| HsFiles
| Configure
| Build { buildExtraArgs :: [String] }
| Touch
| Devel { _develDisableApi :: Bool
, _develSuccessHook :: Maybe String
, _develFailHook :: Maybe String
, _develRescan :: Int
, _develBuildDir :: Maybe String
, develIgnore :: [String]
, develExtraArgs :: [String]
, _develPort :: Int
, _develTlsPort :: Int
, _proxyTimeout :: Int
, _noReverseProxy :: Bool
, _interruptOnly :: Bool
}
| Test
| AddHandler
{ addHandlerRoute :: Maybe String
, addHandlerPattern :: Maybe String
, addHandlerMethods :: [String]
}
| Keter
{ _keterNoRebuild :: Bool
, _keterNoCopyTo :: Bool
}
| Version
deriving (Show, Eq)
cabalCommand :: Options -> String
cabalCommand mopt
| optCabalPgm mopt == CabalDev = "cabal-dev"
| otherwise = "cabal"
main :: IO ()
main = do
o <- execParser =<< injectDefaults "yesod"
[ ("yesod.devel.extracabalarg" , \o args -> o { optCommand =
case optCommand o of
d@Devel{} -> d { develExtraArgs = args }
c -> c
})
, ("yesod.devel.ignore" , \o args -> o { optCommand =
case optCommand o of
d@Devel{} -> d { develIgnore = args }
c -> c
})
, ("yesod.build.extracabalarg" , \o args -> o { optCommand =
case optCommand o of
b@Build{} -> b { buildExtraArgs = args }
c -> c
})
] optParser'
let cabal = rawSystem' (cabalCommand o)
case optCommand o of
Init{..} -> scaffold _initBare _initName _initDatabase
HsFiles -> mkHsFile
Configure -> cabal ["configure"]
Build es -> touch' >> cabal ("build":es)
Touch -> touch'
Keter{..} -> keter (cabalCommand o) _keterNoRebuild _keterNoCopyTo
Version -> putStrLn ("yesod-bin version: " ++ showVersion Paths_yesod_bin.version)
AddHandler{..} -> addHandler addHandlerRoute addHandlerPattern addHandlerMethods
Test -> cabalTest cabal
Devel{..} -> let develOpts = DevelOpts
{ isCabalDev = optCabalPgm o == CabalDev
, forceCabal = _develDisableApi
, verbose = optVerbose o
, eventTimeout = _develRescan
, successHook = _develSuccessHook
, failHook = _develFailHook
, buildDir = _develBuildDir
, develPort = _develPort
, develTlsPort = _develTlsPort
, proxyTimeout = _proxyTimeout
, useReverseProxy = not _noReverseProxy
, terminateWith = if _interruptOnly then TerminateOnlyInterrupt else TerminateOnEnter
}
in devel develOpts develExtraArgs
where
cabalTest cabal = do touch'
_ <- cabal ["configure", "--enable-tests", "-flibrary-only"]
_ <- cabal ["build"]
cabal ["test"]
optParser' :: ParserInfo Options
optParser' = info (helper <*> optParser) ( fullDesc <> header "Yesod Web Framework command line utility" )
optParser :: Parser Options
optParser = Options
<$> flag Cabal CabalDev ( long "dev" <> short 'd' <> help "use cabal-dev" )
<*> switch ( long "verbose" <> short 'v' <> help "More verbose output" )
<*> subparser ( command "init" (info initOptions
(progDesc "Scaffold a new site"))
<> command "hsfiles" (info (pure HsFiles)
(progDesc "Create a hsfiles file for the current folder"))
<> command "configure" (info (pure Configure)
(progDesc "Configure a project for building"))
<> command "build" (info (Build <$> extraCabalArgs)
(progDesc $ "Build project (performs TH dependency analysis)" ++ windowsWarning))
<> command "touch" (info (pure Touch)
(progDesc $ "Touch any files with altered TH dependencies but do not build" ++ windowsWarning))
<> command "devel" (info develOptions
(progDesc "Run project with the devel server"))
<> command "test" (info (pure Test)
(progDesc "Build and run the integration tests"))
<> command "add-handler" (info addHandlerOptions
(progDesc ("Add a new handler and module to the project."
++ " Interactively asks for input if you do not specify arguments.")))
<> command "keter" (info keterOptions
(progDesc "Build a keter bundle"))
<> command "version" (info (pure Version)
(progDesc "Print the version of Yesod"))
)
initOptions :: Parser Command
initOptions = Init
<$> switch (long "bare" <> help "Create files in current folder")
<*> optStr (long "name" <> short 'n' <> metavar "APP_NAME"
<> help "Set the application name")
<*> optStr (long "database" <> short 'd' <> metavar "DATABASE"
<> help ("Preconfigure for selected database (options: " ++ backendOptions ++ ")"))
keterOptions :: Parser Command
keterOptions = Keter
<$> switch ( long "nobuild" <> short 'n' <> help "Skip rebuilding" )
<*> switch ( long "nocopyto" <> help "Ignore copy-to directive in keter config file" )
defaultRescan :: Int
defaultRescan = 10
develOptions :: Parser Command
develOptions = Devel <$> switch ( long "disable-api" <> short 'd'
<> help "Disable fast GHC API rebuilding")
<*> optStr ( long "success-hook" <> short 's' <> metavar "COMMAND"
<> help "Run COMMAND after rebuild succeeds")
<*> optStr ( long "failure-hook" <> short 'f' <> metavar "COMMAND"
<> help "Run COMMAND when rebuild fails")
<*> option auto ( long "event-timeout" <> short 't' <> value defaultRescan <> metavar "N"
<> help ("Force rescan of files every N seconds (default "
++ show defaultRescan
++ ", use -1 to rely on FSNotify alone)") )
<*> optStr ( long "builddir" <> short 'b'
<> help "Set custom cabal build directory, default `dist'")
<*> many ( strOption ( long "ignore" <> short 'i' <> metavar "DIR"
<> help "ignore file changes in DIR" )
)
<*> extraCabalArgs
<*> option auto ( long "port" <> short 'p' <> value 3000 <> metavar "N"
<> help "Devel server listening port" )
<*> option auto ( long "tls-port" <> short 'q' <> value 3443 <> metavar "N"
<> help "Devel server listening port (tls)" )
<*> option auto ( long "proxy-timeout" <> short 'x' <> value 0 <> metavar "N"
<> help "Devel server timeout before returning 'not ready' message (in seconds, 0 for none)" )
<*> switch ( long "disable-reverse-proxy" <> short 'n'
<> help "Disable reverse proxy" )
<*> switch ( long "interrupt-only" <> short 'c'
<> help "Disable exiting when enter is pressed")
extraCabalArgs :: Parser [String]
extraCabalArgs = many (strOption ( long "extra-cabal-arg" <> short 'e' <> metavar "ARG"
<> help "pass extra argument ARG to cabal")
)
addHandlerOptions :: Parser Command
addHandlerOptions = AddHandler
<$> optStr ( long "route" <> short 'r' <> metavar "ROUTE"
<> help "Name of route (without trailing R). Required.")
<*> optStr ( long "pattern" <> short 'p' <> metavar "PATTERN"
<> help "Route pattern (ex: /entry/#EntryId). Defaults to \"\".")
<*> many (strOption ( long "method" <> short 'm' <> metavar "METHOD"
<> help "Takes one method. Use this multiple times to add multiple methods. Defaults to none.")
)
-- | Optional @String@ argument
optStr :: Mod OptionFields (Maybe String) -> Parser (Maybe String)
optStr m = option (Just <$> str) $ value Nothing <> m
-- | Like @rawSystem@, but exits if it receives a non-success result.
rawSystem' :: String -> [String] -> IO ()
rawSystem' x y = do
res <- rawSystem x y
unless (res == ExitSuccess) $ exitWith res
| ygale/yesod | yesod-bin/main.hs | mit | 10,979 | 0 | 21 | 4,471 | 2,324 | 1,189 | 1,135 | 185 | 14 |
{- |
Module : ./Syntax/Print_AS_Structured.hs
Description : pretty printing of CASL structured specifications
Copyright : (c) Klaus Luettich, Uni Bremen 2002-2006
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : non-portable(Grothendieck)
Pretty printing of CASL structured specifications
-}
module Syntax.Print_AS_Structured
( structIRI
, printGroupSpec
, skipVoidGroup
, printUnion
, printExtension
, moveAnnos
, PrettyLG (..)
) where
import Common.Id
import Common.IRI
import Common.Keywords
import Common.Doc
import Common.DocUtils
import Common.AS_Annotation
import Logic.Grothendieck
import Logic.Logic
import Syntax.AS_Structured
sublogicId :: SIMPLE_ID -> Doc
sublogicId = structId . tokStr
structIRI :: IRI -> Doc
structIRI = structId . iriToStringShortUnsecure -- also print user information
class PrettyLG a where
prettyLG :: LogicGraph -> a -> Doc
instance PrettyLG a => PrettyLG (Annoted a) where
prettyLG lg = printAnnoted $ prettyLG lg
instance PrettyLG SPEC where
prettyLG = printSPEC
printUnion :: LogicGraph -> [Annoted SPEC] -> [Doc]
printUnion lg = prepPunctuate (topKey andS <> space) . map (condBracesAnd lg)
printIntersection :: LogicGraph -> [Annoted SPEC] -> [Doc]
printIntersection lg = prepPunctuate (topKey intersectS <> space) . map (condBracesAnd lg)
moveAnnos :: Annoted SPEC -> [Annoted SPEC] -> [Annoted SPEC]
moveAnnos x l = appAnno $ case l of
[] -> error "moveAnnos"
h : r -> h { l_annos = l_annos x ++ l_annos h } : r
where appAnno a = case a of
[] -> []
[h] -> [appendAnno h (r_annos x)]
h : r -> h : appAnno r
printOptUnion :: LogicGraph -> Annoted SPEC -> [Doc]
printOptUnion lg x = case skipVoidGroup $ item x of
Union e@(_ : _) _ -> printUnion lg $ moveAnnos x e
Extension e@(_ : _) _ -> printExtension lg $ moveAnnos x e
_ -> [prettyLG lg x]
printExtension :: LogicGraph -> [Annoted SPEC] -> [Doc]
printExtension lg l = case l of
[] -> []
x : r -> printOptUnion lg x ++
concatMap ((\ u -> case u of
[] -> []
d : s -> (topKey thenS <+> d) : s) .
printOptUnion lg) r
printSPEC :: LogicGraph -> SPEC -> Doc
printSPEC lg spec = case spec of
Basic_spec (G_basic_spec lid basic_spec) _ ->
case lookupCurrentSyntax "" lg of
Just (Logic lid2, sm) -> if language_name lid2 /= language_name lid
then error "printSPEC: logic mismatch"
else case basicSpecPrinter sm lid of
Just p -> p basic_spec
_ -> error $ "printSPEC: no basic spec printer for "
++ showSyntax lid sm
_ -> error "printSPEC: incomplete logic graph"
EmptySpec _ -> specBraces empty
Extraction aa ab -> sep [condBracesTransReduct lg aa, printEXTRACTION ab]
Translation aa ab -> sep [condBracesTransReduct lg aa, printRENAMING ab]
Reduction aa ab -> sep [condBracesTransReduct lg aa, printRESTRICTION ab]
Approximation aa ab ->
sep [condBracesTransReduct lg aa, printAPPROXIMATION ab]
Minimization aa ab ->
sep [condBracesTransReduct lg aa, printMINIMIZATION ab]
Filtering aa ab -> sep [condBracesTransReduct lg aa, printFILTERING ab]
Union aa _ -> sep $ printUnion lg aa
Intersection aa _ -> sep $ printIntersection lg aa
Extension aa _ -> sep $ printExtension lg aa
Free_spec aa _ -> sep [keyword freeS, printGroupSpec lg aa]
Cofree_spec aa _ -> sep [keyword cofreeS, printGroupSpec lg aa]
Minimize_spec aa _ -> sep [keyword minimizeS, printGroupSpec lg aa]
Local_spec aa ab _ -> fsep
[keyword localS, prettyLG lg aa, keyword withinS, condBracesWithin lg ab]
Closed_spec aa _ -> sep [keyword closedS, printGroupSpec lg aa]
Group aa _ -> prettyLG lg aa
Spec_inst aa ab mi _ -> let
r = cat [structIRI aa, print_fit_arg_list lg ab]
in maybe r (\ i -> sep [r, pretty i]) mi
Qualified_spec ln asp _ -> pretty ln <> colon
$+$ prettyLG (setLogicName ln lg) asp
Data ld _ s1 s2 _ -> keyword dataS
<+> printGroupSpec (setCurLogic (show ld) lg) s1
$+$ prettyLG lg s2
Combination n _ -> sep [keyword combineS, pretty n]
Apply i bs _ ->
sep [keyword "apply" <+> pretty i, prettyLG lg $ Basic_spec bs nullRange]
Bridge s1 rs s2 _ -> fsep $ [condBraces lg s1, keyword "bridge"]
++ map pretty rs ++ [condBraces lg s2]
instance Pretty Network where
pretty (Network cs es _) = fsep $ ppWithCommas cs
: if null es then [] else [keyword excludingS, ppWithCommas es]
instance Pretty FILTERING where
pretty = printFILTERING
printFILTERING :: FILTERING -> Doc
printFILTERING (FilterBasicSpec b aa _) =
keyword (if b then selectS else rejectS) <+> pretty aa
printFILTERING (FilterSymbolList b aa _) =
keyword (if b then selectS else rejectS) <+> pretty aa
instance Pretty MINIMIZATION where
pretty = printMINIMIZATION
printMINIMIZATION :: MINIMIZATION -> Doc
printMINIMIZATION (Mini kw cms cvs _) =
fsep $ keyword (tokStr kw) : map pretty cms ++ if null cvs then [] else
keyword "vars" : map pretty cvs
instance Pretty APPROXIMATION where
pretty = printAPPROXIMATION
printAPPROXIMATION :: APPROXIMATION -> Doc
printAPPROXIMATION (ForgetOrKeep b syms ml _) =
fsep $ keyword (if b then forgetS else keepS)
: ppWithCommas syms : maybe [] (\ i -> [keyword withS, pretty i]) ml
instance Pretty EXTRACTION where
pretty = printEXTRACTION
printEXTRACTION :: EXTRACTION -> Doc
printEXTRACTION (ExtractOrRemove b aa _) =
keyword (if b then "extract" else "remove") <+> fsep (map pretty aa)
instance Pretty RENAMING where
pretty = printRENAMING
printRENAMING :: RENAMING -> Doc
printRENAMING (Renaming aa _) =
keyword withS <+> ppWithCommas aa
instance Pretty RESTRICTION where
pretty = printRESTRICTION
printRESTRICTION :: RESTRICTION -> Doc
printRESTRICTION rest = case rest of
Hidden aa _ -> keyword hideS <+> ppWithCommas aa
Revealed aa _ -> keyword revealS <+> pretty aa
printLogicEncoding :: (Pretty a) => a -> Doc
printLogicEncoding enc = keyword logicS <+> pretty enc
instance Pretty G_mapping where
pretty = printG_mapping
printG_mapping :: G_mapping -> Doc
printG_mapping gma = case gma of
G_symb_map gsmil -> pretty gsmil
G_logic_translation enc -> printLogicEncoding enc
instance Pretty G_hiding where
pretty = printG_hiding
printG_hiding :: G_hiding -> Doc
printG_hiding ghid = case ghid of
G_symb_list gsil -> pretty gsil
G_logic_projection enc -> printLogicEncoding enc
instance PrettyLG FIT_ARG where
prettyLG = printFIT_ARG
printFIT_ARG :: LogicGraph -> FIT_ARG -> Doc
printFIT_ARG lg fit = case fit of
Fit_spec aa ab _ ->
let aa' = rmTopKey $ prettyLG lg aa
in if null ab then aa' else
fsep $ aa' : keyword fitS
: punctuate comma (map printG_mapping ab)
Fit_view si ab _ ->
sep [keyword viewS, cat [structIRI si, print_fit_arg_list lg ab]]
instance Pretty Logic_code where
pretty = printLogic_code
printLogic_code :: Logic_code -> Doc
printLogic_code (Logic_code menc msrc mtar _) =
let pm = maybe [] ((: []) . printLogic_name) in
fsep $ maybe [] ((: [colon]) . pretty) menc
++ pm msrc ++ funArrow : pm mtar
instance Pretty LogicDescr where
pretty ld = case ld of
LogicDescr n s _ -> sep [keyword logicS, pretty n,
maybe empty (\ r -> sep [keyword serializationS, pretty r]) s]
SyntaxQual i -> sep [keyword serializationS, pretty i]
LanguageQual i -> sep [keyword "language", pretty i]
instance Pretty Logic_name where
pretty = printLogic_name
printLogic_name :: Logic_name -> Doc
printLogic_name (Logic_name mlog slog ms) = let d = structId mlog in
case slog of
Nothing -> d
Just sub -> d <> dot <> sublogicId sub
<> maybe empty (parens . pretty) ms
instance Pretty LABELED_ONTO_OR_INTPR_REF where
pretty = printLIRI
printLIRI :: LABELED_ONTO_OR_INTPR_REF -> Doc
printLIRI (Labeled n i) = case n of
Just x -> pretty x <+> colon <+> pretty i
Nothing -> pretty i
{- |
specialized printing of 'FIT_ARG's
-}
print_fit_arg_list :: LogicGraph -> [Annoted FIT_ARG] -> Doc
print_fit_arg_list lg = cat . map (brackets . prettyLG lg)
{- |
conditional generation of grouping braces for Union and Extension
-}
printGroupSpec :: LogicGraph -> Annoted SPEC -> Doc
printGroupSpec lg s = let d = prettyLG lg s in
case skip_Group $ item s of
Spec_inst {} -> d
_ -> specBraces d
{- |
generate grouping braces for Translations and Reductions
-}
condBracesTransReduct :: LogicGraph -> Annoted SPEC -> Doc
condBracesTransReduct lg s = let d = prettyLG lg s in
case skip_Group $ item s of
Bridge {} -> specBraces d
Extension {} -> specBraces d
Union {} -> specBraces d
Intersection {} -> specBraces d
Local_spec {} -> specBraces d
_ -> d
{- |
generate grouping braces for Within
-}
condBracesWithin :: LogicGraph -> Annoted SPEC -> Doc
condBracesWithin lg s = let d = prettyLG lg s in
case skip_Group $ item s of
Bridge {} -> specBraces d
Extension {} -> specBraces d
Union {} -> specBraces d
Intersection {} -> specBraces d
_ -> d
{- |
only Extensions inside of Unions (and) need grouping braces
-}
condBracesAnd :: LogicGraph -> Annoted SPEC -> Doc
condBracesAnd lg s = let d = prettyLG lg s in
case skip_Group $ item s of
Bridge {} -> specBraces d
Extension {} -> specBraces d
_ -> d
-- bridges inside bridges need grouping
condBraces :: LogicGraph -> Annoted SPEC -> Doc
condBraces lg s = let d = prettyLG lg s in
case skip_Group $ item s of
Bridge {} -> specBraces d
_ -> d
-- | only skip groups without annotations
skipVoidGroup :: SPEC -> SPEC
skipVoidGroup sp =
case sp of
Group g _ | null (l_annos g) && null (r_annos g)
-> skipVoidGroup $ item g
_ -> sp
-- | skip nested groups
skip_Group :: SPEC -> SPEC
skip_Group sp =
case sp of
Group g _ -> skip_Group $ item g
_ -> sp
| spechub/Hets | Syntax/Print_AS_Structured.hs | gpl-2.0 | 10,509 | 0 | 20 | 2,766 | 3,447 | 1,691 | 1,756 | 231 | 26 |
module PrettyJSON
(
renderJValue
) where
import Data.Bits ((.&.), shiftR)
import Numeric (showHex)
import Data.Char (ord)
import Prettify (Doc, (<>), char, double, fsep, hcat, punctuate, text,
compact, pretty)
import SimpleJSON (JValue(..))
renderJValue :: JValue -> Doc
renderJValue (JBool True) = text "true"
renderJValue (JBool False) = text "false"
renderJValue JNull = text "null"
renderJValue (JNumber num) = double num
renderJValue (JString str) = string str
renderJValue (JArray ary) = series '[' ']' renderJValue ary
renderJValue (JObject obj) = series '{' '}' field obj
where field (name,val) = string name
<> text ": "
<> renderJValue val
string :: String -> Doc
string = enclose '"' '"' . hcat . map oneChar
enclose :: Char -> Char -> Doc -> Doc
enclose left right x = char left <> x <> char right
oneChar :: Char -> Doc
oneChar c = case lookup c simpleEscapes of
Just r -> text r
Nothing | mustEscape -> hexEscape c
| otherwise -> char c
where mustEscape = c < ' ' || c == '\x7f' || c > '\xff'
simpleEscapes :: [(Char, String)]
simpleEscapes = zipWith ch "\b\n\f\r\t\\\"/" "bnfrt\\\"/"
where ch a b = (a, ['\\',b])
smallHex :: Int -> Doc
smallHex x = text "\\u"
<> text (replicate (4 - length h) '0')
<> text h
where h = showHex x ""
astral :: Int -> Doc
astral n = smallHex (a + 0xd800) <> smallHex (b + 0xdc00)
where a = (n `shiftR` 10) .&. 0x3ff
b = n .&. 0x3ff
hexEscape :: Char -> Doc
hexEscape c | d < 0x10000 = smallHex d
| otherwise = astral (d - 0x10000)
where d = ord c
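-- For example (illustrative): oneChar 'a' renders as the character itself,
-- while oneChar '\x7f' falls through to hexEscape and renders as \u007f.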
series :: Char -> Char -> (a -> Doc) -> [a] -> Doc
series open close item = enclose open close
. fsep . punctuate (char ',') . map item
| timstclair/experimental | haskell/real_world_haskell/ch05/PrettyJSON.hs | unlicense | 1,883 | 0 | 12 | 560 | 738 | 381 | 357 | 49 | 2 |
{-# LANGUAGE ScopedTypeVariables #-}
module Args
( BuildArg(..)
, buildArgs)
where
import System.Console.ParseArgs
-- Command line args for the buildbot.
data BuildArg
= ArgHelp
| ArgVerbose
-- Automated builds
| ArgDaily
| ArgDailyNow
| ArgDailyTomorrow
-- Building GHC and libs.
| ArgScratchDir
| ArgGhcUnpack
| ArgGhcBuild
| ArgGhcUnpackBuild
| ArgGhcUse
| ArgLibs
-- Testing DPH and Repa
| ArgDoTestDPH
| ArgDoTestRepa
| ArgDoTestNoSlow
| ArgUseDPH
| ArgUseRepa
| ArgUseNoSlow
| ArgTestIterations
| ArgMailFrom
| ArgMailTo
| ArgMailFailTo
| ArgMailBanner
| ArgMailBranchName
| ArgSendTestMail
| ArgWriteResults
| ArgWriteResultsStamped
| ArgUploadResults
| ArgAgainstResults
| ArgSwingFraction
deriving (Eq, Ord, Show)
buildArgs :: [Arg BuildArg]
buildArgs
= [ Arg { argIndex = ArgHelp
, argAbbr = Just 'h'
, argName = Just "help"
, argData = Nothing
, argDesc = "Print this usage help." }
, Arg { argIndex = ArgVerbose
, argAbbr = Just 'v'
, argName = Just "verbose"
, argData = Nothing
, argDesc = "Verbose logging of build commands." }
-- Automated builds
, Arg { argIndex = ArgDaily
, argAbbr = Nothing
, argName = Just "daily"
, argData = argDataOptional "time" ArgtypeString
, argDesc = "Run the build commands every day at this time. fmt: HH:MM:SS" }
, Arg { argIndex = ArgDailyNow
, argAbbr = Nothing
, argName = Just "now"
, argData = Nothing
, argDesc = "(opt. for --daily) Also run the build right now." }
, Arg { argIndex = ArgDailyTomorrow
, argAbbr = Nothing
, argName = Just "tomorrow"
, argData = Nothing
, argDesc = "(opt. for --daily) Run the first build tomorrow." }
-- Building GHC and libs.
, Arg { argIndex = ArgScratchDir
, argAbbr = Nothing
, argName = Just "scratch"
, argData = argDataOptional "dir" ArgtypeString
, argDesc = "For --ghc-unpack and --ghc-unpack-build, where to put the unpacked tree." }
, Arg { argIndex = ArgGhcUnpack
, argAbbr = Nothing
, argName = Just "ghc-unpack"
, argData = argDataOptional "file" ArgtypeString
, argDesc = "Unpack this GHC snapshot and update it from darcs.haskell.org." }
, Arg { argIndex = ArgGhcBuild
, argAbbr = Nothing
, argName = Just "ghc-build"
, argData = argDataOptional "dir" ArgtypeString
, argDesc = "Build an already unpacked and updated GHC snapshot." }
, Arg { argIndex = ArgGhcUnpackBuild
, argAbbr = Nothing
, argName = Just "ghc-unpack-build"
, argData = argDataOptional "file" ArgtypeString
, argDesc = "Unpack this GHC snapshot, update, and build it." }
, Arg { argIndex = ArgGhcUse
, argAbbr = Nothing
, argName = Just "ghc-use"
, argData = argDataOptional "dir" ArgtypeString
, argDesc = "Use the previously built GHC in this dir" }
, Arg { argIndex = ArgLibs
, argAbbr = Nothing
, argName = Just "ghc-libs"
, argData = argDataOptional "spec" ArgtypeString
, argDesc = "Install some libraries into the GHC build" }
-- Testing DPH and Repa
, Arg { argIndex = ArgDoTestDPH
, argAbbr = Nothing
, argName = Just "test-dph"
, argData = Nothing
, argDesc = "Run DPH regression tests." }
, Arg { argIndex = ArgDoTestRepa
, argAbbr = Nothing
, argName = Just "test-repa"
, argData = Nothing
, argDesc = "Run Repa regression tests." }
, Arg { argIndex = ArgDoTestNoSlow
, argAbbr = Nothing
, argName = Just "test-noslow"
, argData = Nothing
, argDesc = "Run NoSlow regression tests." }
, Arg { argIndex = ArgUseDPH
, argAbbr = Nothing
, argName = Just "use-dph"
, argData = argDataOptional "dir" ArgtypeString
, argDesc = "Use this DPH repo for testing." }
, Arg { argIndex = ArgUseRepa
, argAbbr = Nothing
, argName = Just "use-repa"
, argData = argDataOptional "dir" ArgtypeString
, argDesc = "Use this Repa repo for testing." }
, Arg { argIndex = ArgUseNoSlow
, argAbbr = Nothing
, argName = Just "use-noslow"
, argData = argDataOptional "dir" ArgtypeString
, argDesc = "Use this NoSlow repo for testing." }
, Arg { argIndex = ArgTestIterations
, argAbbr = Just 'i'
, argName = Just "iterations"
, argData = argDataDefaulted "int" ArgtypeInt 1
, argDesc = "(opt. for test modes) Number of times to run each benchmark." }
, Arg { argIndex = ArgAgainstResults
, argAbbr = Just 'a'
, argName = Just "against"
, argData = argDataOptional "file" ArgtypeString
, argDesc = "(opt. for test modes) Print running comparison against results in this file." }
, Arg { argIndex = ArgSwingFraction
, argAbbr = Just 's'
, argName = Just "swing"
, argData = argDataOptional "fraction" ArgtypeDouble
, argDesc = "(opt. for test modes) Treat a fractional swing vs the baseline as interesting (eg 0.1)" }
, Arg { argIndex = ArgWriteResults
, argAbbr = Just 'w'
, argName = Just "write"
, argData = argDataOptional "file" ArgtypeString
, argDesc = "(opt. for test modes) Write results to this file." }
, Arg { argIndex = ArgWriteResultsStamped
, argAbbr = Just 'p'
, argName = Just "write-stamped"
, argData = argDataOptional "file" ArgtypeString
, argDesc = "(opt. for test modes) ... appending a time stamp to the name." }
, Arg { argIndex = ArgUploadResults
, argAbbr = Just 'u'
, argName = Just "upload"
, argData = argDataOptional "scp-path" ArgtypeString
, argDesc = "(opt. for test modes) ... and scp the results to this path." }
, Arg { argIndex = ArgMailFrom
, argAbbr = Nothing
, argName = Just "mail-from"
, argData = argDataOptional "address" ArgtypeString
, argDesc = "(opt. for test modes) Send test results from this address." }
, Arg { argIndex = ArgMailTo
, argAbbr = Nothing
, argName = Just "mail-to"
, argData = argDataOptional "address" ArgtypeString
, argDesc = "(opt. for test modes) ... to this address." }
, Arg { argIndex = ArgMailFailTo
, argAbbr = Nothing
, argName = Just "mail-fail-to"
, argData = argDataOptional "address" ArgtypeString
, argDesc = "(opt. for test modes) ... but send failure messages to this other address." }
, Arg { argIndex = ArgMailBanner
, argAbbr = Nothing
, argName = Just "mail-banner"
, argData = argDataOptional "file" ArgtypeString
, argDesc = "(opt. for test modes) ... appending the banner to the front of the message." }
, Arg { argIndex = ArgMailBranchName
, argAbbr = Nothing
, argName = Just "mail-branch-name"
, argData = argDataOptional "name" ArgtypeString
, argDesc = "(opt. for test modes) ... putting this branch name in the subject." }
-- Setup debugging
, Arg { argIndex = ArgSendTestMail
, argAbbr = Nothing
, argName = Just "send-test-mail"
, argData = Nothing
, argDesc = "Send a test mail to check mailer configuration." }
]
| mainland/dph | dph-buildbot/src/Args.hs | bsd-3-clause | 7,126 | 310 | 8 | 1,847 | 1,696 | 1,003 | 693 | 183 | 1 |
{-# LANGUAGE TypeFamilies #-}
module Tuura.Concept.Abstract (
Concept (..),
initialConcept, excitedConcept, invariantConcept,
(.&&.), (.||.),
quiescent
) where
import Control.Applicative
-- Abstract concepts
-- * s is the type of states
-- * e is the type of events
data Concept s e = Concept
{
initial :: s -> Bool,
excited :: e -> s -> Bool,
invariant :: s -> Bool
}
-- Concepts form a monoid:
-- * the empty concept permits everything
-- * two concepts are combined by AND-ing all predicates
instance Monoid (Concept s e) where
mempty = Concept
{
initial = const True,
excited = const $ const True,
invariant = const True
}
mappend a b = Concept
{
initial = initial a .&&. initial b,
excited = \e -> excited a e .&&. excited b e,
invariant = invariant a .&&. invariant b
}
excitedConcept :: (e -> s -> Bool) -> Concept s e
excitedConcept f = mempty { excited = f }
initialConcept :: (s -> Bool) -> Concept s e
initialConcept f = mempty { initial = f }
invariantConcept :: (s -> Bool) -> Concept s e
invariantConcept f = mempty { invariant = f }
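-- Usage sketch (illustrative, not part of the original module): a concept
-- over Int states whose initial state is zero and whose invariant keeps
-- every state non-negative, built by combining two primitive concepts.
--
-- > nonNegative :: Concept Int e
-- > nonNegative = initialConcept (== 0) `mappend` invariantConcept (>= 0)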
(.&&.) :: (a -> Bool) -> (a -> Bool) -> a -> Bool
(.&&.) = liftA2 (&&)
(.||.) :: (a -> Bool) -> (a -> Bool) -> a -> Bool
(.||.) = liftA2 (||)
quiescent :: Concept s e -> e -> s -> Bool
quiescent c e = not . excited c e
| tuura/concepts | src/Tuura/Concept/Abstract.hs | bsd-3-clause | 1,616 | 0 | 10 | 601 | 464 | 264 | 200 | 32 | 1 |
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_HADDOCK show-extensions #-}
-- |
-- Module : Yi.Misc
-- License : GPL-2
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
--
-- Various high-level functions to further classify.
module Yi.Misc ( getAppropriateFiles, getFolder, cd, pwd, matchingFileNames
, rot13Char, placeMark, selectAll, adjBlock, adjIndent
, promptFile , promptFileChangingHints, matchFile, completeFile
, printFileInfoE, debugBufferContent
) where
import Control.Applicative ((<$>), (<*>))
import Control.Monad (filterM, (>=>))
import Control.Monad.Base (liftBase)
import Data.Char (chr, isAlpha, isLower, isUpper, ord)
import Data.List ((\\))
import Data.Maybe (isNothing)
import qualified Data.Text as T (Text, append, concat, isPrefixOf,
pack, stripPrefix, unpack)
import System.CanonicalizePath (canonicalizePath, replaceShorthands, replaceShorthands)
import System.Directory (doesDirectoryExist,
getCurrentDirectory,
getDirectoryContents,
setCurrentDirectory)
import System.Environment (lookupEnv)
import System.FilePath (addTrailingPathSeparator,
hasTrailingPathSeparator,
takeDirectory, takeFileName, (</>))
import System.FriendlyPath (expandTilda, isAbsolute')
import Yi.Buffer
import Yi.Completion (completeInList')
import Yi.Editor (EditorM, printMsg, withCurrentBuffer)
import Yi.Keymap (YiM)
import Yi.MiniBuffer (debugBufferContent, mkCompleteFn, withMinibufferGen)
import Yi.Monad (gets)
import qualified Yi.Rope as R (fromText)
import Yi.Utils (io)
-- | Given a possible starting path (which if not given defaults to
-- the current directory) and a fragment of a path we find all files
-- within the given (or current) directory which can complete the
-- given path fragment. We return a pair of both directory plus the
-- filenames on their own that is without their directories. The
-- reason for this is that if we return all of the filenames then we
-- get a 'hint' which is way too long to be particularly useful.
getAppropriateFiles :: Maybe T.Text -> T.Text -> YiM (T.Text, [ T.Text ])
getAppropriateFiles start s' = do
curDir <- case start of
Nothing -> do bufferPath <- withCurrentBuffer $ gets file
liftBase $ getFolder bufferPath
Just path -> return $ T.unpack path
let s = T.unpack $ replaceShorthands s'
sDir = if hasTrailingPathSeparator s then s else takeDirectory s
searchDir
| null sDir = curDir
| isAbsolute' sDir = sDir
| otherwise = curDir </> sDir
searchDir' <- liftBase $ expandTilda searchDir
let fixTrailingPathSeparator f = do
isDir <- doesDirectoryExist (searchDir' </> f)
return . T.pack $ if isDir then addTrailingPathSeparator f else f
files <- liftBase $ getDirectoryContents searchDir'
-- Remove the two standard current-dir and parent-dir as we do not
-- need to complete or hint about these as they are known by users.
let files' = files \\ [ ".", ".." ]
fs <- liftBase $ mapM fixTrailingPathSeparator files'
let matching = filter (T.isPrefixOf . T.pack $ takeFileName s) fs
return (T.pack sDir, matching)
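-- For instance (illustrative): given the fragment "src/Ma", the result would
-- be ("src", hints), where hints are the entries of the "src" directory whose
-- names start with "Ma", each directory hint carrying a trailing path
-- separator.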
-- | Given a path, trim the file name bit if it exists. If no path
-- given, return current directory.
getFolder :: Maybe String -> IO String
getFolder Nothing = getCurrentDirectory
getFolder (Just path) = do
isDir <- doesDirectoryExist path
let dir = if isDir then path else takeDirectory path
if null dir then getCurrentDirectory else return dir
-- | Given a possible path and a prefix, return matching file names.
matchingFileNames :: Maybe T.Text -> T.Text -> YiM [T.Text]
matchingFileNames start s = do
(sDir, files) <- getAppropriateFiles start s
-- There is one common case when we don't need to prepend @sDir@ to @files@:
--
-- Suppose user just wants to edit a file "foobar" in current directory
-- and inputs ":e foo<Tab>"
--
-- @sDir@ in this case equals to "." and "foo" would not be
-- a prefix of ("." </> "foobar"), resulting in a failed completion
--
-- However, if user inputs ":e ./foo<Tab>", we need to prepend @sDir@ to @files@
let results = if isNothing start && sDir == "." && not ("./" `T.isPrefixOf` s)
then files
else fmap (T.pack . (T.unpack sDir </>) . T.unpack) files
return results
-- | Place mark at current point. If there's an existing mark at point
-- already, deactivate mark.
placeMark :: BufferM ()
placeMark = (==) <$> pointB <*> getSelectionMarkPointB >>= \case
True -> setVisibleSelection False
False -> setVisibleSelection True >> pointB >>= setSelectionMarkPointB
-- | Select the contents of the whole buffer
selectAll :: BufferM ()
selectAll = botB >> placeMark >> topB >> setVisibleSelection True
adjBlock :: Int -> BufferM ()
adjBlock x = withSyntaxB' (\m s -> modeAdjustBlock m s x)
-- | A simple wrapper to adjust the current indentation using
-- the mode specific indentation function but according to the
-- given indent behaviour.
adjIndent :: IndentBehaviour -> BufferM ()
adjIndent ib = withSyntaxB' (\m s -> modeIndent m s ib)
-- | Generic emacs style prompt file action. Takes a @prompt@ and a continuation
-- @act@ and prompts the user with file hints.
promptFile :: T.Text -> (T.Text -> YiM ()) -> YiM ()
promptFile prompt act = promptFileChangingHints prompt (const return) act
-- | As 'promptFile' but additionally allows the caller to transform
-- the list of hints arbitrarily, such as only showing directories.
promptFileChangingHints :: T.Text -- ^ Prompt
-> (T.Text -> [T.Text] -> YiM [T.Text])
-- ^ Hint transformer: current path, generated hints
-> (T.Text -> YiM ()) -- ^ Action over choice
-> YiM ()
promptFileChangingHints prompt ht act = do
maybePath <- withCurrentBuffer $ gets file
startPath <- T.pack . addTrailingPathSeparator
<$> liftBase (canonicalizePath =<< getFolder maybePath)
-- TODO: Just call withMinibuffer
withMinibufferGen startPath (\x -> findFileHint startPath x >>= ht x) prompt
(completeFile startPath) showCanon (act . replaceShorthands)
where
showCanon = withCurrentBuffer . replaceBufferContent . R.fromText . replaceShorthands
matchFile :: T.Text -> T.Text -> Maybe T.Text
matchFile path proposedCompletion =
let realPath = replaceShorthands path
in T.append path <$> T.stripPrefix realPath proposedCompletion
completeFile :: T.Text -> T.Text -> YiM T.Text
completeFile startPath =
mkCompleteFn completeInList' matchFile $ matchingFileNames (Just startPath)
-- | For use as the hint when opening a file using the minibuffer. We
-- essentially return all the files in the given directory which have
-- the given prefix.
findFileHint :: T.Text -> T.Text -> YiM [T.Text]
findFileHint startPath s = snd <$> getAppropriateFiles (Just startPath) s
onCharLetterCode :: (Int -> Int) -> Char -> Char
onCharLetterCode f c | isAlpha c = chr (f (ord c - a) `mod` 26 + a)
| otherwise = c
where a | isUpper c = ord 'A'
| isLower c = ord 'a'
| otherwise = undefined
-- | Like @M-x cd@, it changes the current working directory. Mighty
-- useful when we don't start Yi from the project directory or want to
-- switch projects, as many tools only use the current working
-- directory.
cd :: YiM ()
cd = promptFileChangingHints "switch directory to:" dirs $ \path ->
io $ getFolder (Just $ T.unpack path) >>= clean . T.pack
>>= System.Directory.setCurrentDirectory . addTrailingPathSeparator
where
replaceHome p@('~':'/':xs) = lookupEnv "HOME" >>= return . \case
Nothing -> p
Just h -> h </> xs
replaceHome p = return p
clean = replaceHome . T.unpack . replaceShorthands >=> canonicalizePath
x <//> y = T.pack $ takeDirectory (T.unpack x) </> T.unpack y
dirs :: T.Text -> [T.Text] -> YiM [T.Text]
dirs x xs = do
xsc <- io $ mapM (\y -> (,y) <$> clean (x <//> y)) xs
filterM (io . doesDirectoryExist . fst) xsc >>= return . map snd
-- | Shows current working directory. Also see 'cd'.
pwd :: YiM ()
pwd = io getCurrentDirectory >>= printMsg . T.pack
rot13Char :: Char -> Char
rot13Char = onCharLetterCode (+13)
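-- Examples (illustrative): rot13Char 'a' == 'n' and rot13Char 'N' == 'A';
-- non-alphabetic characters such as '3' are returned unchanged.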
printFileInfoE :: EditorM ()
printFileInfoE = printMsg . showBufInfo =<< withCurrentBuffer bufInfoB
where showBufInfo :: BufferFileInfo -> T.Text
showBufInfo bufInfo = T.concat
[ T.pack $ bufInfoFileName bufInfo
, " Line "
, T.pack . show $ bufInfoLineNo bufInfo
, " ["
, bufInfoPercent bufInfo
, "]"
]
| TOSPIO/yi | src/library/Yi/Misc.hs | gpl-2.0 | 9,523 | 0 | 17 | 2,595 | 2,113 | 1,117 | 996 | -1 | -1 |
{-# language GeneralizedNewtypeDeriving, MultiParamTypeClasses, FlexibleInstances,
DeriveDataTypeable, DeriveFunctor, DeriveFoldable, TypeSynonymInstances #-}
-- | version of the save type from 2011-03-25
module Legacy.Old2 where
import Data.Data
import qualified Data.IntMap
import Data.Convertable
import Data.Foldable
import qualified Base.Types
import qualified Editor.Pickle.Types as Newer
type SaveType = Grounds PickleObject
data Grounds a = Grounds {
backgrounds :: Indexable (Layer a),
mainLayer :: Layer a,
foregrounds :: Indexable (Layer a)
}
deriving (Show, Read, Data, Typeable)
instance Convertable SaveType (Newer.SaveType) where
convert (Grounds bgs ml fgs) =
Newer.PGrounds_1
(convert $ toList bgs)
(convert $ content ml)
(convert $ toList fgs)
data Indexable a = Indexable {
values :: Data.IntMap.IntMap a,
keys :: [Index]
}
deriving (Show, Read, Data, Typeable, Foldable)
instance Convertable (Indexable PickleObject) [(Int, Newer.PObject)] where
convert (Indexable values keys) =
fmap (\ k -> (index k, convert (values Data.IntMap.! index k))) keys
newtype Index = Index {index :: Int}
deriving (Show, Read, Enum, Num, Eq, Integral, Real, Ord, Data, Typeable)
data Layer a = Layer {
content :: (Indexable a),
xDistance :: Double,
yDistance :: Double
}
deriving (Show, Read, Data, Typeable)
instance Convertable (Layer PickleObject) Newer.PLayer where
convert (Layer content xd yd) =
Newer.PLayer_1 (convert $ toList content) xd yd
data PickleObject = PickleObject {
pickleSortId :: SortId,
picklePosition :: EditorPosition,
pickleOEMState :: Maybe String
}
deriving (Read, Show)
instance Convertable PickleObject Newer.PObject where
convert (PickleObject sortId pos oemState) =
Newer.PObject_1 (convert sortId) (convert pos) oemState
newtype SortId = SortId {getSortId :: FilePath}
deriving (Show, Read, Eq)
instance Convertable SortId Base.Types.SortId where
convert (SortId x) = Base.Types.SortId x
data EditorPosition = EditorPosition {
editorX :: Double,
editorY :: Double
}
deriving (Show, Read, Eq, Typeable, Data)
instance Convertable EditorPosition Base.Types.EditorPosition where
convert (EditorPosition x y) =
Base.Types.EditorPosition x y
| geocurnoff/nikki | src/Legacy/Old2.hs | lgpl-3.0 | 2,377 | 0 | 13 | 493 | 726 | 398 | 328 | 57 | 0 |
--
-- Copyright (c) 2012 Citrix Systems, Inc.
--
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, write to the Free Software
-- Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
--
{-# LANGUAGE MultiParamTypeClasses, FlexibleInstances, GeneralizedNewtypeDeriving #-}
module XenMgr.Rpc
( Rpc
, rpc
, LiftRpc (..)
, module Rpc.Core
) where
import Control.Applicative
import Control.Monad.Error
import qualified Control.Exception as E
import Rpc.Core
import XenMgr.Errors
import Tools.FreezeIOM
newtype Rpc a = Rpc (RpcM XmError a)
deriving (Monad, MonadError XmError, MonadRpc XmError, FreezeIOM RpcContext (Either XmError))
rpc :: RpcContext -> Rpc a -> IO (Either XmError a)
rpc ctx (Rpc f) = runRpcM f ctx
instance Functor Rpc where
fmap = liftM
instance Applicative Rpc where
pure = return
(<*>) = ap
instance MonadIO Rpc where
liftIO act = Rpc $
do status <- liftIO $ E.try act
-- translate io errors into monadic version
tunnel status
where
tunnel :: Either E.SomeException a -> RpcM XmError a
tunnel (Right v) = return v
tunnel (Left err) = failIO $ show err
class LiftRpc m where
liftRpc :: Rpc a -> m a
instance LiftRpc Rpc where
liftRpc f = f
| jean-edouard/manager | xenmgr/XenMgr/Rpc.hs | gpl-2.0 | 1,863 | 0 | 12 | 424 | 354 | 195 | 159 | 32 | 1 |
{-# LANGUAGE NondecreasingIndentation #-}
-- Implements the \"@.\/cabal sdist@\" command, which creates a source
-- distribution for this package. That is, packs up the source code
-- into a tarball, making use of the corresponding Cabal module.
module Distribution.Client.SrcDist (
sdist,
allPackageSourceFiles
) where
import Distribution.Client.SetupWrapper
( SetupScriptOptions(..), defaultSetupScriptOptions, setupWrapper )
import Distribution.Client.Tar (createTarGzFile)
import Distribution.Package
( Package(..), packageName )
import Distribution.PackageDescription
( PackageDescription )
import Distribution.PackageDescription.Configuration
( flattenPackageDescription )
import Distribution.PackageDescription.Parse
( readPackageDescription )
import Distribution.Simple.Utils
( createDirectoryIfMissingVerbose, defaultPackageDesc
, warn, die, notice, withTempDirectory )
import Distribution.Client.Setup
( SDistFlags(..), SDistExFlags(..), ArchiveFormat(..) )
import Distribution.Simple.Setup
( Flag(..), sdistCommand, flagToList, fromFlag, fromFlagOrDefault
, defaultSDistFlags )
import Distribution.Simple.BuildPaths ( srcPref)
import Distribution.Simple.Program (requireProgram, simpleProgram, programPath)
import Distribution.Simple.Program.Db (emptyProgramDb)
import Distribution.Text ( display )
import Distribution.Verbosity (Verbosity, normal, lessVerbose)
import Distribution.Version (Version(..), orLaterVersion)
import Distribution.Client.Utils
(tryFindAddSourcePackageDesc)
import Distribution.Compat.Exception (catchIO)
import System.FilePath ((</>), (<.>))
import Control.Monad (when, unless, liftM)
import System.Directory (doesFileExist, removeFile, canonicalizePath, getTemporaryDirectory)
import System.Process (runProcess, waitForProcess)
import System.Exit (ExitCode(..))
import Control.Exception (IOException, evaluate)
-- |Create a source distribution.
sdist :: SDistFlags -> SDistExFlags -> IO ()
sdist flags exflags = do
pkg <- liftM flattenPackageDescription
(readPackageDescription verbosity =<< defaultPackageDesc verbosity)
let withDir = if not needMakeArchive then (\f -> f tmpTargetDir)
else withTempDirectory verbosity tmpTargetDir "sdist."
-- 'withTempDir' fails if we don't create 'tmpTargetDir'...
when needMakeArchive $
createDirectoryIfMissingVerbose verbosity True tmpTargetDir
withDir $ \tmpDir -> do
let outDir = if isOutDirectory then tmpDir else tmpDir </> tarBallName pkg
flags' = (if not needMakeArchive then flags
else flags { sDistDirectory = Flag outDir })
unless isListSources $
createDirectoryIfMissingVerbose verbosity True outDir
-- Run 'setup sdist --output-directory=tmpDir' (or
-- '--list-source'/'--output-directory=someOtherDir') in case we were passed
-- those options.
setupWrapper verbosity setupOpts (Just pkg) sdistCommand (const flags') []
-- Unless we were given --list-sources or --output-directory ourselves,
-- create an archive.
when needMakeArchive $
createArchive verbosity pkg tmpDir distPref
when isOutDirectory $
notice verbosity $ "Source directory created: " ++ tmpTargetDir
when isListSources $
notice verbosity $ "List of package sources written to file '"
++ (fromFlag . sDistListSources $ flags) ++ "'"
where
flagEnabled f = not . null . flagToList . f $ flags
isListSources = flagEnabled sDistListSources
isOutDirectory = flagEnabled sDistDirectory
needMakeArchive = not (isListSources || isOutDirectory)
verbosity = fromFlag (sDistVerbosity flags)
distPref = fromFlag (sDistDistPref flags)
tmpTargetDir = fromFlagOrDefault (srcPref distPref) (sDistDirectory flags)
setupOpts = defaultSetupScriptOptions {
-- The '--output-directory' sdist flag was introduced in Cabal 1.12, and
-- '--list-sources' in 1.17.
useCabalVersion = if isListSources
then orLaterVersion $ Version [1,17,0] []
else orLaterVersion $ Version [1,12,0] []
}
format = fromFlag (sDistFormat exflags)
createArchive = case format of
TargzFormat -> createTarGzArchive
ZipFormat -> createZipArchive
tarBallName :: PackageDescription -> String
tarBallName = display . packageId
-- | Create a tar.gz archive from a tree of source files.
createTarGzArchive :: Verbosity -> PackageDescription -> FilePath -> FilePath
-> IO ()
createTarGzArchive verbosity pkg tmpDir targetPref = do
createTarGzFile tarBallFilePath tmpDir (tarBallName pkg)
notice verbosity $ "Source tarball created: " ++ tarBallFilePath
where
tarBallFilePath = targetPref </> tarBallName pkg <.> "tar.gz"
-- | Create a zip archive from a tree of source files.
createZipArchive :: Verbosity -> PackageDescription -> FilePath -> FilePath
-> IO ()
createZipArchive verbosity pkg tmpDir targetPref = do
let dir = tarBallName pkg
zipfile = targetPref </> dir <.> "zip"
(zipProg, _) <- requireProgram verbosity zipProgram emptyProgramDb
-- zip has an annoying habit of updating the target rather than creating
-- it from scratch. While that might sound like an optimisation, it doesn't
-- remove files already in the archive that are no longer present in the
-- uncompressed tree.
alreadyExists <- doesFileExist zipfile
when alreadyExists $ removeFile zipfile
-- We call zip with a different CWD, so have to make the path
-- absolute. Can't just use 'canonicalizePath zipfile' since this function
-- requires its argument to refer to an existing file.
zipfileAbs <- fmap (</> dir <.> "zip") . canonicalizePath $ targetPref
--TODO: use runProgramInvocation, but has to be able to set CWD
hnd <- runProcess (programPath zipProg) ["-q", "-r", zipfileAbs, dir]
(Just tmpDir)
Nothing Nothing Nothing Nothing
exitCode <- waitForProcess hnd
unless (exitCode == ExitSuccess) $
die $ "Generating the zip file failed "
++ "(zip returned exit code " ++ show exitCode ++ ")"
notice verbosity $ "Source zip archive created: " ++ zipfile
where
zipProgram = simpleProgram "zip"
-- | List all source files of a given add-source dependency. Exits with error if
-- something is wrong (e.g. there is no .cabal file in the given directory).
allPackageSourceFiles :: Verbosity -> FilePath -> IO [FilePath]
allPackageSourceFiles verbosity packageDir = do
pkg <- do
let err = "Error reading source files of package."
desc <- tryFindAddSourcePackageDesc packageDir err
flattenPackageDescription `fmap` readPackageDescription verbosity desc
globalTmp <- getTemporaryDirectory
withTempDirectory verbosity globalTmp "cabal-list-sources." $ \tempDir -> do
let file = tempDir </> "cabal-sdist-list-sources"
flags = defaultSDistFlags {
sDistVerbosity = Flag $ if verbosity == normal
then lessVerbose verbosity else verbosity,
sDistListSources = Flag file
}
setupOpts = defaultSetupScriptOptions {
-- 'sdist --list-sources' was introduced in Cabal 1.18.
useCabalVersion = orLaterVersion $ Version [1,18,0] [],
useWorkingDir = Just packageDir
}
doListSources :: IO [FilePath]
doListSources = do
setupWrapper verbosity setupOpts (Just pkg) sdistCommand (const flags) []
fmap lines . readFile $ file
onFailedListSources :: IOException -> IO ()
onFailedListSources e = do
warn verbosity $
"Could not list sources of the package '"
++ display (packageName pkg) ++ "'."
warn verbosity $
"Exception was: " ++ show e
-- Run setup sdist --list-sources=TMPFILE
r <- doListSources `catchIO` (\e -> onFailedListSources e >> return [])
-- Ensure that we've closed the 'readFile' handle before we exit the
-- temporary directory.
_ <- evaluate (length r)
return r
| thomie/cabal | cabal-install/Distribution/Client/SrcDist.hs | bsd-3-clause | 8,259 | 0 | 20 | 1,870 | 1,637 | 878 | 759 | 131 | 6 |
-- #1814
module ShouldFail where
import Tcfail186_Help
foo = f "hoo" | sdiehl/ghc | testsuite/tests/typecheck/should_fail/tcfail186.hs | bsd-3-clause | 71 | 0 | 5 | 13 | 16 | 10 | 6 | 3 | 1 |
{-# OPTIONS -Wall #-}
{-# LANGUAGE OverloadedStrings #-}
module Data.Maybe.Extra where
-- | When a value is Just, do something with it, monadically.
whenJust :: Monad m => Maybe a -> (a -> m c) -> m ()
whenJust (Just a) m = m a >> return ()
whenJust _ _ = return ()
| plow-technologies/ircbrowse | src/Data/Maybe/Extra.hs | bsd-3-clause | 276 | 0 | 10 | 64 | 91 | 46 | 45 | 6 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TemplateHaskell #-}
module T15572 where
import Language.Haskell.TH
$([d| type AbsoluteUnit1 = '() |])
$(pure [TySynD (mkName "AbsoluteUnit2") [] (ConT '())])
| sdiehl/ghc | testsuite/tests/th/T15572.hs | bsd-3-clause | 200 | 0 | 11 | 28 | 60 | 35 | 25 | 6 | 0 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleContexts #-}
module Expr where
import Data.Data
import Data.Typeable
import Language.Haskell.TH as TH
import Language.Haskell.TH.Quote
import Text.ParserCombinators.Parsec
import Text.ParserCombinators.Parsec.Char
data Expr = IntExpr Integer
| AntiIntExpr String
| BinopExpr BinOp Expr Expr
| AntiExpr String
deriving(Typeable, Data)
data BinOp = AddOp
| SubOp
| MulOp
| DivOp
deriving(Typeable, Data)
eval :: Expr -> Integer
eval (IntExpr n) = n
eval (BinopExpr op x y) = (opToFun op) (eval x) (eval y)
where
opToFun AddOp = (+)
opToFun SubOp = (-)
opToFun MulOp = (*)
opToFun DivOp = (div)
small :: CharParser st Char
small = lower <|> char '_'
large = upper
idchar = small <|> large <|> digit <|> char '\''
lexeme p = do{ x <- p; spaces; return x }
symbol name = lexeme (string name)
parens p = between (symbol "(") (symbol ")") p
_expr :: CharParser st Expr
_expr = term `chainl1` mulop
term :: CharParser st Expr
term = factor `chainl1` addop
factor :: CharParser st Expr
factor = parens _expr <|> integer <|> anti
mulop = do{ symbol "*"; return $ BinopExpr MulOp }
<|> do{ symbol "/"; return $ BinopExpr DivOp }
addop = do{ symbol "+"; return $ BinopExpr AddOp }
<|> do{ symbol "-"; return $ BinopExpr SubOp }
integer :: CharParser st Expr
integer = lexeme $ do{ ds <- many1 digit ; return $ IntExpr (read ds) }
anti = lexeme $
do symbol "$"
c <- small
cs <- many idchar
return $ AntiIntExpr (c : cs)
parseExpr :: Monad m => TH.Loc -> String -> m Expr
parseExpr (Loc {loc_filename = file, loc_start = (line,col)}) s =
case runParser p () "" s of
Left err -> fail $ show err
Right e -> return e
where
p = do pos <- getPosition
setPosition $ setSourceName (setSourceLine (setSourceColumn pos col) line) file
spaces
e <- _expr
eof
return e
expr = QuasiQuoter { quoteExp = parseExprExp, quotePat = parseExprPat,
quoteType = undefined, quoteDec = undefined }
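-- Usage sketch (illustrative): in a module with QuasiQuotes enabled,
-- eval [expr|1+2|] evaluates to 3, and an antiquotation such as
-- let x = 5 in eval [expr|$x*2|] splices the Haskell variable x, giving 10.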
parseExprExp :: String -> Q Exp
parseExprExp s = do loc <- location
expr <- parseExpr loc s
dataToExpQ (const Nothing `extQ` antiExprExp) expr
antiExprExp :: Expr -> Maybe (Q Exp)
antiExprExp (AntiIntExpr v) = Just $ appE (conE (mkName "IntExpr"))
(varE (mkName v))
antiExprExp (AntiExpr v) = Just $ varE (mkName v)
antiExprExp _ = Nothing
parseExprPat :: String -> Q Pat
parseExprPat s = do loc <- location
expr <- parseExpr loc s
dataToPatQ (const Nothing `extQ` antiExprPat) expr
antiExprPat :: Expr -> Maybe (Q Pat)
antiExprPat (AntiIntExpr v) = Just $ conP (mkName "IntExpr")
[varP (mkName v)]
antiExprPat (AntiExpr v) = Just $ varP (mkName v)
antiExprPat _ = Nothing
-- Copied from syb for the test
-- | Extend a generic query by a type-specific case
extQ :: ( Typeable a
, Typeable b
)
=> (a -> q)
-> (b -> q)
-> a
-> q
extQ f g a = maybe (f a) g (cast a)
| ezyang/ghc | testsuite/tests/quasiquotation/qq006/Expr.hs | bsd-3-clause | 3,391 | 0 | 14 | 1,109 | 1,180 | 605 | 575 | 88 | 4 |
module A061b where
| urbanslug/ghc | testsuite/tests/driver/A061b.hs | bsd-3-clause | 19 | 0 | 2 | 3 | 4 | 3 | 1 | 1 | 0 |
module Helpers
( getSym
, getKeyState
, getModifiers
, emptyStackSet
, singleton'
, modToWLCMod
) where
import Data.Bits
import qualified Data.Dependent.Map as DMap
import Data.Dependent.Sum
import Data.GADT.Compare
import Data.Set hiding (filter)
import EmacsKeys
import Foreign.C.Types
import Text.XkbCommon
import WLC
import LayoutType
import StackSet
import Tree
getSym :: CUInt -> Keysym
getSym sym = Keysym (fromIntegral sym)
getKeyState :: WLCKeyStateBit -> WLCKeyState
getKeyState b = toEnum (fromIntegral b)
getModifiers :: WLCModifiers -> Set WLCModifier
getModifiers (WLCModifiers _ mods) =
fromList (filter (\modifier ->
mods .&.
fromIntegral (fromEnum modifier) /=
0)
(enumFrom WlcBitModShift))
emptyStackSet :: StackSet String a sid
emptyStackSet =
StackSet Nothing
[]
(fmap (\i ->
(Workspace (show i)
(2 ^ i)
(TreeZipper (Tree horizontalLayout Nothing)
[])))
[0 :: Int .. 1])
-- | Generate a singleton map from a 'DSum'.
singleton' :: GCompare k => DSum k -> DMap.DMap k
singleton' = DMap.fromList . (:[])
modToWLCMod :: Modifier -> WLCModifier
modToWLCMod Shift = WlcBitModShift
modToWLCMod Meta = WlcBitModAlt
modToWLCMod Ctrl = WlcBitModCtrl
| cocreature/reactand | src/Helpers.hs | isc | 1,539 | 0 | 15 | 545 | 376 | 207 | 169 | 46 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
module Utils (
encode
, ioLogging
, makeContext
, packetLogging
, tlsParams
, Context
, HostName
, PortNumber
) where
import Data.Default.Class (def)
import qualified Data.ByteString.Lazy as BSL
import qualified Crypto.Random.AESCtr as AESCtr
import qualified Network.IRC as IRC
import BasePrelude
import Network.BSD
import Network.Socket
import Network.TLS
import Network.TLS.Extra.Cipher
ciphers :: [Cipher]
ciphers =
[ cipher_DHE_RSA_AES256_SHA256
, cipher_DHE_RSA_AES128_SHA256
, cipher_DHE_RSA_AES128GCM_SHA256
, cipher_ECDHE_RSA_AES128GCM_SHA256
, cipher_AES256_SHA256
, cipher_AES128_SHA256
]
tlsParams :: HostName -> ClientParams
tlsParams host =
(defaultParamsClient host "")
{ clientSupported = def
{ supportedVersions = [TLS10, TLS12]
, supportedCiphers = ciphers
}
, clientWantSessionResume = Nothing
, clientUseServerNameIndication = True
, clientShared = def { sharedValidationCache = vcache }
}
where
vcache =
ValidationCache (\_ _ _ -> return ValidationCachePass) (\_ _ _ -> return ())
packetLogging :: Logging -> Logging
packetLogging logging =
logging { loggingPacketSent = putStrLn . ("debug: >> " ++)
, loggingPacketRecv = putStrLn . ("debug: << " ++)
}
ioLogging :: Logging -> Logging
ioLogging logging =
logging { loggingIOSent = putStrLn . ("io: >> " ++) . show
, loggingIORecv = \hdr -> putStrLn . (("io: << " ++ show hdr ++ " ") ++) . show
}
makeContext :: HostName -> PortNumber -> IO Context
makeContext hostname port = do
prg <- AESCtr.makeSystem
he <- getHostByName hostname
sock <- socket AF_INET Stream defaultProtocol
let sockaddr = SockAddrInet port (head $ hostAddresses he)
catch (connect sock sockaddr) (\(e :: SomeException) -> error (show e))
contextNew sock (tlsParams hostname) prg
encode :: IRC.Message -> BSL.ByteString
encode = BSL.fromChunks . return . (<> "\r\n") . IRC.encode
| hlian/thomas-hauk | src/Utils.hs | mit | 2,041 | 0 | 14 | 445 | 545 | 311 | 234 | 56 | 1 |
module Nunavut.PropogationSpec where
import Test.Hspec
import Nunavut.Propogation
import Nunavut.Util.Arbitrary ()
import Nunavut.Util.TestUtils
spec :: Spec
spec =
describe "PropData" $
validMonoid (undefined :: PropData)
| markcwhitfield/nunavut | test/Nunavut/PropogationSpec.hs | mit | 232 | 0 | 7 | 33 | 57 | 34 | 23 | 9 | 1 |
-- :l C:\Local\Dev\haskell\learn_chapter4.hs
-- Syntax in Functions
-- Pattern matching
lucky :: (Integral a) => a -> String
lucky 7 = "LUCKY NUMBER SEVEN!"
lucky x = "Sorry, you're out of luck, pal!"
-- The lucky function takes an argument belonging to the Integral typeclass (Int, Integer) and returns a String
-- If lucky is called with the argument 7, this version runs because of the pattern match
-- In every other case this version is called
sayMe :: (Integral a) => a -> String
sayMe 1 = "One!"
sayMe 2 = "Two!"
sayMe 3 = "Three!"
sayMe 4 = "Four!"
sayMe 5 = "Five!"
sayMe x = "Not between 1 and 5"
-- The order of the patterns matters. The most generic ones must come last (see the sketch below).
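-- A minimal sketch (not from the tutorial; unlucky is a made-up name) of what happens
-- when the catch-all pattern comes first: it matches everything, so the specific
-- pattern below it is never reached and GHC warns that it is redundant.
unlucky :: (Integral a) => a -> String
unlucky _ = "Sorry, you're out of luck, pal!"
unlucky 7 = "LUCKY NUMBER SEVEN!" -- never reached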
-- A recursive definition of factorial
factorial :: (Integral a) => a -> a
factorial 0 = 1
factorial n = if n > 0 then n * factorial (n - 1) else error "No factorial for negative numbers"
-- The tutorial did not check for negative values
charName :: Char -> String
charName 'a' = "Alice"
charName 'b' = "Bob"
charName 'c' = "Charlie"
-- Because there is no generic case, calling charName 'd' raises an exception:
-- *** Exception: ... Non-exhaustive patterns in function charName
-- Two ways of doing pattern matching with tuples
addVectors1 :: (Num a) => (a, a) -> (a, a) -> (a, a)
addVectors1 a b = (fst a + fst b, snd a + snd b)
addVectors :: (Num a) => (a, a) -> (a, a) -> (a, a)
addVectors (x1, y1) (x2, y2) = (x1 + x2, y1 + y2)
first :: (a, b, c) -> a
first (x,_,_) = x
second :: (a, b, c) -> b
second (_,y,_) = y
third :: (a, b, c) -> c
third (_,_,z) = z
head' :: [a] -> a
head' [] = error "Can't call head on an empty list, dummy!"
head' (x:_) = x
-- When pattern matching on lists, the first element can be isolated, but parentheses are mandatory in this case because of the :_
tell :: (Show a) => [a] -> String
tell [] = "The list is empty"
tell (x:[]) = "The list has one element: " ++ show x
tell (x:y:[]) = "The list has two elements: " ++ show x ++ " and " ++ show y
tell (x:y:_) = "The list is long. The first two elements are: " ++ show x ++ " and " ++ show y
-- (x:[]) could be written as [x]
-- (x:y:[]) could be written as [x,y]
-- (x:y:_) could not be written any other way, because of the :_ (see the rewrite sketch below)
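-- Illustrative rewrite (not from the tutorial; tellBrackets is a made-up name):
-- the same tell function written with the bracket syntax mentioned above.
tellBrackets :: (Show a) => [a] -> String
tellBrackets [] = "The list is empty"
tellBrackets [x] = "The list has one element: " ++ show x
tellBrackets [x,y] = "The list has two elements: " ++ show x ++ " and " ++ show y
tellBrackets (x:y:_) = "The list is long. The first two elements are: " ++ show x ++ " and " ++ show y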
-- A recursive version of length
length' :: (Num b) => [a] -> b
length' [] = 0
length' (_:xs) = 1 + length' xs
-- This example nicely illustrates that every possible list shape has been covered
-- Since [] is the empty list and _:xs is a list with one or more elements (the expansion sketch below walks through a call)
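-- An illustrative expansion (not from the tutorial) of how the recursion unfolds:
-- length' [1,2,3] = 1 + length' [2,3]
--                 = 1 + (1 + length' [3])
--                 = 1 + (1 + (1 + length' []))
--                 = 1 + (1 + (1 + 0))
--                 = 3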
sum' :: (Num a) => [a] -> a
sum' [] = 0
sum' (x:xs) = x + sum' xs
capital :: String -> String
capital "" = "Empty string, whoops!"
capital all@(x:xs) = "The first letter of " ++ all ++ " is " ++ [x]
bmiTell :: (RealFloat a) => a -> String
bmiTell bmi
| bmi <= 18.5 = "You're underweight."
| bmi <= 25.0 = "You're normal weighted."
| bmi <= 30.0 = "You're overweight."
| otherwise = "You're obese."
bmiTell2 :: (RealFloat a) => a -> a -> String
bmiTell2 weight height
| weight / height ^ 2 <= 18.5 = "You're underweight."
| weight / height ^ 2 <= 25.0 = "You're normal weighted."
| weight / height ^ 2 <= 30.0 = "You're overweight."
| otherwise = "You're obese."
max' :: (Ord a) => a -> a -> a
max' a b
| a > b = a
| otherwise = b
myCompare :: (Ord a) => a -> a -> Ordering
a `myCompare` b
| a > b = GT
| a < b = LT
| otherwise = EQ
bmiTell3 :: (RealFloat a) => a -> a -> String
bmiTell3 weight height
| bmi <= 18.5 = "You're underweight."
| bmi <= 25.0 = "You're normal weighted."
| bmi <= 30.0 = "You're overweight."
| otherwise = "You're obese."
where bmi = weight / height ^ 2
bmiTell4 :: (RealFloat a) => a -> a -> String
bmiTell4 weight height
| bmi <= under = "You're underweight."
| bmi <= normal = "You're normal weighted."
| bmi <= over = "You're overweight."
| otherwise = "You're obese."
where bmi = weight / height ^ 2
under = 18.5
normal = 25.0
over = 30.0
bmiTell5 :: (RealFloat a) => a -> a -> String
bmiTell5 weight height
| bmi <= under = "You're underweight."
| bmi <= normal = "You're normal weighted."
| bmi <= over = "You're overweight."
| otherwise = "You're obese."
where bmi = weight / height ^ 2
(under, normal, over) = (18.5, 25.0, 30.0)
-- bmiTell5 109 1.83
initials :: String -> String -> String
initials firstname lastname = [f] ++ ". " ++ [l] ++ "."
where (f:_) = firstname
(l:_) = lastname
-- Alternative: initials (f:_) (l:_) = [f] ++ ". " ++ [l] ++ "."
-- initials "Felipo" "Soranz"
calcBmis :: (RealFloat a) => [(a, a)] -> [a]
calcBmis xs = [ bmi w h | (w, h) <- xs ]
where bmi weight height = weight / height ^ 2
-- calcBmis [(109, 1.83), (85, 1.90)]
cylinder :: (RealFloat a) => a -> a -> a
cylinder r h =
let sideArea = 2 * pi * r * h; topArea = pi * r ^ 2
in sideArea + 2 * topArea
calcBmis2 :: (RealFloat a) => [(a, a)] -> [a]
calcBmis2 xs = [ bmi | (w, h) <- xs, let bmi = w / h ^ 2 ]
calcBmisFat :: (RealFloat a) => [(a, a)] -> [a]
calcBmisFat xs = [ bmi | (w, h) <- xs, let bmi = w / h ^ 2, bmi > 25.0 ]
headCase :: [a] -> a
headCase xs = case xs of [] -> error "Empty list has no head."
(x:_) -> x
describeList :: [a] -> String
describeList xs = "The list is " ++ case xs of [] -> "empty."
[x] -> "a singleton list."
xs -> "a longer list."
describeList' :: [a] -> String
describeList' xs = "The list is " ++ what xs
where what [] = "empty."
what [x] = "a singleton list."
what xs = "a longer list."
| feliposz/learning-stuff | haskell/learn_chapter4.hs | mit | 5,759 | 0 | 11 | 1,472 | 1,946 | 1,033 | 913 | 117 | 3 |
module RL.UI.Sprite (
gameSprites,
SpriteEnv(..),
Sprite(..),
SpriteAttr(..),
Color,
toMessage
) where
import RL.Game
import RL.Player
import RL.UI.Common
import RL.Util (enumerate, equating, groupBy')
import Data.Maybe (catMaybes, fromJust, isJust, listToMaybe)
import qualified Data.List as L
import qualified Data.Map as M
white = (255, 255, 255)
grey = (200, 200, 200)
dgrey = (125, 125, 125)
black = (0, 0, 0)
purple = (204,0,204)
green = (0,204,0)
yellow = (255,255,0)
red = (255,0,0)
blue = (0, 128, 255)
brown = (153, 76, 0)
orange = (255, 128, 0)
lyellow = (255, 255, 204)
data SpriteEnv = SpriteEnv { spriteGame :: Env,
spriteIS :: InputState,
spriteSeen :: [Point] }
spriteLevel = level . spriteGame
gameSprites :: SpriteEnv -> [Sprite]
gameSprites env = getMapSprites env ++ getMsgSprites (spriteGame env) ++ getStatusSprites (spriteLevel env) ++ inputSprites env
inputSprites :: SpriteEnv -> [Sprite]
inputSprites env =
case menu (spriteIS env) of
Just TargetMenu -> maybe [] targetMenu (target (spriteIS env))
Just Inventory -> inventoryMenu
Just ProjectileMenu -> inventoryMenu
otherwise -> []
where targetMenu p = [CharSprite p '*' (SpriteAttr red black)]
inventoryMenu =
let lvl = spriteLevel env
inv = groupItems (inventory (player lvl))
eq = groupItems (equipmentToList (equipment (player lvl)))
showInvItem (ch, i) = ch:(showItem i)
p = player lvl
showItem (1,i) = " - " ++ showIdentified (identified p) i
showItem (n,i) = " - " ++ show n ++ " " ++ showIdentified (identified p) i ++ "s" -- TODO pluralize
in mkMessages (0, 0) ([ "Inventory:", " " ] ++ map showInvItem (zip inventoryLetters inv)) ++
mkMessages (40, 0) ([ "Equipped:", " " ] ++ map showItem eq)
spriteAt :: SpriteEnv -> Point -> Sprite
spriteAt env p = if canPlayerSee p then tileOrMobSprite lvl p
else seenTileSprite lvl p
where
lvl = spriteLevel env
canPlayerSee p = canSee lvl (player lvl) p || canSense lvl (player lvl) p
tileColor Floor = white
tileColor Cavern = grey
tileColor Rock = grey
tileColor (StairUp _) = white
tileColor (StairDown _) = white
mobColor "Kobold" = purple
mobColor "Goblin" = green
mobColor "Grid Bug" = purple
mobColor "Orc" = yellow
mobColor "Zombie" = dgrey
mobColor "Rat" = brown
mobColor otherwise = white
itemColor (Item "Blue" (Potion _)) = blue
itemColor (Item "Yellow" (Potion _)) = yellow
itemColor (Item "Black" (Potion _)) = dgrey
itemColor (Item "Red" (Potion _)) = red
itemColor (Item "White" (Potion _)) = white
itemColor (Item "Green" (Potion _)) = green
itemColor (Item "Orange" (Potion _)) = orange
itemColor (Item "Leather Armor" (Armor _)) = brown
itemColor (Item "Plate Mail" (Armor _)) = white
itemColor (Item "Full Plate" (Armor _)) = white
itemColor (Item "Small Shield" (Armor _)) = grey
itemColor (Item "Tower Shield" (Armor _)) = white
itemColor (Item n (Armor _)) = grey
itemColor (Item "Quarterstaff" (Weapon _)) = brown
itemColor (Item "Bow" (Weapon _)) = brown
itemColor (Item "Arrow" (Weapon _)) = brown
itemColor (Item "Dagger" (Weapon _)) = dgrey
itemColor (Item "Mace" (Weapon _)) = grey
itemColor (Item "Ornate Sword" (Weapon _)) = yellow
itemColor (Item n (Weapon _)) = lyellow
itemColor (Item n (Scroll _)) = white
itemColor otherwise = white
featureColor (Chest _) = yellow
featureColor (Fountain 0) = grey
featureColor (Fountain _) = blue
featureColor Altar = grey
tileSprite :: DLevel -> (Int, Int) -> Maybe Sprite
tileSprite lvl p = case findTileAt p lvl of
Nothing -> Nothing
Just t ->
let charSpr = CharSprite p (fromTile t) (SpriteAttr (tileColor t) black)
wallSpr w = WallSprite p w (SpriteAttr (tileColor t) black)
in Just $ maybe charSpr wallSpr (seenWallType env p)
itemSprite :: DLevel -> (Int, Int) -> Maybe Sprite
itemSprite lvl p = case findItemsAt p lvl of
(i:_) -> Just (CharSprite p (itemSymbol i) (SpriteAttr (itemColor i) black))
[] -> Nothing
mobSprite :: DLevel -> (Int, Int) -> Maybe Sprite
mobSprite lvl p = case findTileOrMob p lvl of
Right m -> if isVisible m then
Just (CharSprite p (symbol m) (SpriteAttr (mobColor (mobName m)) black))
else if isPlayer m then
Just (CharSprite p ' ' (SpriteAttr white (50,50,50)))
else
Nothing
Left _ -> Nothing
featureSprite :: DLevel -> (Int, Int) -> Maybe Sprite
featureSprite lvl p = case L.lookup p (features lvl) of
Just f -> Just (CharSprite p (fromFeature f) (SpriteAttr (featureColor f) black))
Nothing -> Nothing
tileOrMobSprite :: DLevel -> (Int, Int) -> Sprite
tileOrMobSprite lvl p = let sprites = [mobSprite lvl p, featureSprite lvl p, itemSprite lvl p, tileSprite lvl p]
sprite = listToMaybe (catMaybes sprites)
in if isJust sprite then fromJust sprite
else CharSprite p ' ' (SpriteAttr black black)
seenTileSprite lvl p = if p `elem` spriteSeen env then stale (fromJust (listToMaybe (catMaybes [featureSprite lvl p, itemSprite lvl p, tileSprite lvl p])))
else CharSprite p ' ' (SpriteAttr black black)
stale (CharSprite p c _) = CharSprite p c (SpriteAttr dgrey black)
stale (MessageSprite p c _) = MessageSprite p c (SpriteAttr dgrey black)
stale (WallSprite p c _) = WallSprite p c (SpriteAttr dgrey black)
getMapSprites :: SpriteEnv -> [Sprite]
getMapSprites env = map (spriteAt env . fst) . M.toList $ tiles (spriteLevel env)
getStatusSprites :: DLevel -> [Sprite]
getStatusSprites lvl =
let p = player lvl
hpSprite = (MessageSprite (64, 15) (show (hp p)) (SpriteAttr hpColor black))
hpPercent = fromIntegral (hp p) / fromIntegral (mhp p)
hpColor = if hpPercent >= 1.0 then white
else if hpPercent >= 0.7 then green
else if hpPercent >= 0.4 then yellow
else red
in [ mkMessage (60, 15) "HP: ", hpSprite, mkMessage (66, 15) ("/" ++ show (mhp p)),
mkMessage (60, 16) ("Depth: " ++ show (depth lvl)) ]
getMsgSprites :: Env -> [Sprite]
getMsgSprites env = let evs = events env
recentMsgs = catMaybes (map (toMessage env) (getEventsAfterTurns 2 evs))
staleMsgs = catMaybes (map (toMessage env) (getEventsAfterTurns 11 (getEventsBeforeTurns 2 evs)))
msgs = zip recentMsgs (repeat white) ++ zip staleMsgs (repeat grey)
in mkColoredMessages (0, 15) . reverse . take 9 $ msgs
mkMessages :: Point -> [String] -> [Sprite]
mkMessages (offx, offy) = map toSprite . enumerate
where
toSprite (i, s) = MessageSprite (offx, i + offy) s (SpriteAttr white black)
mkColoredMessages :: Point -> [(String, Color)] -> [Sprite]
mkColoredMessages (offx, offy) = map toSprite . enumerate
where
toSprite (i, (s, fg)) = MessageSprite (offx, i + offy) s (SpriteAttr fg black)
mkMessage :: Point -> String -> Sprite
mkMessage xy s = MessageSprite xy s (SpriteAttr white black)
wallHasE WallNE = True
wallHasE WallNSE = True
wallHasE WallNEW = True
wallHasE WallEW = True
wallHasE WallSE = True
wallHasE WallSEW = True
wallHasE WallNESW = True
wallHasE otherwise = False
wallHasW WallNW = True
wallHasW WallNSW = True
wallHasW WallNEW = True
wallHasW WallEW = True
wallHasW WallSW = True
wallHasW WallSEW = True
wallHasW WallNESW = True
wallHasW otherwise = False
wallHasN WallNESW = True
wallHasN t = t <= WallNEW && t > Wall
wallHasS WallNS = True
wallHasS WallNSE = True
wallHasS WallNSW = True
wallHasS t = t <= WallNESW && t >= WallSE
-- which part of the wall is seen
seenWallType :: SpriteEnv -> Point -> Maybe WallType
seenWallType env (x,y) =
let lvl = spriteLevel env
f p' = maybe False (not . isWall) (findTileAt p' lvl) && p' `elem` (spriteSeen env)
fixWall Wall = if ((x+1),y) `elem` (spriteSeen env) || ((x-1),y) `elem` (spriteSeen env) then WallEW
else if (x,y+1) `elem` (spriteSeen env) || (x,y-1) `elem` (spriteSeen env) then WallNS
else Wall
fixWall t = t
in fixWall <$> filterWallType f (x,y) <$> wallType lvl (x,y)
filterWallType :: (Point -> Bool) -> Point -> WallType -> WallType
filterWallType f (x,y) t =
let ne = (x+1,y-1)
nw = (x-1,y-1)
se = (x+1,y+1)
sw = (x-1,y+1)
north = (x, y-1)
south = (x, y+1)
west = (x-1,y)
east = (x+1,y)
in if t == WallNESW && (f ne || f nw) && (f se || f sw) then t
else if wallHasN t && wallHasE t && wallHasW t && ((f ne && f nw) || ((f ne || f nw) && f south)) then WallNEW
else if wallHasS t && wallHasE t && wallHasW t && ((f se && f sw) || ((f se || f sw) && f north)) then WallSEW
else if wallHasN t && wallHasS t && wallHasE t && ((f ne && f se) || ((f ne || f se) && f west)) then WallNSE
else if wallHasN t && wallHasS t && wallHasW t && ((f nw && f sw) || ((f nw || f sw) && f east)) then WallNSW
else if wallHasS t && wallHasW t && (f sw || (f east && f north)) then WallSW
else if wallHasS t && wallHasE t && (f se || (f west && f north)) then WallSE
else if wallHasN t && wallHasW t && (f nw || (f east && f south)) then WallNW
else if wallHasN t && wallHasE t && (f ne || (f west && f south)) then WallNE
else if (wallHasN t || wallHasS t) && (f west || f east) then WallNS
else if (wallHasW t || wallHasE t) && (f north || f south) then WallEW
else Wall
-- wall type for different wall tiles
wallType :: DLevel -> Point -> Maybe WallType
wallType lvl p =
if not (maybe False isWall (findTileAt p lvl)) then Nothing
else if wallN lvl p && wallS lvl p && wallE lvl p && wallW lvl p then Just WallNESW
else if wallN lvl p && wallS lvl p && wallE lvl p then Just WallNSE
else if wallN lvl p && wallS lvl p && wallW lvl p then Just WallNSW
else if wallN lvl p && wallW lvl p && wallE lvl p then Just WallNEW
else if wallS lvl p && wallW lvl p && wallE lvl p then Just WallSEW
else if wallN lvl p && wallS lvl p then Just WallNS
else if wallW lvl p && wallS lvl p then Just WallSW
else if wallE lvl p && wallS lvl p then Just WallSE
else if wallW lvl p && wallN lvl p then Just WallNW
else if wallE lvl p && wallN lvl p then Just WallNE
else if wallE lvl p then Just WallEW
else if wallW lvl p then Just WallEW
else if wallS lvl p then Just WallNS
else if wallN lvl p then Just WallNS
else Just Wall
wallN :: DLevel -> Point -> Bool
wallN lvl (x,y) = maybe False isWall (findTileAt (x, y - 1) lvl)
wallE :: DLevel -> Point -> Bool
wallE lvl (x,y) = maybe False isWall (findTileAt (x + 1, y) lvl)
wallS :: DLevel -> Point -> Bool
wallS lvl (x,y) = maybe False isWall (findTileAt (x, y + 1) lvl)
wallW :: DLevel -> Point -> Bool
wallW lvl (x,y) = maybe False isWall (findTileAt (x - 1, y) lvl)
isWall :: Tile -> Bool
isWall Rock = True
isWall otherwise = False
toMessage :: Env -> Event -> Maybe String
toMessage e (GameUpdate NewGame) = Just $ "You delve underground, searching for your ancestors' sword."
toMessage e (GameUpdate (Escaped)) = Just $ "There is no escape. You must avenge your ancestors!"
toMessage e (GameUpdate (Crit attacker target))
| isPlayer attacker = Just $ "CRITICAL HIT!"
toMessage e (GameUpdate (Damaged attacker target dmg))
| isPlayer attacker && isPlayer target = Just $ "You hurt yourself for " ++ show dmg ++ " damage! Be more careful!"
| isPlayer attacker = Just $ "You hit the " ++ mobName target ++ " for " ++ show dmg ++ " damage"
| isPlayer target = Just $ "You were hit by the " ++ mobName attacker ++ " for " ++ show dmg
| otherwise = Just $ "The " ++ mobName attacker ++ " hit the " ++ mobName target ++ " for " ++ show dmg
toMessage e (GameUpdate (Missed attacker target))
| isPlayer attacker = Just $ "You missed the " ++ mobName target
| isPlayer target = Just $ "The " ++ mobName attacker ++ " missed"
| otherwise = Just $ "The " ++ mobName attacker ++ " missed the " ++ mobName target
toMessage e (GameUpdate (Died m))
| isPlayer m = Just $ "You died! Press space to quit or r to restart a new game."
| otherwise = Just $ "You killed the " ++ mobName m
toMessage e (GameUpdate (StairsTaken Up _)) = Just $ "You've gone up stairs."
toMessage e (GameUpdate (StairsTaken Down _)) = Just $ "You've gone down stairs."
toMessage e (GameUpdate (Waken m)) | canSee (level e) (player (level e)) (at m) = Just $ "The " ++ mobName m ++ " wakes up from their slumber."
toMessage e (GameUpdate (Slept m)) = Just $ "The " ++ mobName m ++ " has fallen asleep."
toMessage e (EventMessage (StairsSeen Up)) = Just $ "You see stairs going up."
toMessage e (EventMessage (StairsSeen Down)) = Just $ "You see stairs going down."
toMessage e (EventMessage (ItemsSeen items)) = let suffix = if length items > 1 then "There are " ++ show (length items - 1) ++ " more items here." else ""
in Just $ "You see a " ++ showIdentified (identified (player (level e))) (head items) ++ ". " ++ suffix
toMessage e (EventMessage (MenuChange Inventory)) = Just $ "Pick an item to use or equip. Press space to cancel."
toMessage e (EventMessage (MenuChange ProjectileMenu)) = Just $ "Pick a projectile to throw. Press space to cancel."
toMessage e (EventMessage (MenuChange TargetMenu)) = Just $ "Pick a target to fire at. Press r to ready something else, space to cancel."
toMessage e (EventMessage InMelee) = Just $ "You are unable to concentrate on firing within the melee."
toMessage e (EventMessage (Readied i)) = Just $ "You have readied the " ++ show i
toMessage e (GameUpdate (ItemPickedUp m item)) | isPlayer m = Just $ "You have picked up a " ++ showIdentified (identified (player (level e))) item ++ "."
toMessage e (GameUpdate (Equipped m item)) | isPlayer m = Just $ "You have equipped the " ++ showIdentified (identified (player (level e))) item ++ "."
toMessage e (GameUpdate (EquipmentRemoved m item)) | isPlayer m = Just $ "You have removed the " ++ showIdentified (identified (player (level e))) item ++ "."
toMessage e (GameUpdate (Drank m p)) | isPlayer m = Just $ "You drank the " ++ show p ++ "."
toMessage e (GameUpdate (Healed m n)) | isPlayer m = Just $ "You were healed of " ++ show n ++ " points of damage."
toMessage e (GameUpdate (GainedLife m n)) | isPlayer m = Just $ "Praise the sun! You feel youthful."
toMessage e (GameUpdate (GainedStrength m n)) | isPlayer m = Just $ "You feel empowered!"
toMessage e (GameUpdate (DrankAcid m )) | isPlayer m = Just $ "It BURNS!"
toMessage e (GameUpdate (GainedMobFlag m Invisible)) | isPlayer m = Just $ "You can no longer see yourself!"
toMessage e (GameUpdate (GainedMobFlag m ConfusedF)) | isPlayer m = Just $ "You feel drunk."
toMessage e (GameUpdate (GainedMobFlag m BlindedF)) | isPlayer m = Just $ "You can no longer see your surroundings!"
toMessage e (GameUpdate (GainedMobFlag m Sleeping)) | isPlayer m = Just $ "You fell asleep."
toMessage e (GameUpdate (GainedMobFlag m TelepathicF)) | isPlayer m = Just $ "You sense nearby danger."
toMessage e (GameUpdate (GainedMobFlag m (MappedF _))) | isPlayer m = Just $ "You suddenly understand the layout of the current level."
toMessage e (GameUpdate (RemovedMobFlag m Invisible)) | isPlayer m = Just $ "You can see yourself again."
toMessage e (GameUpdate (RemovedMobFlag m ConfusedF)) | isPlayer m = Just $ "You feel sober."
toMessage e (GameUpdate (RemovedMobFlag m BlindedF)) | isPlayer m = Just $ "You are no longer blind."
toMessage e (GameUpdate (RemovedMobFlag m Sleeping)) | isPlayer m = Just $ "You wake up."
toMessage e (GameUpdate (RemovedMobFlag m TelepathicF)) | isPlayer m = Just $ "You stop sensing danger."
toMessage e (GameUpdate (RemovedMobFlag m (MappedF _))) | isPlayer m = Just $ "You feel forgetful."
toMessage e (GameUpdate (Read m s)) | isPlayer m = Just $ "You read the " ++ show s ++ "."
toMessage e (GameUpdate (CastFire m n)) | isPlayer m = Just $ "Roaring flames erupt all around you!"
toMessage e (GameUpdate (CastLightning m n)) | isPlayer m = Just $ "KABOOM! Lightning strikes everything around you."
toMessage e (GameUpdate (Teleported m p)) | isPlayer m = Just $ "You feel disoriented."
toMessage e (GameUpdate (ThrownProjectile m i _)) | isPlayer m = Just $ "You throw the " ++ show i ++ "."
toMessage e (GameUpdate (FiredProjectile m l p _)) | isPlayer m = Just $ "You fire the " ++ show p ++ " out of your " ++ show l ++ "."
toMessage e (GameUpdate (BandageApplied m)) | isPlayer m = Just $ "You apply the bandage."
toMessage e (GameUpdate (FeatureInteracted p (Fountain 0))) = Just $ "The fountain has run dry!"
toMessage e (GameUpdate (FeatureInteracted p (Fountain n))) = Just $ "You drink from the fountain."
toMessage e (GameUpdate (FeatureInteracted p (Chest is))) = Just $ "You open the chest! There are " ++ show (length is) ++ " items."
toMessage e (GameUpdate (FeatureInteracted p Altar)) = Just $ "You pray to the gods."
toMessage e otherwise = Nothing
| MichaelMackus/hsrl | RL/UI/Sprite.hs | mit | 18,789 | 0 | 21 | 5,571 | 6,989 | 3,512 | 3,477 | 297 | 46 |
{-# LANGUAGE OverloadedLists, ScopedTypeVariables, TypeApplications, ViewPatterns #-}
-----------------------------------------------------------------------------
-- |
-- Module : Algebra.Graph.Test.Bipartite.AdjacencyMap
-- Copyright : (c) Andrey Mokhov 2016-2022
-- License : MIT (see the file LICENSE)
-- Maintainer : [email protected]
-- Stability : experimental
--
-- Testsuite for "Algebra.Graph.Bipartite.AdjacencyMap".
-----------------------------------------------------------------------------
module Algebra.Graph.Test.Bipartite.AdjacencyMap (
-- * Testsuite
testBipartiteAdjacencyMap,
testBipartiteAdjacencyMapAlgorithm
) where
import Algebra.Graph.Bipartite.AdjacencyMap
import Algebra.Graph.Bipartite.AdjacencyMap.Algorithm
import Algebra.Graph.Test
import Data.Either
import Data.Either.Extra
import Data.List (nub, sort)
import Data.Map.Strict (Map)
import Data.Set (Set)
import qualified Algebra.Graph.AdjacencyMap as AM
import qualified Algebra.Graph.Bipartite.AdjacencyMap as B
import qualified Data.Bifunctor as Bifunctor
import qualified Data.Map.Strict as Map
import qualified Data.Set as Set
import qualified Data.Tuple
type AI = AM.AdjacencyMap Int
type AII = AM.AdjacencyMap (Either Int Int)
type BAII = AdjacencyMap Int Int
type MII = Matching Int Int
type MIC = Matching Int Char
type LII = List Int Int
testBipartiteAdjacencyMap :: IO ()
testBipartiteAdjacencyMap = do
-- Help with type inference by shadowing overly polymorphic functions
let consistent :: BAII -> Bool
consistent = B.consistent
show :: BAII -> String
show = Prelude.show
leftAdjacencyMap :: BAII -> Map Int (Set Int)
leftAdjacencyMap = B.leftAdjacencyMap
rightAdjacencyMap :: BAII -> Map Int (Set Int)
rightAdjacencyMap = B.rightAdjacencyMap
leftAdjacencyList :: BAII -> [(Int, [Int])]
leftAdjacencyList = B.leftAdjacencyList
rightAdjacencyList :: BAII -> [(Int, [Int])]
rightAdjacencyList = B.rightAdjacencyList
empty :: BAII
empty = B.empty
vertex :: Either Int Int -> BAII
vertex = B.vertex
leftVertex :: Int -> BAII
leftVertex = B.leftVertex
rightVertex :: Int -> BAII
rightVertex = B.rightVertex
edge :: Int -> Int -> BAII
edge = B.edge
isEmpty :: BAII -> Bool
isEmpty = B.isEmpty
hasLeftVertex :: Int -> BAII -> Bool
hasLeftVertex = B.hasLeftVertex
hasRightVertex :: Int -> BAII -> Bool
hasRightVertex = B.hasRightVertex
hasVertex :: Either Int Int -> BAII -> Bool
hasVertex = B.hasVertex
hasEdge :: Int -> Int -> BAII -> Bool
hasEdge = B.hasEdge
vertexCount :: BAII -> Int
vertexCount = B.vertexCount
edgeCount :: BAII -> Int
edgeCount = B.edgeCount
vertices :: [Int] -> [Int] -> BAII
vertices = B.vertices
edges :: [(Int, Int)] -> BAII
edges = B.edges
overlays :: [BAII] -> BAII
overlays = B.overlays
connects :: [BAII] -> BAII
connects = B.connects
swap :: BAII -> BAII
swap = B.swap
toBipartite :: AII -> BAII
toBipartite = B.toBipartite
toBipartiteWith :: Ord a => (a -> Either Int Int) -> AM.AdjacencyMap a -> BAII
toBipartiteWith = B.toBipartiteWith
fromBipartite :: BAII -> AII
fromBipartite = B.fromBipartite
biclique :: [Int] -> [Int] -> BAII
biclique = B.biclique
star :: Int -> [Int] -> BAII
star = B.star
stars :: [(Int, [Int])] -> BAII
stars = B.stars
removeLeftVertex :: Int -> BAII -> BAII
removeLeftVertex = B.removeLeftVertex
removeRightVertex :: Int -> BAII -> BAII
removeRightVertex = B.removeRightVertex
removeEdge :: Int -> Int -> BAII -> BAII
removeEdge = B.removeEdge
putStrLn "\n============ Bipartite.AdjacencyMap.Num ============"
test "0 == rightVertex 0" $
0 == rightVertex 0
test "swap 1 == leftVertex 1" $
swap 1 == leftVertex 1
test "swap 1 + 2 == vertices [1] [2]" $
swap 1 + 2 == vertices [1] [2]
test "swap 1 * 2 == edge 1 2" $
swap 1 * 2 == edge 1 2
test "swap 1 + 2 * swap 3 == overlay (leftVertex 1) (edge 3 2)" $
swap 1 + 2 * swap 3 == overlay (leftVertex 1) (edge 3 2)
test "swap 1 * (2 + swap 3) == connect (leftVertex 1) (vertices [3] [2])" $
swap 1 * (2 + swap 3) == connect (leftVertex 1) (vertices [3] [2])
putStrLn "\n============ Bipartite.AdjacencyMap.Show ============"
test "show empty == \"empty\"" $
show empty == "empty"
test "show 1 == \"rightVertex 1\"" $
show 1 == "rightVertex 1"
test "show (swap 2) == \"leftVertex 2\"" $
show (swap 2) == "leftVertex 2"
test "show 1 + 2 == \"vertices [] [1,2]\"" $
show (1 + 2) == "vertices [] [1,2]"
test "show (swap (1 + 2)) == \"vertices [1,2] []\"" $
show (swap (1 + 2)) == "vertices [1,2] []"
test "show (swap 1 * 2) == \"edge 1 2\"" $
show (swap 1 * 2) == "edge 1 2"
test "show (swap 1 * 2 * swap 3) == \"edges [(1,2),(3,2)]\"" $
show (swap 1 * 2 * swap 3) == "edges [(1,2),(3,2)]"
test "show (swap 1 * 2 + swap 3) == \"overlay (leftVertex 3) (edge 1 2)\"" $
show (swap 1 * 2 + swap 3) == "overlay (leftVertex 3) (edge 1 2)"
putStrLn "\n============ Bipartite.AdjacencyMap.Eq ============"
test "(x == y) == (leftAdjacencyMap x == leftAdjacencyMap y && rightAdjacencyMap x == rightAdjacencyMap y)" $ \(x :: BAII) (y :: BAII) ->
(x == y) == (leftAdjacencyMap x == leftAdjacencyMap y && rightAdjacencyMap x == rightAdjacencyMap y)
putStrLn ""
test " x + y == y + x" $ \(x :: BAII) y ->
x + y == y + x
test " x + (y + z) == (x + y) + z" $ \(x :: BAII) y z ->
x + (y + z) == (x + y) + z
test " x * empty == x" $ \(x :: BAII) ->
x * empty == x
test " empty * x == x" $ \(x :: BAII) ->
empty * x == x
test " x * y == y * x" $ \(x :: BAII) y ->
x * y == y * x
test " x * (y * z) == (x * y) * z" $ size10 $ \(x :: BAII) y z ->
x * (y * z) == (x * y) * z
test " x * (y + z) == x * y + x * z" $ size10 $ \(x :: BAII) y z ->
x * (y + z) == x * (y + z)
test " (x + y) * z == x * z + y * z" $ size10 $ \(x :: BAII) y z ->
(x + y) * z == x * z + y * z
test " x * y * z == x * y + x * z + y * z" $ size10 $ \(x :: BAII) y z ->
x * y * z == x * y + x * z + y * z
test " x + empty == x" $ \(x :: BAII) ->
x + empty == x
test " empty + x == x" $ \(x :: BAII) ->
empty + x == x
test " x + x == x" $ \(x :: BAII) ->
x + x == x
test "x * y + x + y == x * y" $ \(x :: BAII) (y :: BAII) ->
x * y + x + y == x * y
test " x * x * x == x * x" $ size10 $ \(x :: BAII) ->
x * x * x == x * x
putStrLn ""
test " leftVertex x * leftVertex y == leftVertex x + leftVertex y " $ \x y ->
leftVertex x * leftVertex y == leftVertex x + leftVertex y
test "rightVertex x * rightVertex y == rightVertex x + rightVertex y" $ \x y ->
rightVertex x * rightVertex y == rightVertex x + rightVertex y
putStrLn "\n============ Bipartite.AdjacencyMap.leftAdjacencyMap ============"
test "leftAdjacencyMap empty == Map.empty" $
leftAdjacencyMap empty == Map.empty
test "leftAdjacencyMap (leftVertex x) == Map.singleton x Set.empty" $ \x ->
leftAdjacencyMap (leftVertex x) == Map.singleton x Set.empty
test "leftAdjacencyMap (rightVertex x) == Map.empty" $ \x ->
leftAdjacencyMap (rightVertex x) == Map.empty
test "leftAdjacencyMap (edge x y) == Map.singleton x (Set.singleton y)" $ \x y ->
leftAdjacencyMap (edge x y) == Map.singleton x (Set.singleton y)
putStrLn "\n============ Bipartite.AdjacencyMap.rightAdjacencyMap ============"
test "rightAdjacencyMap empty == Map.empty" $
rightAdjacencyMap empty == Map.empty
test "rightAdjacencyMap (leftVertex x) == Map.empty" $ \x ->
rightAdjacencyMap (leftVertex x) == Map.empty
test "rightAdjacencyMap (rightVertex x) == Map.singleton x Set.empty" $ \x ->
rightAdjacencyMap (rightVertex x) == Map.singleton x Set.empty
test "rightAdjacencyMap (edge x y) == Map.singleton y (Set.singleton x)" $ \x y ->
rightAdjacencyMap (edge x y) == Map.singleton y (Set.singleton x)
putStrLn "\n============ Bipartite.AdjacencyMap.empty ============"
test "isEmpty empty == True" $
isEmpty empty == True
test "leftAdjacencyMap empty == Map.empty" $
leftAdjacencyMap empty == Map.empty
test "rightAdjacencyMap empty == Map.empty" $
rightAdjacencyMap empty == Map.empty
test "hasVertex x empty == False" $ \x ->
hasVertex x empty == False
putStrLn "\n============ Bipartite.AdjacencyMap.leftVertex ============"
test "leftAdjacencyMap (leftVertex x) == Map.singleton x Set.empty" $ \x ->
leftAdjacencyMap (leftVertex x) == Map.singleton x Set.empty
test "rightAdjacencyMap (leftVertex x) == Map.empty" $ \x ->
rightAdjacencyMap (leftVertex x) == Map.empty
test "hasLeftVertex x (leftVertex y) == (x == y)" $ \x y ->
hasLeftVertex x (leftVertex y) == (x == y)
test "hasRightVertex x (leftVertex y) == False" $ \x y ->
hasRightVertex x (leftVertex y) == False
test "hasEdge x y (leftVertex z) == False" $ \x y z ->
hasEdge x y (leftVertex z) == False
putStrLn "\n============ Bipartite.AdjacencyMap.rightVertex ============"
test "leftAdjacencyMap (rightVertex x) == Map.empty" $ \x ->
leftAdjacencyMap (rightVertex x) == Map.empty
test "rightAdjacencyMap (rightVertex x) == Map.singleton x Set.empty" $ \x ->
rightAdjacencyMap (rightVertex x) == Map.singleton x Set.empty
test "hasLeftVertex x (rightVertex y) == False" $ \x y ->
hasLeftVertex x (rightVertex y) == False
test "hasRightVertex x (rightVertex y) == (x == y)" $ \x y ->
hasRightVertex x (rightVertex y) == (x == y)
test "hasEdge x y (rightVertex z) == False" $ \x y z ->
hasEdge x y (rightVertex z) == False
putStrLn "\n============ Bipartite.AdjacencyMap.vertex ============"
test "vertex . Left == leftVertex" $ \x ->
(vertex . Left) x == leftVertex x
test "vertex . Right == rightVertex" $ \x ->
(vertex . Right) x == rightVertex x
putStrLn "\n============ Bipartite.AdjacencyMap.edge ============"
test "edge x y == connect (leftVertex x) (rightVertex y)" $ \x y ->
edge x y == connect (leftVertex x) (rightVertex y)
test "leftAdjacencyMap (edge x y) == Map.singleton x (Set.singleton y)" $ \x y ->
leftAdjacencyMap (edge x y) == Map.singleton x (Set.singleton y)
test "rightAdjacencyMap (edge x y) == Map.singleton y (Set.singleton x)" $ \x y ->
rightAdjacencyMap (edge x y) == Map.singleton y (Set.singleton x)
test "hasEdge x y (edge x y) == True" $ \x y ->
hasEdge x y (edge x y) == True
test "hasEdge 1 2 (edge 2 1) == False" $
hasEdge 1 2 (edge 2 1) == False
putStrLn "\n============ Bipartite.AdjacencyMap.overlay ============"
test "isEmpty (overlay x y) == isEmpty x && isEmpty y" $ \x y ->
isEmpty (overlay x y) ==(isEmpty x && isEmpty y)
test "hasVertex z (overlay x y) == hasVertex z x || hasVertex z y" $ \x y z ->
hasVertex z (overlay x y) ==(hasVertex z x || hasVertex z y)
test "vertexCount (overlay x y) >= vertexCount x" $ \x y ->
vertexCount (overlay x y) >= vertexCount x
test "vertexCount (overlay x y) <= vertexCount x + vertexCount y" $ \x y ->
vertexCount (overlay x y) <= vertexCount x + vertexCount y
test "edgeCount (overlay x y) >= edgeCount x" $ \x y ->
edgeCount (overlay x y) >= edgeCount x
test "edgeCount (overlay x y) <= edgeCount x + edgeCount y" $ \x y ->
edgeCount (overlay x y) <= edgeCount x + edgeCount y
putStrLn "\n============ Bipartite.AdjacencyMap.connect ============"
test "connect (leftVertex x) (leftVertex y) == vertices [x,y] []" $ \x y ->
connect (leftVertex x) (leftVertex y) == vertices [x,y] []
test "connect (leftVertex x) (rightVertex y) == edge x y" $ \x y ->
connect (leftVertex x) (rightVertex y) == edge x y
test "connect (rightVertex x) (leftVertex y) == edge y x" $ \x y ->
connect (rightVertex x) (leftVertex y) == edge y x
test "connect (rightVertex x) (rightVertex y) == vertices [] [x,y]" $ \x y ->
connect (rightVertex x) (rightVertex y) == vertices [] [x,y]
test "connect (vertices xs1 ys1) (vertices xs2 ys2) == overlay (biclique xs1 ys2) (biclique xs2 ys1)" $ \xs1 ys1 xs2 ys2 ->
connect (vertices xs1 ys1) (vertices xs2 ys2) == overlay (biclique xs1 ys2) (biclique xs2 ys1)
test "isEmpty (connect x y) == isEmpty x && isEmpty y" $ \x y ->
isEmpty (connect x y) ==(isEmpty x && isEmpty y)
test "hasVertex z (connect x y) == hasVertex z x || hasVertex z y" $ \x y z ->
hasVertex z (connect x y) ==(hasVertex z x || hasVertex z y)
test "vertexCount (connect x y) >= vertexCount x" $ \x y ->
vertexCount (connect x y) >= vertexCount x
test "vertexCount (connect x y) <= vertexCount x + vertexCount y" $ \x y ->
vertexCount (connect x y) <= vertexCount x + vertexCount y
test "edgeCount (connect x y) >= edgeCount x" $ \x y ->
edgeCount (connect x y) >= edgeCount x
test "edgeCount (connect x y) >= leftVertexCount x * rightVertexCount y" $ \x y ->
edgeCount (connect x y) >= leftVertexCount x * rightVertexCount y
test "edgeCount (connect x y) <= leftVertexCount x * rightVertexCount y + rightVertexCount x * leftVertexCount y + edgeCount x + edgeCount y" $ \x y ->
edgeCount (connect x y) <= leftVertexCount x * rightVertexCount y + rightVertexCount x * leftVertexCount y + edgeCount x + edgeCount y
putStrLn "\n============ Bipartite.AdjacencyMap.vertices ============"
test "vertices [] [] == empty" $
vertices [] [] == empty
test "vertices [x] [] == leftVertex x" $ \x ->
vertices [x] [] == leftVertex x
test "vertices [] [x] == rightVertex x" $ \x ->
vertices [] [x] == rightVertex x
test "vertices xs ys == overlays (map leftVertex xs ++ map rightVertex ys)" $ \xs ys ->
vertices xs ys == overlays (map leftVertex xs ++ map rightVertex ys)
test "hasLeftVertex x (vertices xs ys) == elem x xs" $ \x xs ys ->
hasLeftVertex x (vertices xs ys) == elem x xs
test "hasRightVertex y (vertices xs ys) == elem y ys" $ \y xs ys ->
hasRightVertex y (vertices xs ys) == elem y ys
putStrLn "\n============ Bipartite.AdjacencyMap.edges ============"
test "edges [] == empty" $
edges [] == empty
test "edges [(x,y)] == edge x y" $ \x y ->
edges [(x,y)] == edge x y
test "edges == overlays . map (uncurry edge)" $ \xs ->
edges xs == (overlays . map (uncurry edge)) xs
test "hasEdge x y . edges == elem (x,y)" $ \x y es ->
(hasEdge x y . edges) es == elem (x,y) es
test "edgeCount . edges == length . nub" $ \es ->
(edgeCount . edges) es == (length . nubOrd) es
putStrLn "\n============ Bipartite.AdjacencyMap.overlays ============"
test "overlays [] == empty" $
overlays [] == empty
test "overlays [x] == x" $ \x ->
overlays [x] == x
test "overlays [x,y] == overlay x y" $ \x y ->
overlays [x,y] == overlay x y
test "overlays == foldr overlay empty" $ size10 $ \xs ->
overlays xs == foldr overlay empty xs
test "isEmpty . overlays == all isEmpty" $ size10 $ \xs ->
(isEmpty . overlays) xs == all isEmpty xs
putStrLn "\n============ Bipartite.AdjacencyMap.connects ============"
test "connects [] == empty" $
connects [] == empty
test "connects [x] == x" $ \x ->
connects [x] == x
test "connects [x,y] == connect x y" $ \x y ->
connects [x,y] == connect x y
test "connects == foldr connect empty" $ size10 $ \xs ->
connects xs == foldr connect empty xs
test "isEmpty . connects == all isEmpty" $ size10 $ \ xs ->
(isEmpty . connects) xs == all isEmpty xs
putStrLn "\n============ Bipartite.AdjacencyMap.swap ============"
test "swap empty == empty" $
swap empty == empty
test "swap . leftVertex == rightVertex" $ \x ->
(swap . leftVertex) x == rightVertex x
test "swap (vertices xs ys) == vertices ys xs" $ \xs ys ->
swap (vertices xs ys) == vertices ys xs
test "swap (edge x y) == edge y x" $ \x y ->
swap (edge x y) == edge y x
test "swap . edges == edges . map Data.Tuple.swap" $ \es ->
(swap . edges) es == (edges . map Data.Tuple.swap) es
test "swap . swap == id" $ \x ->
(swap . swap) x == x
putStrLn "\n============ Bipartite.AdjacencyMap.toBipartite ============"
test "toBipartite empty == empty" $
toBipartite AM.empty == empty
test "toBipartite (vertex (Left x)) == leftVertex x" $ \x ->
toBipartite (AM.vertex (Left x)) == leftVertex x
test "toBipartite (vertex (Right x)) == rightVertex x" $ \x ->
toBipartite (AM.vertex (Right x)) == rightVertex x
test "toBipartite (edge (Left x) (Left y)) == vertices [x,y] []" $ \x y ->
toBipartite (AM.edge (Left x) (Left y)) == vertices [x,y] []
test "toBipartite (edge (Left x) (Right y)) == edge x y" $ \x y ->
toBipartite (AM.edge (Left x) (Right y)) == edge x y
test "toBipartite (edge (Right x) (Left y)) == edge y x" $ \x y ->
toBipartite (AM.edge (Right x) (Left y)) == edge y x
test "toBipartite (edge (Right x) (Right y)) == vertices [] [x,y]" $ \x y ->
toBipartite (AM.edge (Right x) (Right y)) == vertices [] [x,y]
test "toBipartite . clique == uncurry biclique . partitionEithers" $ \xs ->
(toBipartite . AM.clique) xs == (uncurry biclique . partitionEithers) xs
test "toBipartite . fromBipartite == id" $ \x ->
(toBipartite . fromBipartite) x == x
putStrLn "\n============ Bipartite.AdjacencyMap.toBipartiteWith ============"
test "toBipartiteWith f empty == empty" $ \(apply -> f) ->
toBipartiteWith f (AM.empty :: AII) == empty
test "toBipartiteWith Left x == vertices (vertexList x) []" $ \x ->
toBipartiteWith Left x == vertices (AM.vertexList x) []
test "toBipartiteWith Right x == vertices [] (vertexList x)" $ \x ->
toBipartiteWith Right x == vertices [] (AM.vertexList x)
test "toBipartiteWith f == toBipartite . gmap f" $ \(apply -> f) x ->
toBipartiteWith f x == (toBipartite . AM.gmap f) (x :: AII)
test "toBipartiteWith id == toBipartite" $ \x ->
toBipartiteWith id x == toBipartite x
putStrLn "\n============ Bipartite.AdjacencyMap.fromBipartite ============"
test "fromBipartite empty == empty" $
fromBipartite empty == AM.empty
test "fromBipartite (leftVertex x) == vertex (Left x)" $ \x ->
fromBipartite (leftVertex x) == AM.vertex (Left x)
test "fromBipartite (edge x y) == edges [(Left x, Right y), (Right y, Left x)]" $ \x y ->
fromBipartite (edge x y) == AM.edges [(Left x, Right y), (Right y, Left x)]
putStrLn "\n============ Bipartite.AdjacencyMap.fromBipartiteWith ============"
test "fromBipartiteWith Left Right == fromBipartite" $ \x ->
fromBipartiteWith Left Right x == fromBipartite x
test "fromBipartiteWith id id (vertices xs ys) == vertices (xs ++ ys)" $ \xs ys ->
fromBipartiteWith id id (vertices xs ys) == AM.vertices (xs ++ ys)
test "fromBipartiteWith id id . edges == symmetricClosure . edges" $ \xs ->
(fromBipartiteWith id id . edges) xs == (AM.symmetricClosure . AM.edges) xs
putStrLn "\n============ Bipartite.AdjacencyMap.isEmpty ============"
test "isEmpty empty == True" $
isEmpty empty == True
test "isEmpty (overlay empty empty) == True" $
isEmpty (overlay empty empty) == True
test "isEmpty (vertex x) == False" $ \x ->
isEmpty (vertex x) == False
test "isEmpty == (==) empty" $ \x ->
isEmpty x == (==) empty x
putStrLn "\n============ Bipartite.AdjacencyMap.hasLeftVertex ============"
test "hasLeftVertex x empty == False" $ \x ->
hasLeftVertex x empty == False
test "hasLeftVertex x (leftVertex y) == (x == y)" $ \x y ->
hasLeftVertex x (leftVertex y) == (x == y)
test "hasLeftVertex x (rightVertex y) == False" $ \x y ->
hasLeftVertex x (rightVertex y) == False
putStrLn "\n============ Bipartite.AdjacencyMap.hasRightVertex ============"
test "hasRightVertex x empty == False" $ \x ->
hasRightVertex x empty == False
test "hasRightVertex x (leftVertex y) == False" $ \x y ->
hasRightVertex x (leftVertex y) == False
test "hasRightVertex x (rightVertex y) == (x == y)" $ \x y ->
hasRightVertex x (rightVertex y) == (x == y)
putStrLn "\n============ Bipartite.AdjacencyMap.hasVertex ============"
test "hasVertex . Left == hasLeftVertex" $ \x y ->
(hasVertex . Left) x y == hasLeftVertex x y
test "hasVertex . Right == hasRightVertex" $ \x y ->
(hasVertex . Right) x y == hasRightVertex x y
putStrLn "\n============ Bipartite.AdjacencyMap.hasEdge ============"
test "hasEdge x y empty == False" $ \x y ->
hasEdge x y empty == False
test "hasEdge x y (vertex z) == False" $ \x y z ->
hasEdge x y (vertex z) == False
test "hasEdge x y (edge x y) == True" $ \x y ->
hasEdge x y (edge x y) == True
test "hasEdge x y == elem (x,y) . edgeList" $ \x y z -> do
let es = edgeList z
(x, y) <- elements ((x, y) : es)
return $ hasEdge x y z == elem (x, y) es
putStrLn "\n============ Bipartite.AdjacencyMap.leftVertexCount ============"
test "leftVertexCount empty == 0" $
leftVertexCount empty == 0
test "leftVertexCount (leftVertex x) == 1" $ \x ->
leftVertexCount (leftVertex x) == 1
test "leftVertexCount (rightVertex x) == 0" $ \x ->
leftVertexCount (rightVertex x) == 0
test "leftVertexCount (edge x y) == 1" $ \x y ->
leftVertexCount (edge x y) == 1
test "leftVertexCount . edges == length . nub . map fst" $ \xs ->
(leftVertexCount . edges) xs == (length . nub . map fst) xs
putStrLn "\n============ Bipartite.AdjacencyMap.rightVertexCount ============"
test "rightVertexCount empty == 0" $
rightVertexCount empty == 0
test "rightVertexCount (leftVertex x) == 0" $ \x ->
rightVertexCount (leftVertex x) == 0
test "rightVertexCount (rightVertex x) == 1" $ \x ->
rightVertexCount (rightVertex x) == 1
test "rightVertexCount (edge x y) == 1" $ \x y ->
rightVertexCount (edge x y) == 1
test "rightVertexCount . edges == length . nub . map snd" $ \xs ->
(rightVertexCount . edges) xs == (length . nub . map snd) xs
putStrLn "\n============ Bipartite.AdjacencyMap.vertexCount ============"
test "vertexCount empty == 0" $
vertexCount empty == 0
test "vertexCount (vertex x) == 1" $ \x ->
vertexCount (vertex x) == 1
test "vertexCount (edge x y) == 2" $ \x y ->
vertexCount (edge x y) == 2
test "vertexCount x == leftVertexCount x + rightVertexCount x" $ \x ->
vertexCount x == leftVertexCount x + rightVertexCount x
putStrLn "\n============ Bipartite.AdjacencyMap.edgeCount ============"
test "edgeCount empty == 0" $
edgeCount empty == 0
test "edgeCount (vertex x) == 0" $ \x ->
edgeCount (vertex x) == 0
test "edgeCount (edge x y) == 1" $ \x y ->
edgeCount (edge x y) == 1
test "edgeCount . edges == length . nub" $ \xs ->
(edgeCount . edges) xs == (length . nubOrd) xs
putStrLn "\n============ Bipartite.AdjacencyMap.leftVertexList ============"
test "leftVertexList empty == []" $
leftVertexList empty == []
test "leftVertexList (leftVertex x) == [x]" $ \x ->
leftVertexList (leftVertex x) == [x]
test "leftVertexList (rightVertex x) == []" $ \x ->
leftVertexList (rightVertex x) == []
test "leftVertexList . flip vertices [] == nub . sort" $ \xs ->
(leftVertexList . flip vertices []) xs == (nubOrd . sort) xs
putStrLn "\n============ Bipartite.AdjacencyMap.rightVertexList ============"
test "rightVertexList empty == []" $
rightVertexList empty == []
test "rightVertexList (leftVertex x) == []" $ \x ->
rightVertexList (leftVertex x) == []
test "rightVertexList (rightVertex x) == [x]" $ \x ->
rightVertexList (rightVertex x) == [x]
test "rightVertexList . vertices [] == nub . sort" $ \xs ->
(rightVertexList . vertices []) xs == (nubOrd . sort) xs
putStrLn "\n============ Bipartite.AdjacencyMap.vertexList ============"
test "vertexList empty == []" $
vertexList empty == []
test "vertexList (vertex x) == [x]" $ \x ->
vertexList (vertex x) == [x]
test "vertexList (edge x y) == [Left x, Right y]" $ \x y ->
vertexList (edge x y) == [Left x, Right y]
test "vertexList (vertices (lefts xs) (rights xs)) == nub (sort xs)" $ \xs ->
vertexList (vertices (lefts xs) (rights xs)) == nubOrd (sort xs)
putStrLn "\n============ Bipartite.AdjacencyMap.edgeList ============"
test "edgeList empty == []" $
edgeList empty == []
test "edgeList (vertex x) == []" $ \x ->
edgeList (vertex x) == []
test "edgeList (edge x y) == [(x,y)]" $ \x y ->
edgeList (edge x y) == [(x,y)]
test "edgeList . edges == nub . sort" $ \xs ->
(edgeList . edges) xs == (nubOrd . sort) xs
putStrLn "\n============ Bipartite.AdjacencyMap.leftVertexSet ============"
test "leftVertexSet empty == Set.empty" $
leftVertexSet empty == Set.empty
test "leftVertexSet . leftVertex == Set.singleton" $ \x ->
(leftVertexSet . leftVertex) x == Set.singleton x
test "leftVertexSet . rightVertex == const Set.empty" $ \x ->
(leftVertexSet . rightVertex) x == const Set.empty x
test "leftVertexSet . flip vertices [] == Set.fromList" $ \xs ->
(leftVertexSet . flip vertices []) xs == Set.fromList xs
putStrLn "\n============ Bipartite.AdjacencyMap.rightVertexSet ============"
test "rightVertexSet empty == Set.empty" $
rightVertexSet empty == Set.empty
test "rightVertexSet . leftVertex == const Set.empty" $ \x ->
(rightVertexSet . leftVertex) x == const Set.empty x
test "rightVertexSet . rightVertex == Set.singleton" $ \x ->
(rightVertexSet . rightVertex) x == Set.singleton x
test "rightVertexSet . vertices [] == Set.fromList" $ \xs ->
(rightVertexSet . vertices []) xs == Set.fromList xs
putStrLn "\n============ Bipartite.AdjacencyMap.vertexSet ============"
test "vertexSet empty == Set.empty" $
vertexSet empty == Set.empty
test "vertexSet . vertex == Set.singleton" $ \x ->
(vertexSet . vertex) x == Set.singleton x
test "vertexSet (edge x y) == Set.fromList [Left x, Right y]" $ \x y ->
vertexSet (edge x y) == Set.fromList [Left x, Right y]
test "vertexSet (vertices (lefts xs) (rights xs)) == Set.fromList xs" $ \xs ->
vertexSet (vertices (lefts xs) (rights xs)) == Set.fromList xs
putStrLn "\n============ Bipartite.AdjacencyMap.edgeSet ============"
test "edgeSet empty == Set.empty" $
edgeSet empty == Set.empty
test "edgeSet (vertex x) == Set.empty" $ \x ->
edgeSet (vertex x) == Set.empty
test "edgeSet (edge x y) == Set.singleton (x,y)" $ \x y ->
edgeSet (edge x y) == Set.singleton (x,y)
test "edgeSet . edges == Set.fromList" $ \xs ->
(edgeSet . edges) xs == Set.fromList xs
putStrLn "\n============ Bipartite.AdjacencyMap.leftAdjacencyList ============"
test "leftAdjacencyList empty == []" $
leftAdjacencyList empty == []
test "leftAdjacencyList (vertices [] xs) == []" $ \xs ->
leftAdjacencyList (vertices [] xs) == []
test "leftAdjacencyList (vertices xs []) == []" $ \xs ->
leftAdjacencyList (vertices xs []) == [(x, []) | x <- nubOrd (sort xs)]
test "leftAdjacencyList (edge x y) == [(x, [y])]" $ \x y ->
leftAdjacencyList (edge x y) == [(x, [y])]
test "leftAdjacencyList (star x ys) == [(x, nub (sort ys))]" $ \x ys ->
leftAdjacencyList (star x ys) == [(x, nubOrd (sort ys))]
putStrLn "\n============ Bipartite.AdjacencyMap.rightAdjacencyList ============"
test "rightAdjacencyList empty == []" $
rightAdjacencyList empty == []
test "rightAdjacencyList (vertices [] xs) == [(x, []) | x <- nub (sort xs)]" $ \xs ->
rightAdjacencyList (vertices [] xs) == [(x, []) | x <- nubOrd (sort xs)]
test "rightAdjacencyList (vertices xs []) == []" $ \xs ->
rightAdjacencyList (vertices xs []) == []
test "rightAdjacencyList (edge x y) == [(y, [x])]" $ \x y ->
rightAdjacencyList (edge x y) == [(y, [x])]
test "rightAdjacencyList (star x ys) == [(y, [x]) | y <- nub (sort ys)]" $ \x ys ->
rightAdjacencyList (star x ys) == [(y, [x]) | y <- nubOrd (sort ys)]
putStrLn "\n============ Bipartite.AdjacencyMap.evenList ============"
test "evenList [] == Nil" $
evenList [] == Nil @Int @Int
test "evenList [(1,2), (3,4)] == [1, 2, 3, 4] :: List Int Int" $
evenList [(1,2), (3,4)] == ([1, 2, 3, 4] :: List Int Int)
test "evenList [(1,'a'), (2,'b')] == Cons 1 (Cons 'a' (Cons 2 (Cons 'b' Nil)))" $
evenList [(1,'a'), (2 :: Int,'b')] == Cons 1 (Cons 'a' (Cons 2 (Cons 'b' Nil)))
putStrLn "\n============ Bipartite.AdjacencyMap.oddList ============"
test "oddList 1 [] == Cons 1 Nil" $
oddList 1 [] == Cons 1 (Nil @Int @Int)
test "oddList 1 [(2,3), (4,5)] == [1, 2, 3, 4, 5] :: List Int Int" $
oddList 1 [(2,3), (4,5)] ==([1, 2, 3, 4, 5] :: List Int Int)
test "oddList 1 [('a',2), ('b',3)] == Cons 1 (Cons 'a' (Cons 2 (Cons 'b' (Cons 3 Nil))))" $
oddList 1 [('a',2), ('b',3)] == Cons 1 (Cons 'a' (Cons 2 (Cons 'b' (Cons @Int 3 Nil))))
putStrLn "\n============ Bipartite.AdjacencyMap.path ============"
test "path Nil == empty" $
path Nil == empty
test "path (Cons x Nil) == leftVertex x" $ \x ->
path (Cons x Nil) == leftVertex x
test "path (Cons x (Cons y Nil)) == edge x y" $ \x y ->
path (Cons x (Cons y Nil)) == edge x y
test "path [1, 2, 3, 4, 5] == edges [(1,2), (3,2), (3,4), (5,4)]" $
path [1, 2, 3, 4, 5] == edges [(1,2), (3,2), (3,4), (5,4)]
putStrLn "\n============ Bipartite.AdjacencyMap.circuit ============"
test "circuit [] == empty" $
circuit [] == empty
test "circuit [(x,y)] == edge x y" $ \x y ->
circuit [(x,y)] == edge x y
test "circuit [(1,2), (3,4), (5,6)] == edges [(1,2), (3,2), (3,4), (5,4), (5,6), (1,6)]" $
circuit [(1,2), (3,4), (5,6)] == edges [(1,2), (3,2), (3,4), (5,4), (5,6), (1,6)]
test "circuit . reverse == swap . circuit . map Data.Tuple.swap" $ \xs ->
(circuit . reverse) xs == (swap . circuit . map Data.Tuple.swap) xs
putStrLn "\n============ Bipartite.AdjacencyMap.biclique ============"
test "biclique [] [] == empty" $
biclique [] [] == empty
test "biclique xs [] == vertices xs []" $ \xs ->
biclique xs [] == vertices xs []
test "biclique [] ys == vertices [] ys" $ \ys ->
biclique [] ys == vertices [] ys
test "biclique xs ys == connect (vertices xs []) (vertices [] ys)" $ \xs ys ->
biclique xs ys == connect (vertices xs []) (vertices [] ys)
putStrLn "\n============ Bipartite.AdjacencyMap.star ============"
test "star x [] == leftVertex x" $ \x ->
star x [] == leftVertex x
test "star x [y] == edge x y" $ \x y ->
star x [y] == edge x y
test "star x [y,z] == edges [(x,y), (x,z)]" $ \x y z ->
star x [y,z] == edges [(x,y), (x,z)]
test "star x ys == connect (leftVertex x) (vertices [] ys)" $ \x ys ->
star x ys == connect (leftVertex x) (vertices [] ys)
putStrLn "\n============ Bipartite.AdjacencyMap.stars ============"
test "stars [] == empty" $
stars [] == empty
test "stars [(x, [])] == leftVertex x" $ \x ->
stars [(x, [])] == leftVertex x
test "stars [(x, [y])] == edge x y" $ \x y ->
stars [(x, [y])] == edge x y
test "stars [(x, ys)] == star x ys" $ \x ys ->
stars [(x, ys)] == star x ys
test "star x [y,z] == edges [(x,y), (x,z)]" $ \x y z ->
star x [y,z] == edges [(x,y), (x,z)]
test "stars == overlays . map (uncurry star)" $ \xs ->
stars xs == (overlays . map (uncurry star)) xs
test "overlay (stars xs) (stars ys) == stars (xs ++ ys)" $ \xs ys ->
overlay (stars xs) (stars ys) == stars (xs ++ ys)
putStrLn "\n============ Bipartite.AdjacencyMap.mesh ============"
test "mesh xs [] == empty" $ \xs ->
mesh xs [] == B.empty @(Int,Int)
test "mesh [] ys == empty" $ \ys ->
mesh [] ys == B.empty @(Int,Int)
test "mesh [x] [y] == leftVertex (x,y)" $ \x y ->
mesh [x] [y] == B.leftVertex @(Int,Int) (x,y)
test "mesh [1,1] ['a','b'] == biclique [(1,'a'), (1,'b')] [(1,'a'), (1,'b')]" $
mesh [1,1] ['a','b'] == B.biclique @(Int,Char) [(1,'a'), (1,'b')] [(1,'a'), (1,'b')]
test "mesh [1,2] ['a','b'] == biclique [(1,'a'), (2,'b')] [(1,'b'), (2,'a')]" $
mesh [1,2] ['a','b'] == B.biclique @(Int,Char) [(1,'a'), (2,'b')] [(1,'b'), (2,'a')]
putStrLn "\n============ Bipartite.AdjacencyMap.removeLeftVertex ============"
test "removeLeftVertex x (leftVertex x) == empty" $ \x ->
removeLeftVertex x (leftVertex x) == empty
test "removeLeftVertex 1 (leftVertex 2) == leftVertex 2" $
removeLeftVertex 1 (leftVertex 2) ==(leftVertex 2 :: BAII)
test "removeLeftVertex x (rightVertex y) == rightVertex y" $ \x y ->
removeLeftVertex x (rightVertex y) == rightVertex y
test "removeLeftVertex x (edge x y) == rightVertex y" $ \x y ->
removeLeftVertex x (edge x y) == rightVertex y
test "removeLeftVertex x . removeLeftVertex x == removeLeftVertex x" $ \x (g :: BAII)->
(removeLeftVertex x . removeLeftVertex x) g == removeLeftVertex x g
putStrLn "\n============ Bipartite.AdjacencyMap.removeRightVertex ============"
test "removeRightVertex x (rightVertex x) == empty" $ \x ->
removeRightVertex x (rightVertex x) == empty
test "removeRightVertex 1 (rightVertex 2) == rightVertex 2" $
removeRightVertex 1 (rightVertex 2) ==(rightVertex 2 :: BAII)
test "removeRightVertex x (leftVertex y) == leftVertex y" $ \x y ->
removeRightVertex x (leftVertex y) == leftVertex y
test "removeRightVertex y (edge x y) == leftVertex x" $ \x y ->
removeRightVertex y (edge x y) == leftVertex x
test "removeRightVertex x . removeRightVertex x == removeRightVertex x" $ \x (y :: BAII)->
(removeRightVertex x . removeRightVertex x) y == removeRightVertex x y
putStrLn "\n============ Bipartite.AdjacencyMap.removeEdge ============"
test "removeEdge x y (edge x y) == vertices [x] [y]" $ \x y ->
removeEdge x y (edge x y) == vertices [x] [y]
test "removeEdge x y . removeEdge x y == removeEdge x y" $ \x y z ->
(removeEdge x y . removeEdge x y) z == removeEdge x y z
test "removeEdge x y . removeLeftVertex x == removeLeftVertex x" $ \x y z ->
(removeEdge x y . removeLeftVertex x) z == removeLeftVertex x z
test "removeEdge x y . removeRightVertex y == removeRightVertex y" $ \x y z ->
(removeEdge x y . removeRightVertex y) z == removeRightVertex y z
putStrLn "\n============ Bipartite.AdjacencyMap.bimap ============"
test "bimap f g empty == empty" $ \(apply -> f) (apply -> g) ->
bimap f g empty == empty
test "bimap f g . vertex == vertex . Data.Bifunctor.bimap f g" $ \(apply -> f) (apply -> g) x ->
(bimap f g . vertex) x ==(vertex . Bifunctor.bimap f g) x
test "bimap f g (edge x y) == edge (f x) (g y)" $ \(apply -> f) (apply -> g) x y ->
bimap f g (edge x y) == edge (f x) (g y)
test "bimap id id == id" $ \(x :: BAII) ->
bimap id id x == id x
test "bimap f1 g1 . bimap f2 g2 == bimap (f1 . f2) (g1 . g2)" $ \(apply -> f1 :: Int -> Int) (apply -> g1 :: Int -> Int) (apply -> f2 :: Int -> Int) (apply -> g2 :: Int -> Int) x ->
(bimap f1 g1 . bimap f2 g2) x == bimap (f1 . f2) (g1 . g2) x
putStrLn "\n============ Bipartite.AdjacencyMap.box ============"
test "box (path [0,1]) (path ['a','b']) == <correct result>" $
box (path [0,1]) (path ['a','b']) == B.edges @(Int,Char) [ ((0,'a'), (0,'b'))
, ((0,'a'), (1,'a'))
, ((1,'b'), (0,'b'))
, ((1,'b'), (1,'a')) ]
let unit x = (x, ())
biunit = B.bimap unit unit
comm (x, y) = (y, x)
bicomm = B.bimap comm comm
assoc ((x, y), z) = (x, (y, z))
biassoc = B.bimap assoc assoc
putStrLn ""
test "box x y ~~ box y x" $ size10 $ \(x :: BAII) (y :: BAII) ->
box x y == bicomm (box y x)
test "box x (box y z) ~~ box (box x y) z" $ size10 $ \(x :: BAII) (y :: BAII) (z :: BAII) ->
box x (box y z) == biassoc (box (box x y) z)
test "box x (box y z) ~~ box (box x y) z" $ mapSize (min 3) $ \(x :: BAII) (y :: BAII) (z :: BAII) ->
box x (box y z) == biassoc (box (box x y) z)
test "box x (leftVertex ()) ~~ x" $ size10 $ \(x :: BAII) ->
box x (B.leftVertex ()) == biunit x
test "box x (rightVertex ()) ~~ swap x" $ size10 $ \(x :: BAII) ->
box x (B.rightVertex ()) == biunit (B.swap x)
test "box x empty ~~ empty" $ size10 $ \(x :: BAII) ->
box x B.empty == biunit empty
test "vertexCount (box x y) <= vertexCount x * vertexCount y" $ size10 $ \(x :: BAII) (y :: BAII) ->
B.vertexCount (box x y) <= vertexCount x * vertexCount y
test "edgeCount (box x y) <= vertexCount x * edgeCount y + edgeCount x * vertexCount y" $ size10 $ \(x :: BAII) (y :: BAII) ->
B.edgeCount (box x y) <= vertexCount x * edgeCount y + edgeCount x * vertexCount y
putStrLn ""
test "box == boxWith (,) (,) (,) (,)" $ size10 $ \(x :: BAII) (y :: BAII) ->
box x y == boxWith (,) (,) (,) (,) x y
putStrLn "\n============ Bipartite.AdjacencyMap.consistent ============"
test "consistent empty == True" $
consistent empty == True
test "consistent (vertex x) == True" $ \x ->
consistent (vertex x) == True
test "consistent (edge x y) == True" $ \x y ->
consistent (edge x y) == True
test "consistent (edges x) == True" $ \x ->
consistent (edges x) == True
test "consistent (toBipartite x) == True" $ \x ->
consistent (toBipartite x) == True
test "consistent (swap x) == True" $ \x ->
consistent (swap x) == True
test "consistent (circuit xs) == True" $ \xs ->
consistent (circuit xs) == True
test "consistent (biclique xs ys) == True" $ \xs ys ->
consistent (biclique xs ys) == True
testBipartiteAdjacencyMapAlgorithm :: IO ()
testBipartiteAdjacencyMapAlgorithm = do
putStrLn "\n============ Bipartite.AdjacencyMap.Algorithm.detectParts ============"
test "detectParts empty == Right empty" $
detectParts (AM.empty :: AI) == Right empty
test "detectParts (vertex 1) == Right (leftVertex 1)" $
detectParts (AM.vertex 1 :: AI) == Right (leftVertex 1)
test "detectParts (edge 1 1) == Left [1]" $
detectParts (AM.edge 1 1 :: AI) == Left [1]
test "detectParts (edge 1 2) == Right (edge 1 2)" $
detectParts (AM.edge 1 2 :: AI) == Right (edge 1 2)
test "detectParts (edge 0 (-1)) == Right (edge (-1) 0)" $
detectParts (AM.edge 0 (-1) :: AI) == Right (edge (-1) 0)
test "detectParts (1 * (2 + 3)) == Right (edges [(1, 2), (1, 3)])" $
detectParts (1 * (2 + 3) :: AI) == Right (edges [(1, 2), (1, 3)])
test "detectParts ((1 + 3) * (2 + 4) + 6 * 5) == Right (swap (1 + 3) * (2 + 4) + swap 5 * 6" $
detectParts ((1 + 3) * (2 + 4) + 6 * 5 :: AI) == Right (swap (1 + 3) * (2 * 4) + swap 5 * 6)
test "detectParts ((1 + 2) * (3 + 4) * (5 + 6)) == Left [1, 3, 2, 4, 5]" $
detectParts ((1 + 2) * (3 + 4) * (5 + 6) :: AI) == Left [1, 3, 2, 4, 5]
test "detectParts ((1 + 2) * (3 + 4) + (3 + 4) * 5) == Right (swap (1 + 2) * (3 + 4) + swap 5 * (3 + 4))" $
detectParts ((1 + 2) * (3 + 4) + (3 + 4) * 5 :: AI) == Right (swap (1 + 2) * (3 + 4) + swap 5 * (3 + 4))
test "detectParts (1 * 2 * 3) == Left [2, 3, 1]" $
detectParts (1 * 2 * 3 :: AI) == Left [1, 2, 3]
test "detectParts ((1 * 3 * 4) + 2 * (1 + 2)) == Left [2]" $
detectParts ((1 * 3 * 4) + 2 * (1 + 2) :: AI) == Left [2]
test "detectParts (clique [1..10]) == Left [1, 2, 3]" $
detectParts (AM.clique [1..10] :: AI) == Left [1, 2, 3]
test "detectParts (circuit [1..11]) == Left [1..11]" $
detectParts (AM.circuit [1..11] :: AI) == Left [1..11]
test "detectParts (circuit [1..10]) == Right (circuit [(2 * x - 1, 2 * x) | x <- [1..5]])" $
detectParts (AM.circuit [1..10] :: AI) == Right (circuit [(2 * x - 1, 2 * x) | x <- [1..5]])
test "detectParts (biclique [] xs) == Right (vertices xs [])" $ \(xs :: [Int]) ->
detectParts (AM.biclique [] xs :: AI) == Right (vertices xs [])
test "detectParts (biclique (map Left (x:xs)) (map Right ys)) == Right (biclique (map Left (x:xs)) (map Right ys))" $ \(x :: Int) (xs :: [Int]) (ys :: [Int]) ->
detectParts (AM.biclique (map Left (x:xs)) (map Right ys)) == Right (biclique (map Left (x:xs)) (map Right ys))
test "isRight (detectParts (star x ys)) == not (elem x ys)" $ \(x :: Int) (ys :: [Int]) ->
isRight (detectParts (AM.star x ys)) == (not $ elem x ys)
test "isRight (detectParts (fromBipartite (toBipartite x))) == True" $ \(x :: AII) ->
isRight (detectParts (fromBipartite (toBipartite x))) == True
-- TODO: Clean up these tests
putStrLn ""
test "((all ((flip Set.member) $ edgeSet $ symmetricClosure x) . edgeSet) <$> detectParts x) /= Right False" $ \(x :: AI) ->
((all ((flip Set.member) $ AM.edgeSet $ AM.symmetricClosure x) . edgeSet) <$> detectParts x) /= Right False
test "(Set.map $ fromEither) <$> (vertexSet <$> (detectParts (fromBipartite (toBipartite x)))) == Right (vertexSet x)" $ \(x :: AII) ->
((Set.map $ fromEither) <$> (vertexSet <$> (detectParts (fromBipartite (toBipartite x))))) == Right (AM.vertexSet x)
test "fromEither (Bifunctor.bimap ((flip Set.isSubsetOf) (vertexSet x) . Set.fromList) (const True) (detectParts x)) == True" $ \(x :: AI) ->
fromEither (Bifunctor.bimap ((flip Set.isSubsetOf) (AM.vertexSet x) . Set.fromList) (const True) (detectParts x))
test "fromEither (Bifunctor.bimap ((flip Set.isSubsetOf) (edgeSet (symmetricClosure x)) . AM.edgeSet . circuit) (const True) (detectParts x)) == True" $ \(x :: AI) ->
fromEither (Bifunctor.bimap ((flip Set.isSubsetOf) (AM.edgeSet (AM.symmetricClosure x)) . AM.edgeSet . AM.circuit) (const True) (detectParts x))
test "fromEither (Bifunctor.bimap (((==) 1) . ((flip mod) 2) . length) (const True) (detectParts x)) == True" $ \(x :: AI) ->
fromEither (Bifunctor.bimap (((==) 1) . ((flip mod) 2) . length) (const True) (detectParts x))
putStrLn "\n============ Show (Bipartite.AdjacencyMap.Algorithm.Matching a b) ============"
test "show (matching []) == \"matching []\"" $
show (matching [] :: MII) == "matching []"
test "show (matching [(2,'a'),(1,'b')]) == \"matching [(1,'b'),(2,'a')]\"" $
show (matching [(2,'a'),(1,'b')] :: MIC) == "matching [(1,'b'),(2,'a')]"
putStrLn "\n============ Eq (Bipartite.AdjacencyMap.Algorithm.Matching a b) ============"
test "(x == y) == ((pairOfLeft x == pairOfLeft y) && (pairOfRight x == pairOfRight y))" $ \(x :: MII) (y :: MII) ->
(x == y) == ((pairOfLeft x == pairOfLeft y) && (pairOfRight x == pairOfRight y))
putStrLn "\n============ Bipartite.AdjacencyMap.Algorithm.pairOfLeft ============"
test "pairOfLeft (matching []) == Map.empty" $
pairOfLeft (matching [] :: MII) == Map.empty
test "pairOfLeft (matching [(2,'a'), (1,'b')]) == Map.fromList [(2,'a'), (1,'b')]" $
pairOfLeft (matching [(2,'a'), (1,'b')] :: MIC) == Map.fromList [(2,'a'), (1,'b')]
test "Map.size . pairOfLeft == Map.size . pairOfRight" $ \(x :: MII) ->
(Map.size . pairOfLeft) x ==(Map.size . pairOfRight) x
putStrLn "\n============ Bipartite.AdjacencyMap.Algorithm.pairOfRight ============"
test "pairOfRight (matching []) == Map.empty" $
pairOfRight (matching [] :: MII) == Map.empty
test "pairOfRight (matching [(2,'a'), (1,'b')]) == Map.fromList [('a',2), ('b',1)]" $
pairOfRight (matching [(2,'a'), (1,'b')] :: MIC) == Map.fromList [('a',2), ('b',1)]
test "Map.size . pairOfRight == Map.size . pairOfLeft" $ \(x :: MII) ->
(Map.size . pairOfRight) x ==(Map.size . pairOfLeft) x
putStrLn "\n============ Bipartite.AdjacencyMap.Algorithm.matching ============"
test "matching [(1,'a'), (1,'b')] == matching [(1,'b')]" $
matching [(1,'a'), (1,'b')] == (matching [(1,'b')] :: MIC)
test "matching [(1,'a'), (1,'b'), (2,'b'), (2,'a')] == matching [(2,'a')]" $
matching [(1,'a'), (1,'b'), (2,'b'), (2,'a')] == (matching [(2,'a')] :: MIC)
putStrLn "\n============ Bipartite.AdjacencyMap.Algorithm.isMatchingOf ============"
test "isMatchingOf (matching []) x == True" $ \(x :: BAII) ->
isMatchingOf (matching []) x == True
test "isMatchingOf (matching xs) empty == null xs" $ \(xs :: [(Int, Int)]) ->
isMatchingOf (matching xs) empty == null xs
test "isMatchingOf (matching [(x,y)]) (edge x y) == True" $ \(x :: Int) (y :: Int) ->
isMatchingOf (matching [(x,y)]) (edge x y) == True
test "isMatchingOf (matching [(1,2)]) (edge 2 1) == False" $
isMatchingOf (matching [(1,2)]) (edge 2 1 :: BAII) == False
putStrLn "\n============ Bipartite.AdjacencyMap.Algorithm.matchingSize ============"
test "matchingSize (matching []) == 0" $
matchingSize (matching [] :: MII) == 0
test "matchingSize (matching [(2,'a'), (1,'b')]) == 2" $
matchingSize (matching [(2,'a'), (1,'b')] :: MIC) == 2
test "matchingSize (matching [(1,'a'), (1,'b')]) == 1" $
matchingSize (matching [(1,'a'), (1,'b')] :: MIC) == 1
test "matchingSize (matching xs) <= length xs" $ \(xs :: [(Int, Int)]) ->
matchingSize (matching xs) <= length xs
test "matchingSize x == Map.size . pairOfLeft" $ \(x :: MII) ->
matchingSize x ==(Map.size . pairOfLeft) x
putStrLn "\n============ Bipartite.AdjacencyMap.Algorithm.maxMatching ============"
test "maxMatching empty == matching []" $
maxMatching (empty :: BAII) == matching []
test "maxMatching (vertices xs ys) == matching []" $ \(xs :: [Int]) (ys :: [Int]) ->
maxMatching (vertices xs ys) == matching []
test "maxMatching (path [1,2,3,4]) == matching [(1,2), (3,4)]" $
maxMatching (path ([1,2,3,4] :: LII)) == matching [(1,2), (3,4)]
test "matchingSize (maxMatching (circuit [(1,2), (3,4), (5,6)])) == 3" $
matchingSize (maxMatching (circuit [(1,2), (3,4), (5,6)] :: BAII)) == 3
test "matchingSize (maxMatching (star x (y:ys))) == 1" $ \(x :: Int) (y :: Int) (ys :: [Int]) ->
matchingSize (maxMatching (star x (y:ys))) == 1
test "matchingSize (maxMatching (biclique xs ys)) == min (length (nub xs)) (length (nub ys))" $ \(xs :: [Int]) (ys :: [Int]) ->
matchingSize (maxMatching (biclique xs ys)) == min (length (nub xs)) (length (nub ys))
test "isMatchingOf (maxMatching x) x == True" $ \(x :: BAII) ->
isMatchingOf (maxMatching x) x == True
putStrLn "\n============ Bipartite.AdjacencyMap.Algorithm.isVertexCoverOf ============"
test "isVertexCoverOf (xs , ys ) empty == Set.null xs && Set.null ys" $ \(xs :: Set Int) (ys :: Set Int) ->
isVertexCoverOf (xs , ys ) empty ==(Set.null xs && Set.null ys)
test "isVertexCoverOf (xs , ys ) (leftVertex x) == Set.isSubsetOf xs (Set.singleton x) && Set.null ys" $ \(x :: Int) (xs :: Set Int) (ys :: Set Int) ->
isVertexCoverOf (xs , ys ) (leftVertex x) ==(Set.isSubsetOf xs (Set.singleton x) && Set.null ys)
test "isVertexCoverOf (Set.empty , Set.empty ) (edge x y) == False" $ \(x :: Int) (y :: Int) ->
isVertexCoverOf (Set.empty , Set.empty ) (edge x y) == False
test "isVertexCoverOf (Set.singleton x, ys ) (edge x y) == Set.isSubsetOf ys (Set.singleton y)" $ \(x :: Int) (y :: Int) (ys :: Set Int) ->
isVertexCoverOf (Set.singleton x, ys ) (edge x y) == Set.isSubsetOf ys (Set.singleton y)
test "isVertexCoverOf (xs , Set.singleton y) (edge x y) == Set.isSubsetOf xs (Set.singleton x)" $ \(x :: Int) (y :: Int) (xs :: Set Int) ->
isVertexCoverOf (xs , Set.singleton y) (edge x y) == Set.isSubsetOf xs (Set.singleton x)
putStrLn "\n============ Bipartite.AdjacencyMap.Algorithm.minVertexCover ============"
test "minVertexCover empty == (Set.empty, Set.empty)" $
minVertexCover (empty :: BAII) == (Set.empty, Set.empty)
test "minVertexCover (vertices xs ys) == (Set.empty, Set.empty)" $ \(xs :: [Int]) (ys :: [Int]) ->
minVertexCover (vertices xs ys) == (Set.empty, Set.empty)
test "minVertexCover (path [1,2,3]) == (Set.empty, Set.singleton 2)" $
minVertexCover (path [1,2,3] :: BAII) == (Set.empty, Set.singleton 2)
test "minVertexCover (star x (1:2:ys)) == (Set.singleton x, Set.empty)" $ \(x :: Int) (ys :: [Int]) ->
minVertexCover (star x (1:2:ys) :: BAII) == (Set.singleton x, Set.empty)
test "vertexCoverSize (minVertexCover (biclique xs ys)) == min (length (nub xs)) (length (nub ys))" $ size10 $ \(xs :: [Int]) (ys :: [Int]) ->
vertexCoverSize (minVertexCover (biclique xs ys)) == min (length (nub xs)) (length (nub ys))
test "vertexCoverSize . minVertexCover == matchingSize . maxMatching" $ \(x :: BAII) ->
(vertexCoverSize . minVertexCover) x ==(matchingSize . maxMatching) x
test "isVertexCoverOf (minVertexCover x) x == True" $ \(x :: BAII) ->
isVertexCoverOf (minVertexCover x) x == True
putStrLn "\n============ Bipartite.AdjacencyMap.Algorithm.isIndependentSetOf ============"
test "isIndependentSetOf (xs , ys ) empty == Set.null xs && Set.null ys" $ \(xs :: Set Int) (ys :: Set Int) ->
isIndependentSetOf (xs , ys ) empty ==(Set.null xs && Set.null ys)
test "isIndependentSetOf (xs , ys ) (leftVertex x) == Set.isSubsetOf xs (Set.singleton x) && Set.null ys" $ \(x :: Int) (xs :: Set Int) (ys :: Set Int) ->
isIndependentSetOf (xs , ys ) (leftVertex x) ==(Set.isSubsetOf xs (Set.singleton x) && Set.null ys)
test "isIndependentSetOf (Set.empty , Set.empty ) (edge x y) == True" $ \(x :: Int) (y :: Int) ->
isIndependentSetOf (Set.empty , Set.empty ) (edge x y) == True
test "isIndependentSetOf (Set.singleton x, ys ) (edge x y) == Set.null ys" $ \(x :: Int) (y :: Int) (ys :: Set Int) ->
isIndependentSetOf (Set.singleton x, ys ) (edge x y) == Set.null ys
test "isIndependentSetOf (xs , Set.singleton y) (edge x y) == Set.null xs" $ \(x :: Int) (y :: Int) (xs :: Set Int) ->
isIndependentSetOf (xs , Set.singleton y) (edge x y) == Set.null xs
putStrLn "\n============ Bipartite.AdjacencyMap.Algorithm.maxIndependentSet ============"
test "maxIndependentSet empty == (Set.empty, Set.empty)" $
maxIndependentSet (empty :: BAII) == (Set.empty, Set.empty)
test "maxIndependentSet (vertices xs ys) == (Set.fromList xs, Set.fromList ys)" $ \(xs :: [Int]) (ys :: [Int]) ->
maxIndependentSet (vertices xs ys) == (Set.fromList xs, Set.fromList ys)
test "maxIndependentSet (path [1,2,3]) == (Set.fromList [1,3], Set.empty)" $
maxIndependentSet (path [1,2,3] :: BAII) == (Set.fromList [1,3], Set.empty)
test "maxIndependentSet (star x (1:2:ys)) == (Set.empty, Set.fromList (1:2:ys))" $ \(x :: Int) (ys :: [Int]) ->
maxIndependentSet (star x (1:2:ys)) == (Set.empty, Set.fromList (1:2:ys))
test "independentSetSize (maxIndependentSet (biclique xs ys)) == max (length (nub xs)) (length (nub ys))" $ \(xs :: [Int]) (ys :: [Int]) ->
independentSetSize (maxIndependentSet (biclique xs ys)) == max (length (nub xs)) (length (nub ys))
test "independentSetSize (maxIndependentSet x) == vertexCount x - vertexCoverSize (minVertexCover x)" $ \(x :: BAII) ->
independentSetSize (maxIndependentSet x) == vertexCount x - vertexCoverSize (minVertexCover x)
test "isIndependentSetOf (maxIndependentSet x) x == True" $ \(x :: BAII) ->
isIndependentSetOf (maxIndependentSet x) x == True
putStrLn "\n============ Bipartite.AdjacencyMap.Algorithm.augmentingPath ============"
test "augmentingPath (matching []) empty == Left (Set.empty, Set.empty)" $
augmentingPath (matching []) (empty :: BAII) == Left (Set.empty, Set.empty)
test "augmentingPath (matching []) (edge 1 2) == Right [1,2]" $
augmentingPath (matching []) (edge 1 2) == Right ([1,2] :: LII)
test "augmentingPath (matching [(1,2)]) (path [1,2,3]) == Left (Set.empty, Set.singleton 2)" $
augmentingPath (matching [(1,2)]) (path [1,2,3] :: BAII) == Left (Set.empty, Set.singleton 2)
test "augmentingPath (matching [(3,2)]) (path [1,2,3,4]) == Right [1,2,3,4]" $
augmentingPath (matching [(3,2)]) (path [1,2,3,4]) == Right ([1,2,3,4] :: LII)
test "isLeft (augmentingPath (maxMatching x) x) == True" $ \(x :: BAII) ->
isLeft (augmentingPath (maxMatching x) x) == True
putStrLn "\n============ Bipartite.AdjacencyMap.Algorithm.consistentMatching ============"
test "consistentMatching (matching xs) == True" $ \(xs :: [(Int,Int)]) ->
consistentMatching (matching xs) == True
test "consistentMatching (maxMatching x) == True" $ \(x :: BAII) ->
consistentMatching (maxMatching x) == True
| snowleopard/alga | test/Algebra/Graph/Test/Bipartite/AdjacencyMap.hs | mit | 60,910 | 0 | 20 | 20,018 | 16,940 | 8,331 | 8,609 | -1 | -1 |
{-# htermination id :: a -> a #-}
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/full_haskell/Prelude_id_1.hs | mit | 34 | 0 | 2 | 8 | 3 | 2 | 1 | 1 | 0 |
-- | Convert linear constraints that only mention one variable to bounds
module Numeric.Limp.Canon.Simplify.Bounder where
import Numeric.Limp.Canon.Constraint
import Numeric.Limp.Canon.Linear
import Numeric.Limp.Canon.Program
import Numeric.Limp.Rep
import Numeric.Limp.Error
import Data.Either
import qualified Data.Map as M
type Bound z r c = (Either z r, (Maybe (R c), Maybe (R c)))
-- | Convert a single constraint into a bound, if possible.
--
-- > bounder $ Constraint (5 <= y <= 10)
-- > == Bound (Just 5) y (Just 10)
--
-- > bounder $ Constraint (5 <= 2y <= 10)
-- > == Bound (Just 2.5) y (Just 5)
--
-- > bounder $ Constraint (10 <= 2y <= 5)
-- > == Left InfeasibleBoundEmpty
--
bounderConstraint1 :: (Ord z, Ord r, Rep c) => Constraint1 z r c -> Either Infeasible (Maybe (Bound z r c))
bounderConstraint1 (C1 low (Linear mf) upp)
| M.size mf == 1
, [(k,c)] <- M.toList mf
, c /= 0
= let fixup = (/ c)
low' = fmap fixup low
upp' = fmap fixup upp
bounds
| c >= 0
= (low',upp')
| otherwise
= (upp',low')
valid
| (Just lo, Just hi) <- bounds
= lo <= hi
| otherwise
= True
in if valid
then Right $ Just (k, bounds)
else Left InfeasibleNotIntegral
| otherwise
= Right Nothing
bounderConstraint :: (Ord z, Ord r, Rep c) => Constraint z r c -> Either Infeasible (Constraint z r c, [Bound z r c])
bounderConstraint (Constraint cs)
= do (cs', bs) <- partitionEithers <$> mapM bounderC cs
return (Constraint cs', bs)
where
bounderC c
= do c' <- bounderConstraint1 c
return $ case c' of
Nothing -> Left c
Just b -> Right b
--
bounderProgram :: (Ord z, Ord r, Rep c) => Program z r c -> Either Infeasible (Program z r c)
bounderProgram p
= do (c',bs) <- bounderConstraint $ _constraints p
return $ p
{ _constraints = c'
, _bounds = foldl merge (_bounds p) bs }
where
merge m (k,v)
= case M.lookup k m of
Just v'
-> M.insert k (mergeBounds v' v) m
Nothing
-> M.insert k v m
| amosr/limp | src/Numeric/Limp/Canon/Simplify/Bounder.hs | mit | 2,150 | 0 | 15 | 632 | 738 | 387 | 351 | 53 | 2 |
{-# htermination (fromIntMyInt :: MyInt -> MyInt) #-}
import qualified Prelude
data MyBool = MyTrue | MyFalse
data List a = Cons a (List a) | Nil
data MyInt = Pos Nat | Neg Nat ;
data Nat = Succ Nat | Zero ;
fromIntMyInt :: MyInt -> MyInt
fromIntMyInt x = x;
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/basic_haskell/fromInt_3.hs | mit | 271 | 0 | 8 | 65 | 83 | 49 | 34 | 7 | 1 |
-- Name : Lotus Sudoku Solver
-- Author: Sharynne Azhar
-- Description : A Haskell-based solver
-- Updated: 05-02-2016
{-# OPTIONS_HADDOCK prune #-}
module Main where
import Data.List
import Data.List.Split
import Data.Maybe (fromMaybe)
import qualified Data.Map as Map
type Lotus = [Int]
type Solns = [Int]
type Indices = [Int]
type Index = Int
{--------------------------
----- ACCESSORS ----------
--------------------------}
-- | A list of a list of indices for each left opening arc
leftArcs :: [Indices]
leftArcs = [[0,7,15,22,30,37,45],[1,8,16,23,31,38,46],[2,9,17,24,32,39,47],
[3,10,18,25,33,40,48],[4,11,19,26,34,41,42],[5,12,20,27,28,35,43],
[6,13,14,21,29,36,44]]
-- | A list of indices for each right opening arc
rightArcs :: [Indices]
rightArcs = [[0,13,20,26,33,39,46],[1,7,14,27,34,40,47],[2,8,15,21,28,41,48],
[3,9,16,22,29,35,42],[4,10,17,23,30,36,43], [5,11,18,24,31,37,44],
[6,12,19,25,32,38,45]]
-- | Finds and returns the list of indices containing that index value.
-- It generates a mapping from each index to the list that contains it, as a list
-- of tuples such as (32,[2,9,17,24,32,39,47]). Then, using the index as a map
-- key, it finds and returns the list of indices.
--
-- Resource at http://stackoverflow.com/questions/36878340/
getIndices :: (Ord a) => [[a]] -- ^ a list containing a list of indices
-> a -- ^ the current index position
-> [a] -- ^ the list of indices containing that index position
getIndices lst ind = fromMaybe [] (Map.lookup ind listOfIndices)
where listOfIndices = Map.fromList mappedIndices
mappedIndices = concatMap (\x -> zip x (repeat x)) lst
-- | Concatenates list of the arc and ring indices containing the current index and returns it.
getArcRings :: Index -- ^ the current index position
            -> Indices -- ^ the list of indices in the arcs and ring containing that index
getArcRings ind = getRing ind ++ getIndices leftArcs ind ++ getIndices rightArcs ind
where getRing n = [x..x + 6] where x = 7 * div n 7
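-- For example (illustrative; values follow from the arc tables above):
-- getArcRings 0 == [0..6] ++ [0,7,15,22,30,37,45] ++ [0,13,20,26,33,39,46]
-- i.e. the first ring, the first left arc and the first right arc.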
-- | Returns the values from the lotus puzzle based on the indices
getValues :: Lotus -- ^ the lotus puzzle
             -> Indices -- ^ a list of indices
             -> [Int] -- ^ a list of corresponding values at each index given
getValues lts = map (lts !!)
{--------------------------
-------- SOLVER ----------
--------------------------}
-- | Creates a new lotus with the new value inserted at index given
returnBoard :: Int -- ^ a value to insert/replace
-> Index -- ^ index where the value should be inserted/replaced
-> Lotus -- ^ the lotus puzzle
-> Lotus -- ^ a new lotus containing the new value
returnBoard val ind lts = take ind lts ++ [val] ++ drop (ind + 1) lts
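-- For example (illustrative only): returnBoard 7 2 [1,2,3,4] == [1,2,7,4]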
-- | Lists all the possible values that can be a solution to a particular ring/arc.
possibleSolns :: Index -- ^ the current index
-> Lotus -- ^ the lotus puzzle
-> Solns -- ^ a list of possible solutions that can be at the current index
possibleSolns ind lts
| ind > 48 = []
| lts !! ind == 0 = [1..7] \\ arcRingIndices ind
| otherwise = [lts !! ind]
where arcRingIndices n = getValues lts (getArcRings n)
-- | Solves the lotus puzzle using recursion (i.e. brute force method).
-- For every empty (zero) position in the Lotus, the solver tries all the possible
-- values until the Lotus is complete. An unsolvable lotus puzzle will return
-- an empty list.
doSolve :: Index -- ^ the current index
-> Solns -- ^ the list of possible solutions
-> Lotus -- ^ the lotus puzzle
-> Lotus -- ^ the solved (or empty) lotus puzzle
doSolve 48 [x] lts = returnBoard x 48 lts
doSolve 48 [] _ = []
doSolve 48 _ _ = []
doSolve _ [] _ = []
doSolve ind (x:xs) lts
| null solvedNext = doSolve ind xs lts
| otherwise = solvedNext
where recurseNext n s = doSolve (n + 1) (possibleSolns (n + 1) s) s
solvedNext = recurseNext ind (returnBoard x ind lts)
-- | Higher order solve method to tie the rest together
lotusSolver :: [Int] -- ^ the unsolved lotus puzzle
-> [Int] -- ^ a solved puzzle
lotusSolver lts = doSolve 0 (possibleSolns 0 lts) lts
{--------------------------
----- HELPERS/TESTS ------
--------------------------}
-- | Prints a readable lotus in matrix form to console
--
-- Resource from http://stackoverflow.com/questions/12791400
printLotus :: (Show e) => [e] -- ^ the lotus puzzle
-> String -- ^ an aesthetically pleasing lotus
printLotus lts = if null lts then "\nNo solution\n"
else "\n" ++ unlines (map show (chunksOf 7 lts))
-- | Checks if the arc/ring contains the numbers 1 to 7.
-- Verifies that each value in the list is between 1 and 7 and that no
-- value is repeated. For efficiency, the list is first sorted and then
-- adjacent elements are compared pairwise.
--
-- Resource from http://stackoverflow.com/questions/31036474/
checkValues :: [Int] -- ^ the list of values to check
-> Bool -- ^ true if the list contains values 1 to 7 with no repeats
checkValues ind = all (`elem` [1..7]) ind && allDifferent ind
where allDifferent = comparePairwise.sort
comparePairwise n = and (zipWith (/=) n (drop 1 n))
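-- Illustrative examples (follow directly from the definition):
-- checkValues [3,1,2,7,6,5,4] == True   -- a permutation of 1..7
-- checkValues [1,1,2,3,4,5,6] == False  -- repeated value
-- checkValues [0,1,2,3,4,5,6] == False  -- 0 is outside 1..7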
-- | Checks all the arcs and the ring containing the given index
checkAll :: Lotus -- ^ the lotus puzzle
-> Index -- ^ the current index
-> Bool -- ^ true if all conditions of a complete lotus are satisfied
checkAll lts ind = all func [getIndices leftArcs ind, getIndices rightArcs ind, getRing ind]
where getRing n = [x..x + 6] where x = 7 * div n 7
func n = checkValues (getValues lts n)
-- | Tests solvable puzzles
runTest :: String -- ^ name of the test
-> Lotus -- ^ the lotus puzzle
-> String -- ^ success or failed text
runTest name lts = do
let solvedLotus = lotusSolver lts
if all (checkAll solvedLotus) [0..48] then show name ++ " passed!"
else "### " ++ show name ++ " failed! ###"
-- | Tests unsolvable puzzles
runFailedTest :: String -- ^ name of the test
-> Lotus -- ^ the lotus puzzle
-> String -- ^ success or failed text
runFailedTest name lts
| null solvedLotus = printSuccess
| not (all (checkAll solvedLotus) [0..48]) = printSuccess
| otherwise = "### " ++ show name ++ " failed! ###"
where solvedLotus = lotusSolver lts
printSuccess = show name ++ " passed!"
{--------------------------
---------- MAIN -----------
--------------------------}
main :: IO()
main = do
putStrLn "\nTestA\n===================="
putStrLn $ "Before:" ++ printLotus testA
putStrLn $ "After:" ++ printLotus (lotusSolver testA)
putStrLn $ "Check: " ++ runTest "testA" testA
putStrLn "\nSuccess Cases\n===================="
putStrLn $ runTest "testB" testB
putStrLn $ runTest "testC" testC
putStrLn $ runTest "testD" testD
putStrLn $ runTest "testE" testE
putStrLn "\n\nFailed Cases\n===================="
putStrLn $ runFailedTest "testF" testF
putStrLn $ runFailedTest "testG" testG
{--------------------------
--- EXAMPLES PUZZLES ------
--------------------------}
-- Success Tests
testA :: Lotus
testA = [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7,0,0,0,3,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0]
testB :: Lotus
testB = [4,1,2,3,6,0,0,0,0,0,0,1,0,0,0,1,7,4,0,0,2,0,0,0,0,1,0,
5,3,0,0,4,0,0,0,0,7,0,0,0,0,0,0,1,2,0,0,0,0]
testC :: Lotus
testC = [0,1,0,7,6,0,0,4,0,0,1,0,0,0,0,0,6,0,0,5,0,0,0,0,0,0,0,
5,0,0,0,0,0,2,0,2,0,0,0,0,0,0,0,4,0,0,0,0,0]
testD :: Lotus
testD = [4,0,5,3,0,1,7,1,7,0,0,0,0,0,0,0,6,0,0,5,2,1,2,3,0,0,0,
5,6,0,7,4,0,1,3,0,0,0,0,0,0,0,1,4,0,6,0,7,0]
testE :: Lotus
testE = [5,0,0,0,1,6,0,0,0,0,3,0,0,0,7,0,6,2,1,0,0,0,1,7,0,0,6,
0,0,5,0,3,6,7,2,0,0,2,1,0,0,4,0,0,4,0,0,1,0]
-- Fail Tests
testF :: Lotus
testF = [4,4,7,2,1,6,3,6,5,4,3,7,2,1,7,3,6,2,1,5,4,2,1,7,5,4,6,3,
1,5,4,3,6,7,2,7,6,2,1,3,5,4,3,5,4,7,2,1,6]
testG :: Lotus
testG = [5,0,0,0,1,6,0,0,5,5,3,0,0,0,7,0,6,2,1,0,0,0,1,7,0,0,6,
0,0,5,0,3,6,7,2,0,0,2,1,0,0,4,0,0,4,0,0,1,0]
| sharynneazhar/lotus_sudoku | lotus.hs | mit | 8,380 | 0 | 12 | 1,902 | 2,778 | 1,667 | 1,111 | 127 | 2 |
module Main (main) where
import System.Exit
import Control.Monad
import qualified Data.ByteString.Lazy as L
import Client
main :: IO ()
main = do
(success, output) <- client
L.putStr output
unless success exitFailure
| hspec/sensei | driver/seito.hs | mit | 256 | 0 | 8 | 70 | 75 | 42 | 33 | 10 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
-- | Defines what can be a Server's State, and some utility functions for that.
module Hetcons.Hetcons_State
( Hetcons_State
, Participant_State_Var
, Participant_State
, Observer_State_Var
, Observer_State
, default_State
, conflicting_state
, new_State
, start_State
, modify
, read
, modify_and_read
, state_by_observers
) where
import Hetcons.Instances_1b_2a ()
import Hetcons.Signed_Message
( Recursive_1b
,Verified
,Recursive_2b )
import Hetcons.Value
( Contains_1a
,extract_observer_quorums
,Value
,garbage_collect
,conflicts
)
import Control.Concurrent.MVar
( MVar
,modifyMVar_
,modifyMVar
,newMVar
,readMVar )
import Data.Foldable ( toList, any )
import qualified Data.HashSet as HashSet ( map, filter )
import Data.HashSet ( HashSet, fromList, empty )
import Data.Hashable ( Hashable )
import Prelude
( (.), (==), Bool, Foldable, IO, ($), return, Eq, id )
-- | To be a State type (the state kept by a server), you need a `write_prep` function, which is run on an object before saving it to state.
-- This is where, say, any garbage collection would go.
class Hetcons_State a where
write_prep :: a -> a
-- | Participants store literally the set of 1b messages received or sent thus far (or at least those which have been verified)
type Participant_State v = HashSet (Verified (Recursive_1b v))
-- | Therefore, the `write_prep` for a `Participant_State` is `garbage_collect`, as defined in `Value`.
instance (Value v) => Hetcons_State (Participant_State v) where
write_prep = garbage_collect
-- | Mutable references to Participant State that work in the Hetcons_Transaction monad.
type Participant_State_Var v = MVar (Participant_State v)
-- | Observers store the set of 2b messages received or sent thus far (or at least those which have been verified).
type Observer_State v = HashSet (Verified (Recursive_2b v))
-- | For now, the `write_prep` for an `Observer_State` is `id`, meaning it does nothing.
-- TODO: Can this be made more efficient? When can we delete 2bs from history?
instance Hetcons_State (Observer_State v) where
write_prep = id
-- | Mutable references to Observer State that work in the Hetcons_Transaction monad.
type Observer_State_Var v = MVar (Observer_State v)
-- | The "Start" or "default" state for both Observers and Participants happens to be the empty set.
default_State :: (HashSet a)
default_State = empty
class State_by_Observers a where
state_by_observers :: (Contains_1a (a v) v, Hashable (Verified (a v)), Eq (Verified (a v))) => (HashSet (Verified (a v))) -> (HashSet (HashSet (Verified (a v))))
-- | Subsets of the proposals which have the same Condensed Observer Graph, for each Condensed Observer Graph in the State.
-- strict superset of :: Participant_State -> (HashSet (Participant_State))
-- and :: Observer_State -> (HashSet ( Observer_State))
instance State_by_Observers Recursive_1b where
state_by_observers s = (HashSet.map (\x -> (HashSet.filter ((x ==) . extract_observer_quorums) s)) -- 1bs per COG
(HashSet.map extract_observer_quorums s)) -- all the COGs
instance State_by_Observers Recursive_2b where
state_by_observers s = (HashSet.map (\x -> (HashSet.filter ((x ==) . extract_observer_quorums) s)) -- 2bs per COG
(HashSet.map extract_observer_quorums s)) -- all the COGs
-- | Are there any conflicting proposals in this state?
-- Bear in mind that two proposals with different COGs NEVER CONFLICT.
-- We make no guarantees about different COGs.
-- This is not implemented in a computationally efficient manner.
conflicting_state :: (Value v) => (Participant_State v) -> Bool
conflicting_state = conflicts
-- | A reference to a new state containing all of the elements of the given input
-- a strict superset of :: (Foldable t) => (t (Verified Recursive_1b)) -> IO Participant_State_Var
new_State :: (Foldable t, Hashable a, Eq a) => (t a) -> IO (MVar (HashSet a))
new_State = newMVar . fromList . toList
-- | a reference to a new, empty, state
-- a strict superset of :: IO Participant_State_Var
start_State :: (Hashable a, Eq a, Hetcons_State (HashSet a)) => IO (MVar (HashSet a))
start_State = new_State []
-- | Returns the present value of the mutable state reference given
-- a strict superset of :: Participant_State_Var -> IO Participant_State
read :: (MVar a) -> IO a
read = readMVar
-- | applies the given function to the state in the mutable state reference given (and then applies `write_prep`)
-- strict superset of :: Participant_State_Var -> (Participant_State -> Participant_State) -> IO ()
modify :: (Hetcons_State a) => (MVar a) -> (a -> a) -> IO ()
modify s f = modifyMVar_ s $ return . write_prep . f
-- | applies the given function to the state in the mutable state reference given (with `write_prep`), and also returns the second output of the function.
-- strict superset of :: Participant_State_Var -> (Participant_State -> (Participant_State, a)) -> IO a
modify_and_read :: (Hetcons_State a) => (MVar a) -> (a -> (IO (a, b))) -> IO b
modify_and_read s f = modifyMVar s (\v -> do { (v', r) <- f v
; return (write_prep v', r)})
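-- Usage sketch (illustrative; 'var' is a placeholder for any state variable whose
-- contents have a Hetcons_State instance): atomically swap in the empty state and
-- return what was stored before:
-- modify_and_read var (\old -> return (default_State, old))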
| isheff/hetcons | src/Hetcons/Hetcons_State.hs | mit | 5,456 | 0 | 16 | 1,129 | 989 | 564 | 425 | 73 | 1 |
{-# LANGUAGE RankNTypes, OverloadedStrings #-}
module Main where
import Data.Text as T
import Reflex as Reflex
import Reflex.Host.Class (newEventWithTriggerRef, runHostFrame, fireEvents)
import Control.Monad.Fix (MonadFix)
import Control.Monad.Identity (Identity(..))
import Control.Monad.IO.Class (liftIO)
import Data.IORef (readIORef)
import Data.Dependent.Sum (DSum ((:=>)))
import qualified Graphics.UI.FLTK.LowLevel.FL as FL
import Graphics.UI.FLTK.LowLevel.Fl_Types
import Graphics.UI.FLTK.LowLevel.FLTKHS as FL
import Graphics.UI.FLTK.LowLevel.Fl_Enumerations as FL
type TypingApp t m = (Reflex t, MonadHold t m, MonadFix m)
=> Reflex.Event t Char
-> m (Behavior t String)
guest :: TypingApp t m
guest e = do
d <- foldDyn (:) [] e
return $ fmap Prelude.reverse $ (current d)
makeWindow :: (FL.Ref DoubleWindow -> FL.Event -> IO (Either UnknownEvent ())) -> IO (FL.Ref DoubleWindow)
makeWindow handler = do
w <- doubleWindowCustom
(toSize (538,413))
(Just (toPosition (113,180)))
(Just "FLTKHS Reflex Host Port")
Nothing
(defaultCustomWidgetFuncs
{
handleCustom = Just handler
})
defaultCustomWindowFuncs
setColor w whiteColor
setLabelfont w helveticaBold
setVisible w
return w
makeDescription :: IO ()
makeDescription = do
description <- textDisplayNew (toRectangle (30,35,478,90)) Nothing
setLabel description "Reflex Host Example"
setBox description NoBox
setLabelfont description helveticaBold
setWhen description [WhenNever]
setTextfont description courier
setTextsize description (FontSize 12)
wrapMode description WrapAtBounds
dBuffer <- textBufferNew Nothing Nothing
setText dBuffer "\n\nThis is a port of the 'host' example that ships with 'try-reflex'.\n\nType anywhere and the accumulated output will show up below."
setBuffer description (Just dBuffer)
makeOutput :: IO (FL.Ref TextBuffer, FL.Ref TextDisplay)
makeOutput = do
o <- textDisplayNew (toRectangle (30,144,478,236)) Nothing
setLabel o "Output:"
setBox o BorderFrame
setTextfont o courier
setTextsize o (FontSize 12)
setWhen o [WhenNever]
b <- textBufferNew Nothing Nothing
setBuffer o (Just b)
return (b,o)
outputChanged :: FL.Ref TextBuffer -> T.Text -> IO Bool
outputChanged buffer newText = do
oldText <- getText buffer
if (not (T.null oldText))
then do
let oldLastLine = Prelude.head (Prelude.reverse (T.lines oldText))
return (newText /= oldLastLine)
else return (not (T.null newText))
host :: (forall t m. TypingApp t m) -> IO ()
host myGuest =
runSpiderHost $ do
(e, eTriggerRef) <- newEventWithTriggerRef
let windowHandler :: FL.Ref DoubleWindow -> FL.Event -> IO (Either UnknownEvent ())
windowHandler window fltkEvent =
case fltkEvent of
Keydown -> do
keyPressed <- FL.eventText
eventTrigger <- liftIO (readIORef eTriggerRef)
if (not (T.null keyPressed))
then runSpiderHost $
case eventTrigger of
Nothing -> return ()
Just event ->
fireEvents [event :=> Identity (Prelude.head (T.unpack keyPressed))] >>
liftIO (return ())
else return ()
return (Right ())
_ -> handleSuper window fltkEvent
b <- runHostFrame (myGuest e)
liftIO $ do
w <- makeWindow windowHandler
begin w
makeDescription
(buffer,o) <- makeOutput
end w
setResizable w (Just o)
showWidget w
go (do
leftTodo <- FL.wait
return (leftTodo > 0)
)
(runSpiderHost $ do
output <- runHostFrame (sample b)
liftIO $ do
newOutput <- outputChanged buffer (T.pack output)
if newOutput
then do
emptyBuffer <- getText buffer >>= return . T.null
appendToBuffer buffer (T.pack (if emptyBuffer then output else ("\n" ++ output)))
else return ())
where
go :: IO Bool -> IO () -> IO ()
go predicateM action = do
predicate <- predicateM
if predicate
then action >> go predicateM action
else return ()
main :: IO ()
main = host guest
| deech/fltkhs-reflex-host | src/reflex-host.hs | mit | 4,396 | 0 | 30 | 1,238 | 1,408 | 698 | 710 | 116 | 7 |
{-# OPTIONS_GHC -fno-warn-type-defaults #-}
import Data.Foldable (for_)
import Test.Hspec (Spec, describe, it, shouldBe)
import Test.Hspec.Runner (configFastFail, defaultConfig, hspecWith)
import Queens (boardString, canAttack)
main :: IO ()
main = hspecWith defaultConfig {configFastFail = True} specs
specs :: Spec
specs = do
-- Track-specific test cases.
describe "boardString" $ do
it "empty board" $ boardString Nothing Nothing
`shouldBe` unlines [ "_ _ _ _ _ _ _ _"
, "_ _ _ _ _ _ _ _"
, "_ _ _ _ _ _ _ _"
, "_ _ _ _ _ _ _ _"
, "_ _ _ _ _ _ _ _"
, "_ _ _ _ _ _ _ _"
, "_ _ _ _ _ _ _ _"
, "_ _ _ _ _ _ _ _" ]
it "board with just white queen" $ boardString (Just (2, 4)) Nothing
`shouldBe` unlines [ "_ _ _ _ _ _ _ _"
, "_ _ _ _ _ _ _ _"
, "_ _ _ _ W _ _ _"
, "_ _ _ _ _ _ _ _"
, "_ _ _ _ _ _ _ _"
, "_ _ _ _ _ _ _ _"
, "_ _ _ _ _ _ _ _"
, "_ _ _ _ _ _ _ _" ]
it "board with just black queen" $ boardString Nothing (Just (0, 0))
`shouldBe` unlines [ "B _ _ _ _ _ _ _"
, "_ _ _ _ _ _ _ _"
, "_ _ _ _ _ _ _ _"
, "_ _ _ _ _ _ _ _"
, "_ _ _ _ _ _ _ _"
, "_ _ _ _ _ _ _ _"
, "_ _ _ _ _ _ _ _"
, "_ _ _ _ _ _ _ _" ]
it "board" $ boardString (Just (2, 4)) (Just (6, 6))
`shouldBe` unlines [ "_ _ _ _ _ _ _ _"
, "_ _ _ _ _ _ _ _"
, "_ _ _ _ W _ _ _"
, "_ _ _ _ _ _ _ _"
, "_ _ _ _ _ _ _ _"
, "_ _ _ _ _ _ _ _"
, "_ _ _ _ _ _ B _"
, "_ _ _ _ _ _ _ _" ]
-- The function described by the reference file as `create` doesn't
-- exist in this track, so only the `canAttack` test cases were
-- implemented here
describe "canAttack" $ do
let test (description, white, black, expected) =
it description $ canAttack white black `shouldBe` expected
cases = [ ("can not attack" , (2, 4), (6, 6), False)
, ("can attack on same rank" , (2, 4), (2, 6), True )
, ("can attack on same file" , (4, 5), (2, 5), True )
, ("can attack on first diagonal" , (2, 2), (0, 4), True )
, ("can attack on second diagonal", (2, 2), (3, 1), True )
, ("can attack on third diagonal" , (2, 2), (1, 1), True )
, ("can attack on fourth diagonal", (2, 2), (5, 5), True ) ]
for_ cases test
-- cc646595d39e13c4d310da2629599bcc45e92bd9
| exercism/xhaskell | exercises/practice/queen-attack/test/Tests.hs | mit | 3,122 | 0 | 15 | 1,557 | 606 | 361 | 245 | 57 | 1 |
{-# LANGUAGE BangPatterns, DataKinds, DeriveDataTypeable, FlexibleInstances, MultiParamTypeClasses #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module Hadoop.Protos.NamenodeProtocolProtos.RollEditLogRequestProto (RollEditLogRequestProto(..)) where
import Prelude ((+), (/))
import qualified Prelude as Prelude'
import qualified Data.Typeable as Prelude'
import qualified Data.Data as Prelude'
import qualified Text.ProtocolBuffers.Header as P'
data RollEditLogRequestProto = RollEditLogRequestProto{}
deriving (Prelude'.Show, Prelude'.Eq, Prelude'.Ord, Prelude'.Typeable, Prelude'.Data)
instance P'.Mergeable RollEditLogRequestProto where
mergeAppend RollEditLogRequestProto RollEditLogRequestProto = RollEditLogRequestProto
instance P'.Default RollEditLogRequestProto where
defaultValue = RollEditLogRequestProto
instance P'.Wire RollEditLogRequestProto where
wireSize ft' self'@(RollEditLogRequestProto)
= case ft' of
10 -> calc'Size
11 -> P'.prependMessageSize calc'Size
_ -> P'.wireSizeErr ft' self'
where
calc'Size = 0
wirePut ft' self'@(RollEditLogRequestProto)
= case ft' of
10 -> put'Fields
11 -> do
P'.putSize (P'.wireSize 10 self')
put'Fields
_ -> P'.wirePutErr ft' self'
where
put'Fields
= do
Prelude'.return ()
wireGet ft'
= case ft' of
10 -> P'.getBareMessageWith update'Self
11 -> P'.getMessageWith update'Self
_ -> P'.wireGetErr ft'
where
update'Self wire'Tag old'Self
= case wire'Tag of
_ -> let (field'Number, wire'Type) = P'.splitWireTag wire'Tag in P'.unknown field'Number wire'Type old'Self
instance P'.MessageAPI msg' (msg' -> RollEditLogRequestProto) RollEditLogRequestProto where
getVal m' f' = f' m'
instance P'.GPB RollEditLogRequestProto
instance P'.ReflectDescriptor RollEditLogRequestProto where
getMessageInfo _ = P'.GetMessageInfo (P'.fromDistinctAscList []) (P'.fromDistinctAscList [])
reflectDescriptorInfo _
= Prelude'.read
"DescriptorInfo {descName = ProtoName {protobufName = FIName \".hadoop.hdfs.namenode.RollEditLogRequestProto\", haskellPrefix = [MName \"Hadoop\",MName \"Protos\"], parentModule = [MName \"NamenodeProtocolProtos\"], baseName = MName \"RollEditLogRequestProto\"}, descFilePath = [\"Hadoop\",\"Protos\",\"NamenodeProtocolProtos\",\"RollEditLogRequestProto.hs\"], isGroup = False, fields = fromList [], descOneofs = fromList [], keys = fromList [], extRanges = [], knownKeys = fromList [], storeUnknown = False, lazyFields = False, makeLenses = False}"
instance P'.TextType RollEditLogRequestProto where
tellT = P'.tellSubMessage
getT = P'.getSubMessage
instance P'.TextMsg RollEditLogRequestProto where
textPut msg = Prelude'.return ()
textGet = Prelude'.return P'.defaultValue | alexbiehl/hoop | hadoop-protos/src/Hadoop/Protos/NamenodeProtocolProtos/RollEditLogRequestProto.hs | mit | 2,898 | 1 | 16 | 533 | 554 | 291 | 263 | 53 | 0 |
import Data.Vector (Vector, generate, slice, toList, fromList, (!), (//))
import Data.Either.Unwrap
import Data.Maybe
import Data.List
import System.IO
data Player = X | O deriving (Eq, Show, Enum)
type Tile = Either Int Player
type Board = Vector Tile
data State = State { board :: Board, player :: Player } deriving (Eq, Show)
data GameTree = GameTree {state :: State, children :: [GameTree]}
initialState :: State
initialState = State (generate 9 (\i -> Left i)) X
nextStates :: State -> [State]
nextStates s = filter (\x -> board x /= board s) $ map (makeMove s) [0..8]
makeMove :: State -> Int -> State
makeMove s i = State newBoard (nextPlayer $ player s)
where
t = board s ! i
newBoard
| isLeft t = board s // [(i, Right $ player s)]
| otherwise = board s
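-- For example (illustrative): makeMove initialState 4 puts an X in the centre
-- cell and hands the turn to O; a move onto an occupied tile leaves the board
-- unchanged (such no-op moves are filtered out by nextStates).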
gameTree :: State -> GameTree
gameTree s = GameTree s (map gameTree $ nextStates s)
nextPlayer :: Player -> Player
nextPlayer X = O
nextPlayer O = X
negamax :: Int -> Player -> State -> Int
negamax d p s
| winnerOf s == Just p = 100
| winnerOf s == Just (nextPlayer p) = -100
| d == 0 || (all isRight $ board s) = 0
| otherwise = minimum $
map (negate . negamax (d - 1) (nextPlayer p)) (nextStates s)
winnerOf :: State -> Maybe Player
winnerOf s = maybe Nothing id $
find isJust $ map winner rows ++ map winner cols ++ map winner diags
where
winner [a, b, c] =
if all isRight [a, b, c] && a == b && b == c
then Just (fromRight a) else Nothing
rows = map (\i -> toList $ slice (3*i) 3 (board s)) [0..2]
cols = map (\i -> map ((!) $ board s) [i, i+3, i+6]) [0..2]
diags = map (map ((!) (board s))) [[0, 4, 8], [2, 4, 6]]
-- temporary tests to see if shit works
main = do
let tree = gameTree initialState
print $ state tree
print $ map state $ take 2 $ children tree
-- print $ map (negamax 10 X) $ nextStates
-- $ State (fromList [Right X, Right O, Left 2,
-- Left 3, Left 4, Left 5,
-- Left 6, Left 7, Left 8]) X
| Jinxit/mcts | TicTacToe.hs | mit | 2,119 | 0 | 13 | 626 | 901 | 472 | 429 | -1 | -1 |
-----------------------------------------------------------------------------
--
-- Module : Packing
-- Copyright : Armin Kazmi (2015)
-- License : MIT
--
-- Maintainer : Armin Kazmi
-- Stability : experimental
-- Portability : GHC only or compatible
--
-- |
--
-----------------------------------------------------------------------------
{-# OPTIONS_GHC -F -pgmF htfpp #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE MultiParamTypeClasses #-}
module DAAK.Algorithms.Gamo.Packing where
import Test.Framework
import Control.Monad
import DAAK.Algorithms.Gamo.Encoding
import DAAK.Core.ResidualSpace
import DAAK.Core.Space3D as S
import DAAK.Core.Utilities
import Data.List as L
import Data.Map as M
import Data.Vect
-- | Package ids are of type Int
type IdKey = Int
-- | Package quantities are of type Int
type Quantity = Int
-- | A single item/package has an id and a quantity
type ItemQuantity = (IdKey, Quantity)
-- | Several items with their quantities
type ItemQuantities = [ItemQuantity]
-- | A map from an item key index and its quantity
type ItemQuantityMap = Map IdKey Quantity
-- | A Problem consists of a loadspace (EuclideanSpace3D) several indexed
-- packages and items with their quantities
data ProblemDescription = ProblemDescription !LoadSpace ![IndexedSpace] !ItemQuantities
deriving (Show, Eq)
-- | A packing algebraic type
data Packing = Packing !LoadSpace ![IndexedSpace] ![ResidualSpace] Int -- ^ A non empty packing with loadspace, indexed packages, residual spaces and genes used
| EmptyPacking !LoadSpace -- ^ An empty packing with the loadspace used
deriving (Show)
-- | Constructor for type Packing, creating either a Packing or EmptyPacking
mkPacking :: LoadSpace -> [IndexedSpace] -> [ResidualSpace] -> Int -> Packing
mkPacking load [] _ _ = EmptyPacking load
mkPacking load ispaces residuals usedC = Packing load ispaces residuals usedC
-- | Constructor for type Packing, creating either a Just Packing or Nothing
mkPackingMaybe :: LoadSpace -> [IndexedSpace] -> [ResidualSpace] -> Int -> Maybe Packing
mkPackingMaybe l is rs used
| packing <- mkPacking l is rs used
, Packing{} <- packing
= Just packing
| otherwise
= Nothing
-- | For a given list of 'ItemQuantity', calculate the total amount of items.
itemsCount :: ItemQuantities -> Int
itemsCount = sum . fmap snd
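-- For example (illustrative): itemsCount [(0,2), (1,3)] == 5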
-- | Replicate all indexed packages with their quantities into a flat list.
-- Say package A is to be placed 3 times, package B twice. Then the resulting list will be:
-- [A, A, A, B, B]. The order of packages is defined by the order of the provided list of packages.
qutantityReplicateItems :: ItemSpaces -> ItemQuantities -> ItemSpaces
qutantityReplicateItems is =
concatMap (\(key, quantity) -> replicate quantity $ is !! key)
genesUsed :: Maybe Packing -> Int
genesUsed Nothing = 0
genesUsed (Just (EmptyPacking _)) = 0
genesUsed (Just (Packing _ _ _ used)) = used
type AxeOrder = [Axis]
-- | Possible fill orders as a list of a list of axis (currently for 2D only)
allFillOrders :: [AxeOrder]
--allFillOrders = [[Y, Z], [Z, Y], [X, Y], [Y, X], [X, Z], [Z, X]]
allFillOrders = [[X, Y], [Y, X]]
-- | try to fill a residual space @rspace@
fillResidual :: (EuclideanSpace3D, ResidualSpace) -- ^ Package and picked residual space
-> Quantity -- ^ quantity of packages (to be maximized in the filling)
-> AxeOrder -- ^ axe fill order
-> Maybe (EuclideanSpace3D, [Vec3], Quantity) -- ^ used package, all translation vectors and used quantity or @Nothing@
-- if not a single item fits
fillResidual (space, residual) q order
| order == [Y, Z]
, facs <- take minq
[ Vec3 x y z
| y <- [0 .. fromIntegral ys]
, z <- [0 .. fromIntegral zs]
, x <- [0 .. fromIntegral xs]
]
, not $ L.null facs
= Just (space, facs, minq)
| order == [Z, Y]
, facs <- take minq
[ Vec3 x y z
| z <- [0 .. fromIntegral zs]
, y <- [0 .. fromIntegral ys]
, x <- [0 .. fromIntegral xs]
]
, not $ L.null facs
= Just (space, facs, minq)
| order == [X, Y]
, facs <- take minq
[ Vec3 x y z
| x <- [0 .. fromIntegral xs]
, y <- [0 .. fromIntegral ys]
, z <- [0 .. fromIntegral zs]
]
, not $ L.null facs
= Just (space, facs, minq)
| order == [Y, X]
, facs <- take minq
[ Vec3 x y z
| y <- [0 .. fromIntegral ys]
, x <- [0 .. fromIntegral xs]
, z <- [0 .. fromIntegral zs]
]
, not $ L.null facs
= Just (space, facs, minq)
| order == [X, Z]
, facs <- take minq
[ Vec3 x y z
| x <- [0 .. fromIntegral xs]
, z <- [0 .. fromIntegral zs]
, y <- [0 .. fromIntegral ys]
]
, not $ L.null facs
= Just (space, facs, minq)
| order == [Z, X]
, facs <- take minq
[ Vec3 x y z
| z <- [0 .. fromIntegral zs]
, x <- [0 .. fromIntegral xs]
, y <- [0 .. fromIntegral ys]
]
, not $ L.null facs
= Just (space, facs, minq)
| otherwise
= Nothing
where
sresidual = S.size $ rspace residual
sspace = mapVec (1/) $ S.size space
fits = sresidual `pointwise` sspace
(xs, ys, zs) = ( floor (vx fits) -1
, floor (vy fits) -1
, min (floor (vz fits) -1 ) 0
)
minq = min q ((xs + 1) * (ys + 1) * (zs + 1))
-- | Try to fill a residual space 'rspace'.
-- in contrast to @fillResidual@ the used freespace is also returned
fillQuantityMaybe :: (EuclideanSpace3D, ResidualSpace) -- ^ Package and picked residual space
-> AxeOrder -- ^ axe fill order
-> Quantity -- ^ quantity of packages (to be maximized in the filling)
                   -> Maybe (ResidualSpace, EuclideanSpace3D, [Vec3], Quantity) -- ^ used residual space, package, all translation vectors and used quantity or @Nothing@
-- if not a single item fits
fillQuantityMaybe (space, residual) axeOrder quantity
| quantity == 0
= Nothing
| Just (es, vectors, possibleQuantity) <- fillResidual (space, residual) quantity axeOrder
, not $ L.null vectors
= Just (residual, es, vectors, possibleQuantity)
| otherwise
= Nothing
-- | Pick next residualspace for which a filling with a package succeeds or 'Nothing'
-- in case the package cannot be placed in any of the residual spaces.
pickNextResidual :: [ResidualSpace] -- ^ The residual spaces to be tried
-> EuclideanSpace3D -- ^ A package
-> AxeOrder -- ^ The fillorder to be used
-> Quantity -- ^ The maximum amount of packages to be placed
-> Maybe (ResidualSpace, EuclideanSpace3D, [Vec3], Quantity) -- ^ The picked residual space, used package, all translation vectors and used quantity
-- or 'Nothing' if not a single package could be placed in any residual space
pickNextResidual rs p ao q = L.foldl (\b a -> b `mplus` fillQuantityMaybe (p, a) ao q) Nothing rs
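-- Note: 'mplus' on 'Maybe' keeps the first 'Just', so this picks the filling for
-- the first residual space (in list order) that fits at least one package.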
-- | For every @q@ in [1..q_i] replicate incomplete selections of the
-- the specified filled residualspace
quantityReplicate :: (ResidualSpace, EuclideanSpace3D, [Vec3], Quantity) -- ^ The used residual space, untranslated package, translation vectors and quantity
-> [(ResidualSpace, EuclideanSpace3D, [Vec3])] -- ^ A list with the same residual space, same untranslated package but selections of the translation vectors
-- for every q in [1..q]
quantityReplicate (r, e, vs, q) = fmap (\qi -> (r, e, take qi vs)) [1..q]
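-- Illustrative example (placeholder names): with three translation vectors and
-- quantity 3, the partial selections are
-- quantityReplicate (r, e, [v1,v2,v3], 3) == [(r,e,[v1]), (r,e,[v1,v2]), (r,e,[v1,v2,v3])]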
-- | Select the next fitting residual space that can contain
-- at least one package. Calculate a filled block for the given fill order with max
-- @q@ packages. Generate all subselections from 1..@q@ for that block.
-- If no single item fits in any of the residual spaces, return 'Nothing'.
placeNextCandidates :: LoadSpace -- ^ The used load space
-> [ResidualSpace] -- ^ All current residual spaces
-> AxeOrder -- ^ A fill order
-> EuclideanSpace3D -- ^ A package to be placed
-> Quantity -- ^ The package quantity
-> Maybe [(ResidualSpace, EuclideanSpace3D, [Vec3])] -- ^ All selections to place at least one package for the next fitting residual space
-- (selections in the sense of partial selections of a filled block)
placeNextCandidates _ [] _ _ _ = Nothing
placeNextCandidates load residual order package quantity
| quantity == 0
= Nothing
| not $ package `isInside` load
= Nothing
| otherwise
= pickNextResidual residual package order quantity >>= Just . quantityReplicate
-- | Index all provided packages with the given index as 'IndexedSpace'.
-- | Also keep the provided residual spaces.
indexPackingStep :: Int -> ([EuclideanSpace3D], [ResidualSpace]) -> ([IndexedSpace], [ResidualSpace])
indexPackingStep i (es, rs) = (fmap (indexSpace i) es, rs)
-- | Expand a placement. For the given package index 'i',
-- residual spaces, and a packing candidate selection, index
-- and translate all packages. Also make sure the resulting new residuals
-- maintain the dominance/inclusion and order relation.
-- The following condition must be true at all times:
--
-- prop> r `elem` rs == True
expandPlacement :: Int -- ^ The index of the package used
-> LoadSpace
-> [EuclideanSpace3D]
-> [ResidualSpace] -- ^All current residual spaces 'rs'
-> (ResidualSpace, EuclideanSpace3D, [Vec3]) -- ^ The picked residual space 'r', the untranslated package and translation vectors
-> ([IndexedSpace], [ResidualSpace]) -- ^ The indexed packages and the resulting list of new residual spaces
expandPlacement i load spaces residualspaces (residual, package, translations) =
indexPackingStep i (tes, splitFoldDominant residualspaces tes)
where
residualStart = start $ rspace residual
packageSize = S.size package
tes = fmap (\v -> package `translate` (residualStart &+ (packageSize `pointwise` v))) translations
modChromsomePackages :: ([ChromosomeType] -> [ChromosomeType]) -> PackingSelectors -> PackingSelectors
modChromsomePackages f (orientationCs, fillCs, selectCs) = (f orientationCs, f fillCs, f selectCs)
-- | Combine flat representation of an intermediate packing state.
-- This would be the location to implement a "failure as fast as possible" variant.
--
-- Conditions for 'Just' and 'Nothing' for this combination:
--
-- A packing order that can not even place its first package is considered useless:
-- 'isNothing' A => 'Nothing'
--
-- A packing order that did already place something will always remain valid
-- 'isJust' A && 'isNothing' B => 'Just A'
--
-- A packing order that was already valid and is to be extended will always be valid
-- 'isJust' A && 'isJust' B => 'Just A + B'
combinePlacements :: Maybe ([IndexedSpace], [ResidualSpace], PackingSelectors, ItemQuantityMap, Int)
                   -- ^ The currently placed packages, residual spaces, packing selectors, quantities and used chromosomes ('A')
-> Maybe ([IndexedSpace], [ResidualSpace]) -- ^ To be added packages and to be replaced residual spaces ('B')
-> Maybe ([IndexedSpace], [ResidualSpace], PackingSelectors, ItemQuantityMap, Int) -- ^ The resulting update of the state
combinePlacements Nothing _ = Nothing
--combinePlacements (Just _) Nothing = Nothing
-- A single chromosome was used, even though no extension happened
combinePlacements (Just (packages, residuals, chromosomePackages, qm, used)) Nothing =
Just (packages, residuals, chromosomePackages, qm, succ used)
-- A single chromosome was used to extend the packing by a number of packages.
combinePlacements (Just (packages, _, chromosomePackages, oqm, used)) (Just (newPackages, newResiduals)) =
Just (packages ++ newPackages, newResiduals, safeTail chromosomePackages, qm, succ used)
where
lengthNew = length newPackages
qm = if L.null newPackages then oqm
else M.adjust (\k -> k - lengthNew) (fst $ head newPackages) oqm
safeTail = modChromsomePackages (drop 1)
-- | Select an element from a list by projecting a selector in [0.0, 1.0]
-- over the indices of the list. Eg. a selector of 1 will always select the last element,
-- 0.5 in the center and 0 at the front.
select01 :: (Fractional b, RealFrac b) => [a] -> b -> a
select01 ls i = ls !! idx
where
lenlist = length ls
idx = floor $ minmax 0.0 1.0 i * fromIntegral (lenlist - 1)
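-- Examples (illustrative; follow directly from the definition):
-- select01 [10,20,30] 0.0 == 10
-- select01 [10,20,30] 0.5 == 20
-- select01 [10,20,30] 1.0 == 30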
-- | Retrieve the packing extension for a given loadspace, an
-- indexed package and its quantity, a single chromosome package and the current
-- list of residual spaces.
pickNextPack :: LoadSpace -- ^ The loadspace used
-> [EuclideanSpace3D]
-> IndexedSpace -- ^ The index package and its space
-> Quantity -- ^ The package quantity
-> PackingSelector -- ^ The chromosome package for the package
-> [ResidualSpace] -- ^ The current list of residual spaces
-> Maybe ([IndexedSpace], [ResidualSpace]) -- ^ A packing extension, defined by a number of placed packages and a new list of residual spaces
pickNextPack load packed (idx, package) quantity (orientationSelect, fillOrder, candidateSelect) rs
| orientation <- sizePermuteReduced package
, pickedOrientation <- select01 orientation orientationSelect
, pickedAxeOrder <- select01 allFillOrders fillOrder
-- sort residual spaces first, then try to get a packing extension for the supplied
-- orientation, fill order and candidate selector
, Just cs <- placeNextCandidates load (L.sortBy startOrd rs) pickedAxeOrder pickedOrientation quantity
, not $ L.null cs
, selected <- select01 cs candidateSelect
-- A packing extension could be found, now it needs to be indexed
= Just $ expandPlacement idx load packed rs selected
| otherwise
= Nothing
-- | Try to place all packages on a loadspace using the provided
-- information in the PackingSelectors (the genome), all packages and their
-- quantities. This is the "start" of the heuristic packing algorithm.
-- In case no single item could be placed, even with skipping, the algorithm
-- returns 'Nothing', otherwise a packing in form of translated indexed packages,
-- the current residual spaces and the amount of chromosomes used.
placeAll :: LoadSpace -- ^ The loadspace
-> PackingSelectors -- ^ The full genome
-> [IndexedSpace] -- ^ Untranslated indexed packages
-> ItemQuantities -- ^ The quantities for every packge
-> Maybe ([IndexedSpace], [ResidualSpace], Int) -- ^ A packing with translated indexed packages,
-- residual spaces and the amount of chromosomes used.
placeAll load chromosomePackages@(orientatioCs, fillCs, selectCs) ipackages iqs
-- Check prerequisites for algorithm, especially the size of the genome
| length orientatioCs /= length fillCs ||
length orientatioCs /= length selectCs
= error $ "incorrectly sized genome lists (orientatioCs " ++
show (length orientatioCs) ++ ", fillCs " ++
show (length fillCs) ++ ", selectCs " ++
show (length selectCs) ++ ")"
| otherwise
= extractData <$>
L.foldl (\oldState ipackage -> do
(packed, residual, (orientation, fillOrder, candidateSelect), qm, _) <- oldState
-- check item quantity
let q = qm ! fst ipackage
-- skip already fully packed items
if q == 0 then oldState
else combinePlacements oldState $
pickNextPack load (snd <$> packed) ipackage q
(head orientation, head fillOrder, head candidateSelect)
residual
)
-- The initial state. nothing placed, only the loadspace as residual,
-- chromosomes unpacked and split, initialized quantities, and 0 chromosomes used.
(Just ([], [mkResidualSpace load], chromosomePackages, M.fromList iqs, 0)) ipackages
where extractData (placements, residuals, _, _, used) = (placements, residuals, used)
| apriori/daak | lib/DAAK/Algorithms/Gamo/Packing.hs | mit | 16,610 | 0 | 16 | 4,187 | 3,263 | 1,816 | 1,447 | 230 | 2 |
module Hiker where
answer :: Int
answer = 6 * 9
isSolution [_] = True
isSolution [_, _] = False
isSolution [_, _, _] = False
isSolution cells = (not (hasDuplicates (map toColumn cells))) && (not (hasDuplicates (map toRow cells))) && (not diag cells)
where toColumn (_,c) = c
toRow(r, _) = r
toDiag(x,y) = x-y
hasDuplicates (v1:tail) = elem v1 tail || (hasDuplicates tail)
hasDuplicates [] = False | murex/murex-coding-dojo | Paris/2015/2015-03-19-EightQueens-Randori-Coffeescript-Haskell/hiker.hs | mit | 422 | 0 | 12 | 94 | 212 | 114 | 98 | 12 | 1 |
--
-- riot/Riot/Entry.hs
--
-- Copyright (c) Tuomo Valkonen 2004-2005.
--
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- Module information {{{
module Riot.Entry(
Entry(..),
--EditableEntry(..),
EntryTree(..),
InsertWhere(..),
Loc(..),
TagAction(..),
new_entrytree,
list_to_entrytree,
entrytree_set_entry,
entrytree_get,
entrytree_map,
entrytree_map_maybe,
entrytree_map_path,
entrytree_flatten,
entrytree_replace_f,
entrytree_replace,
entrytree_insert,
entrytree_remove,
entrytree_move,
entrytree_expand,
entrytree_collapse,
entrytree_collapse_p,
entrytree_expand_,
entrytree_collapse_,
entrytree_tag,
entrytree_clear_tags,
entrytree_get_tagged,
entrytree_fold_loc,
entrytree_fold,
loc_rm_effect,
loc_rm_effect_insw,
loc_ins_effect,
loc_above,
loc_firstunder,
loc_lastunder,
loc_after,
loc_before
)where
-- }}}
-- Imports {{{
import Maybe
import List(sort)
import Time(CalendarTime)
--import Riot
-- }}}
-- Classes instances & misc {{{
class Entry a where
entry_title :: a -> String
entry_title e = ""
entry_text :: a -> String
entry_text e = ""
entry_flags :: a -> String
entry_flags e = ""
data Entry a => EntryTree a = EntryTree {
entrytree_expanded :: Bool,
entrytree_tagged :: Bool,
entrytree_thisentry :: a,
entrytree_children :: [EntryTree a]
}
instance Entry a => Entry (EntryTree a) where
entry_title = entry_title . entrytree_thisentry
entry_text = entry_text . entrytree_thisentry
entry_flags = entry_flags . entrytree_thisentry
new_entrytree e =
EntryTree False False e []
entrytree_set_entry et e =
et{entrytree_thisentry = e}
-- }}}
-- Location stuff {{{
newtype Loc = Loc [Int] deriving Eq
instance Ord Loc where
(Loc []) <= (Loc _) = True
(Loc _) <= (Loc []) = False
(Loc (l1:ll1)) <= (Loc (l2:ll2)) =
if l1 < l2 then
True
else if l1==l2 then
(Loc ll1) <= (Loc ll2)
else
False
from_loc (Loc ll) = ll
loc_before (Loc ll) = Loc $ loc_before_ ll
where
loc_before_ (l:[]) | l>0 = [l-1]
loc_before_ (l:ll) = l:(loc_before_ ll)
loc_after (Loc ll) = Loc $ loc_after_ ll
where
loc_after_ (l:[]) = [l+1]
loc_after_ (l:ll) = l:(loc_after_ ll)
loc_firstunder (Loc ll) = Loc (ll++[0])
loc_lastunder (Loc ll) nch = Loc (ll++[nch])
loc_above (Loc ll) = (Loc $ init ll)
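-- Illustrative examples of the location helpers above (a 'Loc' is a 0-based
-- path of child indices from the list of root entries):
--   loc_before     (Loc [2,3]) == Loc [2,2]
--   loc_after      (Loc [2,3]) == Loc [2,4]
--   loc_firstunder (Loc [2])   == Loc [2,0]
--   loc_above      (Loc [2,3]) == Loc [2]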
-- }}}
-- Get {{{
entrytree_get :: Entry a => [EntryTree a] -> Loc -> EntryTree a
entrytree_get (e:et) (Loc (0:[])) = e
entrytree_get (e:et) (Loc (0:loc)) = entrytree_get (entrytree_children e) (Loc loc)
entrytree_get (e:et) (Loc (n:loc)) | n>0 = entrytree_get et (Loc ((n-1):loc))
entrytree_get _ _ = error "Invalid entry tree location"
-- }}}
-- entrytree_map, entrytree_map_path {{{
entrytree_map_ f [] _ = []
entrytree_map_ f (e2:et) rrr@(r:rr) =
e2map++etmap
where
nch = entrytree_map_ f (entrytree_children e2) (0:rrr)
e2map = f e2 nch (Loc $ reverse rrr)
etmap = entrytree_map_ f et ((r+1):rr)
entrytree_map :: Entry a => (EntryTree a -> [b] -> Loc -> [b]) -> [EntryTree a] -> [b]
entrytree_map f et = entrytree_map_ f et [0]
-- Apply function to all entries in the tree, starting from leaves
-- and feeding the function also a Maybe indicating whether the children
-- of the entry in question were changed.
entrytree_map_maybe_ f [] _ = Nothing
entrytree_map_maybe_ f (e2:et) rrr@(r:rr) =
case (e2n, etn) of
(Nothing, Nothing) -> Nothing
(Nothing, Just etn) -> Just (e2:etn)
(Just e2n, Nothing) -> Just (e2n++et)
(Just e2n, Just etn) -> Just (e2n++etn)
where
nch = entrytree_map_maybe_ f (entrytree_children e2) (0:rrr)
e2n = f e2 nch (Loc $ reverse rrr)
etn = entrytree_map_maybe_ f et ((r+1):rr)
entrytree_map_maybe :: Entry a =>
(EntryTree a -> Maybe [EntryTree a] -> Loc -> Maybe [EntryTree a])
-> [EntryTree a] -> Maybe [EntryTree a]
entrytree_map_maybe f et = entrytree_map_maybe_ f et [0]
-- Similar to entrytree_map, but restrict to working on a path given by
-- a Loc.
entrytree_map_path_ f [] _ _ = Nothing
entrytree_map_path_ f _ [] _ = Nothing
entrytree_map_path_ f (e:et) (l:ll) rrr@(r:rr) | l==r =
maybe Nothing (\e_ -> Just (e_++et)) ne
where
nch = entrytree_map_path_ f (entrytree_children e) ll (0:rrr)
ne = f e nch (Loc $ reverse rrr)
entrytree_map_path_ f (e:et) lll@(l:ll) rrr@(r:rr) | r<l =
case entrytree_map_path_ f et lll ((r+1):rr) of
Nothing -> Nothing
Just et_ -> Just (e:et_)
entrytree_map_path :: Entry a =>
(EntryTree a -> Maybe [EntryTree a] -> Loc -> Maybe [EntryTree a])
-> [EntryTree a] -> Loc -> Maybe [EntryTree a]
entrytree_map_path f et (Loc ll) = entrytree_map_path_ f et ll [0]
-- }}}
-- Folding-type operations {{{
entrytree_fold_loc :: Entry a =>
(Loc -> b -> a -> b) -> b -> [EntryTree a] -> b
entrytree_fold_loc = entrytree_fold_loc_ (Loc [0])
entrytree_fold_loc_ _ _ v [] = v
entrytree_fold_loc_ l f v (e:et) =
let vn = f l v (entrytree_thisentry e)
lu = (loc_firstunder l)
vu = entrytree_fold_loc_ lu f vn (entrytree_children e)
in entrytree_fold_loc_ (loc_after l) f vu et
entrytree_fold f = entrytree_fold_loc (\loc -> f)
-- }}}
-- Conversions: flatten, list_to_entrytree {{{
entrytree_flatten :: Entry a => [EntryTree a] -> [(Bool, Int, a)]
entrytree_flatten et =
entrytree_map f et
where
f e chflat (Loc loc) =
(entrytree_expanded e, length loc, entrytree_thisentry e):chflat
list_to_entrytree_ :: Entry a => Int -> [(Bool, Int, a)] -> ([EntryTree a], [(Bool, Int, a)])
list_to_entrytree_ _ [] = ([], [])
list_to_entrytree_ d eee@(e@(e_x, e_d, e_e):ee)
| e_d<d = ([], eee)
| e_d==d = (et:more, ee__)
| otherwise = error "Invalid depths in list"
where
et = EntryTree e_x False e_e e_ch
(e_ch, ee_) = list_to_entrytree_ (d+1) ee
(more, ee__) = list_to_entrytree_ d ee_
list_to_entrytree :: Entry a => [(Bool, Int, a)] -> [EntryTree a]
list_to_entrytree et = fst $ list_to_entrytree_ 0 et
-- }}}
-- Remove/insert effect calculation on locations {{{
rm_effect_ :: [Int] -> [Int] -> [Int]
rm_effect_ [] [] = error "Invalid insertion point"
rm_effect_ _ [] = []
rm_effect_ (r:[]) iii@(i:ii) =
if r<i then (i-1):ii
else if r==i then error "Invalid insertion point"
else iii
rm_effect_ (r:rr) iii@(i:ii) =
if r==i then i:(rm_effect_ rr ii)
else iii
has_init :: Loc -> Loc -> Bool
has_init (Loc l2) (Loc l1) = take (length l1) l2 == l1
-- The list must be sorted
rm_inits (l1:ll@(l2:ll2)) =
if has_init l2 l1 then
rm_inits (l1:ll2)
else
l1:(rm_inits ll)
rm_inits ll = ll
loc_rm_effect loc locv =
if null notafter then
Just loc
else if has_init loc (last notafter) then
Nothing -- loc will be removed!
else
Just $ foldl sub_effect loc (reverse $ notafter)
where
notafter = filter (\l -> l<=loc) $ rm_inits $ sort locv
sub_effect (Loc ll) (Loc su) = Loc $ rm_effect_ su ll
ins_effect_before [] [] ne = error "Invalid insertion"
ins_effect_before [] _ ne = []
ins_effect_before _ [] ne = []
ins_effect_before lll@(l:ll) (i:ii) ne
| l >= i && null ii = (l+ne):ll
| l == i = l:(ins_effect_before ll ii ne)
| otherwise = lll
ins_effect_after [] [] ne = []
ins_effect_after [] _ ne = []
ins_effect_after _ [] ne = []
ins_effect_after lll@(l:ll) (i:ii) ne
| l > i && null ii = (l+ne):ll
| l == i = l:(ins_effect_after ll ii ne)
| otherwise = lll
ins_effect_firstunder (l:ll) [] ne = (l+ne):ll
ins_effect_firstunder [] _ ne = []
ins_effect_firstunder lll@(l:ll) (i:ii) ne
| l == i = l:(ins_effect_firstunder ll ii ne)
| otherwise = lll
ins_effect_lastunder lll iii ne = lll
loc_ins_effect :: Loc -> InsertWhere -> Int -> Loc
loc_ins_effect loc Last _ =
loc
loc_ins_effect (Loc (l:ll)) First ne =
Loc $ (l+ne):ll
loc_ins_effect (Loc loc) (Before (Loc iloc)) ne =
Loc $ ins_effect_before loc iloc ne
loc_ins_effect (Loc loc) (After (Loc iloc)) ne =
Loc $ ins_effect_after loc iloc ne
loc_ins_effect (Loc loc) (FirstUnder (Loc iloc)) ne =
Loc $ ins_effect_firstunder loc iloc ne
loc_ins_effect (Loc loc) (LastUnder (Loc iloc)) ne =
Loc $ ins_effect_lastunder loc iloc ne
mpass f = maybe Nothing (Just . f)
loc_rm_effect_insw :: InsertWhere -> [Loc] -> Maybe InsertWhere
loc_rm_effect_insw First _ = Just First
loc_rm_effect_insw Last _ = Just Last
loc_rm_effect_insw (Before loc) locv = mpass Before $ loc_rm_effect loc locv
loc_rm_effect_insw (After loc) locv = mpass After $ loc_rm_effect loc locv
loc_rm_effect_insw (FirstUnder loc) locv = mpass FirstUnder $ loc_rm_effect loc locv
loc_rm_effect_insw (LastUnder loc) locv = mpass LastUnder $ loc_rm_effect loc locv
-- }}}
-- Remove, insert, replace, move etc. {{{
-- Replace
entrytree_replace_f :: Entry a => [EntryTree a] -> Loc -> (EntryTree a -> ([EntryTree a], b)) -> ([EntryTree a], b)
entrytree_replace_f (e:et) (Loc (0:[])) f = (\(fe, x) -> (fe ++ et, x)) $ f e
entrytree_replace_f (e:et) (Loc (0:loc)) f =
(e{entrytree_children = ch}:et, x)
where
(ch, x) = entrytree_replace_f (entrytree_children e) (Loc loc) f
entrytree_replace_f (e:et) (Loc (n:loc)) f
| n>0 = (e:et2, x)
where
(et2, x) = entrytree_replace_f et (Loc ((n-1):loc)) f
entrytree_replace_f _ _ _ = error "Invalid entry tree location"
entrytree_replace :: Entry a => [EntryTree a] -> Loc -> EntryTree a -> [EntryTree a]
entrytree_replace et loc enew =
fst $ entrytree_replace_f et loc $ \_ -> ([enew], ())
-- Insert
data InsertWhere =
First | Last |
Before Loc | After Loc |
FirstUnder Loc | LastUnder Loc
entrytree_insert :: Entry a => [EntryTree a] -> InsertWhere -> [EntryTree a] -> ([EntryTree a], Loc)
entrytree_insert et First eins =
(eins ++ et, Loc [0])
entrytree_insert et Last eins =
(et ++ eins, Loc [length et])
entrytree_insert et (Before loc) eins =
(fst $ entrytree_replace_f et loc $ \e -> (eins ++ [e], ()), loc_before loc)
entrytree_insert et (After loc) eins =
(fst $ entrytree_replace_f et loc $ \e -> ([e] ++ eins, ()), loc_after loc)
entrytree_insert et (FirstUnder loc) eins =
(fst $ entrytree_replace_f et loc f, loc_firstunder loc)
where
f e = ([e{entrytree_children = eins ++ entrytree_children e}], ())
entrytree_insert et (LastUnder loc) eins =
g $ entrytree_replace_f et loc f
where
f e = ([e{entrytree_children = ch ++ eins}], length ch)
where
ch = entrytree_children e
g (et, nch) = (et, loc_lastunder loc nch)
-- Remove
-- location list must be reverse-sorted for entrytree_remove_
entrytree_remove_ :: Entry a => [EntryTree a] -> [Loc] -> ([EntryTree a], [EntryTree a])
entrytree_remove_ et [] = (et, [])
entrytree_remove_ et (l:ll) =
(\(et_, el) -> (et_, e:el)) $ entrytree_remove_ et2 ll
where
(et2, e) = entrytree_replace_f et l (\e_ -> ([], e_))
entrytree_remove :: Entry a => [EntryTree a] -> [Loc] -> [EntryTree a]
entrytree_remove et locv = fst $ entrytree_remove_ et (reverse $ sort locv)
-- Move
entrytree_move :: Entry a => [EntryTree a] -> InsertWhere -> [Loc] -> ([EntryTree a], Loc)
entrytree_move et insw locv =
case insw_ of
Nothing -> error "List of entries to be moved contains (a parent of) target."
Just insw__ -> entrytree_insert et_ insw__ (reverse entries)
where
insw_ = loc_rm_effect_insw insw locv
(et_, entries) = entrytree_remove_ et (reverse $ sort locv)
-- }}}
-- Expand & collapse {{{
entrytree_expand :: Entry a => [EntryTree a] -> Loc -> Maybe [EntryTree a]
entrytree_expand = entrytree_map_path f
where
f e Nothing _
| entrytree_expanded e || length (entrytree_children e) == 0 = Nothing
| otherwise = Just [e{entrytree_expanded = True}]
f e (Just nch) _ =
Just [e{entrytree_expanded = True, entrytree_children=nch}]
entrytree_collapse :: Entry a => [EntryTree a] -> Loc -> Maybe [EntryTree a]
entrytree_collapse et loc = entrytree_map_path f et loc
where
f e _ eloc | loc==eloc =
case entrytree_expanded e of
True -> Just [e{entrytree_expanded = False}]
False -> Nothing
f e Nothing _ = Nothing
f e (Just nch) _ =
Just [e{entrytree_children=nch}]
entrytree_collapse_p :: Entry a => [EntryTree a] -> Loc -> Maybe [EntryTree a]
entrytree_collapse_p et loc@(Loc cl) = entrytree_map_path f et loc
where
f e Nothing (Loc ll) | (length cl - length ll) <= 1 =
case entrytree_expanded e of
True -> Just [e{entrytree_expanded = False}]
False -> Nothing
f e Nothing _ = Nothing
f e (Just nch) _ =
Just [e{entrytree_children=nch}]
entrytree_expand_ et = (fromMaybe et) . (entrytree_expand et)
entrytree_collapse_ et = (fromMaybe et) . (entrytree_collapse et)
-- }}}
-- {{{ Tagging
data TagAction = TagSet | TagUnset | TagToggle
entrytree_tag :: Entry a => [EntryTree a] -> Loc -> TagAction -> Maybe [EntryTree a]
entrytree_tag et loc what =
entrytree_map_path f et loc
where
f e Nothing eloc | loc==eloc =
case (entrytree_tagged e, what) of
(False, TagSet) -> Just [e{entrytree_tagged=True}]
(False, TagToggle) -> Just [e{entrytree_tagged=True}]
(True, TagUnset) -> Just [e{entrytree_tagged=False}]
(True, TagToggle) -> Just [e{entrytree_tagged=False}]
otherwise -> Nothing
f e (Just nch) _ =
Just [e{entrytree_children = nch}]
f _ _ _ = Nothing
entrytree_clear_tags :: Entry a => [EntryTree a] -> Maybe [EntryTree a]
entrytree_clear_tags et =
entrytree_map_maybe f et
where
f e Nothing _ =
case entrytree_tagged e of
False -> Nothing
True -> Just [e{entrytree_tagged=False}]
f e (Just nch) _ =
Just [e{entrytree_tagged=False, entrytree_children=nch}]
entrytree_get_tagged :: Entry a => [EntryTree a] -> [Loc]
entrytree_get_tagged et =
entrytree_map f et
where
f e chloc loc =
case entrytree_tagged e of
True -> loc:chloc
False -> chloc
-- }}}
| opqdonut/riot | Riot/Entry.hs | gpl-2.0 | 14,821 | 0 | 14 | 3,692 | 5,797 | 3,036 | 2,761 | 317 | 7 |
{-# LANGUAGE RecursiveDo #-}
import Probability
import Tree
import Tree.Newick
n_leaves = 3
allStrings = [ c : s | s <- "" : allStrings, c <- ['a','b','c','d','e','f','g','h','i','j'] ]
model = do
tree <- uniform_time_tree 1.0 n_leaves
let ltree = add_labels (take n_leaves allStrings) tree
let pr = uniform_time_tree_pr 1.0 n_leaves ltree
let ps = map (\n -> show (n, parentNode tree n)) [0 .. numNodes tree - 1]
rec let mu node = case parentNode tree node of
Nothing -> 0.0
Just node -> xs !! node
xs <- independent [ normal (mu node) 1.0 | node <- nodes tree ]
-- can we _observe_ from this? -- why or why not?
return ["tree" %=% write_newick tree] --,"pr" %=% pr, "xs" %=% xs, "ps" %=% ps]
main = do
mcmc model
| bredelings/BAli-Phy | tests/prob_prog/sample_tree/4/Main.hs | gpl-2.0 | 825 | 0 | 15 | 242 | 291 | 147 | 144 | 18 | 2 |
module Locals where
import Graphics.X11.Xlib
import XMonad
myFont :: String
myFont = "-*-terminus-*-*-*-*-32-*-*-*-*-*-*-*"
myPromptHeight :: Dimension
myPromptHeight = 50
myBorderWidth :: Dimension
myBorderWidth = 2
myWorkspaces :: [WorkspaceId]
myWorkspaces = ["con","edit","www"] ++ map show [4 .. 9]
| shaohme/home-dir | .xmonad/lib/Locals.hs | gpl-3.0 | 306 | 0 | 7 | 39 | 80 | 49 | 31 | 11 | 1 |
-- problem description:
-- 2 inputs A and B
-- 2 outputs A and B
-- each input data has to be copied to the corresponding output
-- inputs
inA :: [Int]
inA = [4, 5, -1, 9, 8, 5, 6]
inB :: [Int]
inB = [4, -6, 8, 7, 11, 43, 1]
-- expected outputs
outA :: [Int]
outA = [4, 5, -1, 9, 8, 5, 6]
outB :: [Int]
outB = [4, -6, 8, 7, 11, 43, 1]
-- for that simple problem, they are identical
-- process function
process :: Int -> Int
process x = x
-- simply returns the same parameter
-- output check function
check :: Int -> Int -> (Int, Int, Bool)
check i o = (i, result, o == result)
where result = process i
checkAll :: [Int] -> [Int] -> [(Int, Int, Bool)]
-- original solution
--checkAll [] [] = []
--checkAll (i:is) (o:os) = (check i o):checkAll is os
-- other implementation proposed
-- the uncurry thing is still a bit strange to me
--checkAll i o = map (uncurry check) (zip i o)
-- or yet another function that I could use:
checkAll i o = zipWith check i o
-- it's getting shorter and shorter!
-- another way to generate the list comprehension instead of checkAll
-- could be:
-- [check i o | (i, o) <- zip inA outA]
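-- illustrative sanity check of checkAll on made-up inputs:
-- checkAll [1, 2] [1, 3] == [(1, 1, True), (2, 2, False)]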
-- testing the whole thing
run = do
print (checkAll inA outA)
print (checkAll inB outB)
main = do
run
| simonced/haskell-kata | tis-100/prob01.hs | gpl-3.0 | 1,247 | 0 | 9 | 280 | 328 | 200 | 128 | 20 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.InstanceGroupManagers.DeletePerInstanceConfigs
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Deletes selected per-instance configs for the managed instance group.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.instanceGroupManagers.deletePerInstanceConfigs@.
module Network.Google.Resource.Compute.InstanceGroupManagers.DeletePerInstanceConfigs
(
-- * REST Resource
InstanceGroupManagersDeletePerInstanceConfigsResource
-- * Creating a Request
, instanceGroupManagersDeletePerInstanceConfigs
, InstanceGroupManagersDeletePerInstanceConfigs
-- * Request Lenses
, igmdpicProject
, igmdpicInstanceGroupManager
, igmdpicZone
, igmdpicPayload
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.instanceGroupManagers.deletePerInstanceConfigs@ method which the
-- 'InstanceGroupManagersDeletePerInstanceConfigs' request conforms to.
type InstanceGroupManagersDeletePerInstanceConfigsResource
=
"compute" :>
"v1" :>
"projects" :>
Capture "project" Text :>
"zones" :>
Capture "zone" Text :>
"instanceGroupManagers" :>
Capture "instanceGroupManager" Text :>
"deletePerInstanceConfigs" :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON]
InstanceGroupManagersDeletePerInstanceConfigsReq
:> Post '[JSON] Operation
-- | Deletes selected per-instance configs for the managed instance group.
--
-- /See:/ 'instanceGroupManagersDeletePerInstanceConfigs' smart constructor.
data InstanceGroupManagersDeletePerInstanceConfigs =
InstanceGroupManagersDeletePerInstanceConfigs'
{ _igmdpicProject :: !Text
, _igmdpicInstanceGroupManager :: !Text
, _igmdpicZone :: !Text
, _igmdpicPayload :: !InstanceGroupManagersDeletePerInstanceConfigsReq
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'InstanceGroupManagersDeletePerInstanceConfigs' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'igmdpicProject'
--
-- * 'igmdpicInstanceGroupManager'
--
-- * 'igmdpicZone'
--
-- * 'igmdpicPayload'
instanceGroupManagersDeletePerInstanceConfigs
:: Text -- ^ 'igmdpicProject'
-> Text -- ^ 'igmdpicInstanceGroupManager'
-> Text -- ^ 'igmdpicZone'
-> InstanceGroupManagersDeletePerInstanceConfigsReq -- ^ 'igmdpicPayload'
-> InstanceGroupManagersDeletePerInstanceConfigs
instanceGroupManagersDeletePerInstanceConfigs pIgmdpicProject_ pIgmdpicInstanceGroupManager_ pIgmdpicZone_ pIgmdpicPayload_ =
InstanceGroupManagersDeletePerInstanceConfigs'
{ _igmdpicProject = pIgmdpicProject_
, _igmdpicInstanceGroupManager = pIgmdpicInstanceGroupManager_
, _igmdpicZone = pIgmdpicZone_
, _igmdpicPayload = pIgmdpicPayload_
}
-- | Project ID for this request.
igmdpicProject :: Lens' InstanceGroupManagersDeletePerInstanceConfigs Text
igmdpicProject
= lens _igmdpicProject
(\ s a -> s{_igmdpicProject = a})
-- | The name of the managed instance group. It should conform to RFC1035.
igmdpicInstanceGroupManager :: Lens' InstanceGroupManagersDeletePerInstanceConfigs Text
igmdpicInstanceGroupManager
= lens _igmdpicInstanceGroupManager
(\ s a -> s{_igmdpicInstanceGroupManager = a})
-- | The name of the zone where the managed instance group is located. It
-- should conform to RFC1035.
igmdpicZone :: Lens' InstanceGroupManagersDeletePerInstanceConfigs Text
igmdpicZone
= lens _igmdpicZone (\ s a -> s{_igmdpicZone = a})
-- | Multipart request metadata.
igmdpicPayload :: Lens' InstanceGroupManagersDeletePerInstanceConfigs InstanceGroupManagersDeletePerInstanceConfigsReq
igmdpicPayload
= lens _igmdpicPayload
(\ s a -> s{_igmdpicPayload = a})
instance GoogleRequest
InstanceGroupManagersDeletePerInstanceConfigs
where
type Rs InstanceGroupManagersDeletePerInstanceConfigs
= Operation
type Scopes
InstanceGroupManagersDeletePerInstanceConfigs
=
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/compute"]
requestClient
InstanceGroupManagersDeletePerInstanceConfigs'{..}
= go _igmdpicProject _igmdpicZone
_igmdpicInstanceGroupManager
(Just AltJSON)
_igmdpicPayload
computeService
where go
= buildClient
(Proxy ::
Proxy
InstanceGroupManagersDeletePerInstanceConfigsResource)
mempty
| brendanhay/gogol | gogol-compute/gen/Network/Google/Resource/Compute/InstanceGroupManagers/DeletePerInstanceConfigs.hs | mpl-2.0 | 5,600 | 0 | 18 | 1,223 | 548 | 325 | 223 | 100 | 1 |
module System.HDFS
( FileSystem
, File
, IOMode(..)
, FileInfo(..)
, withHdfs
, connect
, disconnect
, withFile
, open
, close
, write
, writeL
, flush
, read
, available
, seek
, tell
, pread
, exists
, cp
, mv
, rm
, mv'
, ls
, cwd
, cd
, mkdir
, chown
, chmod
, stat
-- * HDFS specific
-- , getHosts
-- , setReplication
-- , defaultBlocksize
-- , getCapacity
-- , getUsed
-- , utime
) where
import Prelude hiding (read)
import Control.Applicative
import Control.Exception
import Control.Monad
import Data.Int
import Foreign
import Foreign.C.Error
import Foreign.C.String
import Foreign.C.Types
import System.HDFS.Base
import qualified Data.ByteString as SB
import qualified Data.ByteString.Char8 as SBC
import qualified Data.ByteString.Lazy as LB
import qualified Data.ByteString.Unsafe as UB
-- | HDFS File System Handle
newtype FileSystem = FileSystem { _hdfs :: HDFSFsPtr }
-- | HDFS File Handle
newtype File = File { _hfdsFile :: HDFSFilePtr }
-- | Modes for opening HDFS files
data IOMode = Read | Write | Append
-- | HDFS File Metadata
data FileInfo = FileInfo
{ mKind :: Char
, mName :: String
, mLastMod :: Int64
, mSize :: Int64
, mReplication :: Int16
, mBlocksize :: Int64
, mOwner :: String
, mGroup :: String
, mPermissions :: Int16
, mLastAccess :: Int64
} deriving (Show)
type Host = String
type Port = Int
type Path = String
withHdfs :: Host -> Port -> (FileSystem -> IO a) -> IO a
withHdfs h p = bracket (connect h p) disconnect
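-- A minimal usage sketch (the host, port and path below are made-up example
-- values, not defaults of this binding):
--
-- > withHdfs "localhost" 9000 $ \fs -> exists fs "/tmp"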
connect :: Host -> Port -> IO FileSystem
connect h p = withCString h $ \host -> do
fs <- throwErrnoIfNull "connect" $ c_hdfs_connect host (fromIntegral p)
return $ FileSystem fs
disconnect :: FileSystem -> IO ()
disconnect (FileSystem fs) =
throwErrnoIfMinus1_ "disconnect" (c_hdfs_disconnect fs)
withFile :: FileSystem -> Path -> IOMode -> (File -> IO a) -> IO a
withFile fs p m = bracket (open fs p m) (close fs)
open :: FileSystem -> Path -> IOMode -> IO File
open (FileSystem fs) p m =
withCString p $ \path -> do
f <- throwErrnoIfNull "open" $
c_hdfs_open_file fs path iomode 0 0 0 -- note: using defaults
return . File $ f
where
iomode = toHDFSIOMode m
close :: FileSystem -> File -> IO ()
close (FileSystem fs) (File f) =
throwErrnoIfMinus1_ "close" (c_hdfs_close_file fs f) >> return ()
exists :: FileSystem -> Path -> IO Bool
exists (FileSystem fs) p = do
t <- withCString p $ \path -> c_hdfs_exists fs path
return $ (0 :: CInt) == t
write :: FileSystem -> File -> SB.ByteString -> IO Int
write (FileSystem fs) (File f) b =
UB.unsafeUseAsCStringLen b $ \(cstr, len) -> do
written <- throwErrnoIfMinus1 "write" $
c_hdfs_write fs f cstr (fromIntegral len)
return . fromIntegral $ written
writeL :: FileSystem -> File -> LB.ByteString -> IO Int
writeL (FileSystem fs) (File f) lbs = do
let fn offset bs = UB.unsafeUseAsCStringLen bs $ \(cstr, len) -> do
written <- throwErrnoIfMinus1 "writeL" $
c_hdfs_write fs f cstr (fromIntegral len)
return $ offset + fromIntegral written
foldM fn 0 (LB.toChunks lbs)
read :: FileSystem -> File -> Int -> IO SB.ByteString
read (FileSystem fs) (File f) len =
allocaArray len $ \buf -> do
nread <- throwErrnoIfMinus1 "read" $
c_hdfs_read fs f buf (fromIntegral len)
SB.packCStringLen (buf, fromIntegral nread)
pread :: FileSystem -> File -> Int64 -> Int -> IO SB.ByteString
pread (FileSystem fs) (File f) offset len =
allocaArray len $ \buf -> do
nread <- throwErrnoIfMinus1 "pread" $
c_hdfs_pread fs f (fromIntegral offset) buf (fromIntegral len)
SB.packCStringLen (buf, fromIntegral nread)
flush :: FileSystem -> File -> IO ()
flush (FileSystem fs) (File f) =
throwErrnoIfMinus1_ "flush" (c_hdfs_flush fs f) >> return ()
available :: FileSystem -> File -> IO Int
available (FileSystem fs) (File f) =
fmap fromIntegral $ throwErrnoIfMinus1 "available" $ c_hdfs_available fs f
seek :: FileSystem -> File -> Int64 -> IO ()
seek (FileSystem fs) (File f) offset =
throwErrnoIfMinus1_ "seek" $ c_hdfs_seek fs f (fromIntegral offset)
tell :: FileSystem -> File -> IO Int64
tell (FileSystem fs) (File f) =
fmap fromIntegral $ throwErrnoIfMinus1 "tell" $ c_hdfs_tell fs f
cp :: FileSystem -> Path -> FileSystem -> Path -> IO ()
cp (FileSystem fs) p (FileSystem fs') p' =
withCString p $ \src -> withCString p' $ \dst ->
throwErrnoIfMinus1_ "cp" $ c_hdfs_copy fs src fs' dst
rm :: FileSystem -> Path -> IO ()
rm (FileSystem fs) p =
withCString p $ \path ->
throwErrnoIfMinus1_ "delete" $ c_hdfs_delete fs path
mv :: FileSystem -> Path -> Path -> IO ()
mv (FileSystem fs) p p' =
withCString p $ \path -> withCString p' $ \path' ->
throwErrnoIfMinus1_ "rename" $ c_hdfs_rename fs path path'
-- | Move across FileSystems
mv' :: FileSystem -> Path -> FileSystem -> Path -> IO ()
mv' (FileSystem fs) p (FileSystem fs') p' =
withCString p $ \src -> withCString p' $ \dst ->
throwErrnoIfMinus1_ "mv" $ c_hdfs_move fs src fs' dst
ls :: FileSystem -> Path -> IO [FileInfo]
ls (FileSystem fs) p =
withCString p $ \path ->
alloca $ \numptr -> do
cinfo <- c_hdfs_list_directory fs path numptr
num <- peek numptr
info <- peekArray (fromIntegral num) cinfo >>= mapM mkFileInfo
c_hdfs_free_file_info cinfo num
errNo <- getErrno
if errNo == eOK
then return info
else throwErrno "ls"
cwd :: FileSystem -> IO String
cwd (FileSystem fs) =
allocaArray 255 $ \buf -> do
wd <- throwErrnoIfNull "cwd" $
c_hdfs_get_working_directory fs buf 255
-- this is kinda silly...
SBC.unpack <$> SB.packCStringLen (wd, 255)
cd :: FileSystem -> Path -> IO ()
cd (FileSystem fs) p =
withCString p $ \path ->
throwErrnoIfMinus1_ "cd" $ c_hdfs_set_working_directory fs path
mkdir :: FileSystem -> Path -> IO ()
mkdir (FileSystem fs) p =
withCString p $ \path ->
throwErrnoIfMinus1_ "mkdir" $ c_hdfs_create_directory fs path
chown :: FileSystem -> Path -> String -> String -> IO ()
chown (FileSystem fs) p u g =
withCString p $ \path ->
withCString u $ \user ->
withCString g $ \group ->
throwErrnoIfMinus1_ "chown" $ c_hdfs_chown fs path user group
chmod :: FileSystem -> Path -> Int16 -> IO ()
chmod (FileSystem fs) p m =
withCString p $ \path ->
throwErrnoIfMinus1_ "chmod" $ c_hdfs_chmod fs path (fromIntegral m)
stat :: FileSystem -> Path -> IO FileInfo
stat (FileSystem fs) p =
withCString p $ \path -> do
cinfo <- throwErrnoIfNull "stat" (c_hdfs_get_path_info fs path)
info <- peek cinfo >>= mkFileInfo
c_hdfs_free_file_info cinfo 1
return info
-- internal
mkFileInfo :: HDFSFileInfo -> IO FileInfo
mkFileInfo (HDFSFileInfo k n l s r b o g m a) = do
name <- peekCString n
ownr <- peekCString o
grp <- peekCString g
let kind = castCCharToChar k
lmod = fromIntegral l
size = fromIntegral s
repl = fromIntegral r
blck = fromIntegral b
perm = fromIntegral m
lacc = fromIntegral a
return (FileInfo kind name lmod size repl blck ownr grp perm lacc)
toHDFSIOMode :: IOMode -> HDFSIOMode
toHDFSIOMode Read = readOnly -- ^ O_RDONLY
toHDFSIOMode Write = writeOnly -- ^ O_WRONLY
toHDFSIOMode Append = append -- ^ O_APPEND
| kim/hdfs-haskell | src/System/HDFS.hs | lgpl-3.0 | 7,398 | 0 | 18 | 1,723 | 2,639 | 1,341 | 1,298 | -1 | -1 |
{-# LANGUAGE TypeSynonymInstances, TypeOperators, ViewPatterns, FlexibleInstances, RecordWildCards, FlexibleContexts, OverlappingInstances, GeneralizedNewtypeDeriving, MultiParamTypeClasses, DeriveDataTypeable, UndecidableInstances, TypeFamilies, ScopedTypeVariables #-}
module Language.Pascal.JVM.CodeGen where
import Control.Monad
import Control.Monad.State
import Control.Monad.Exception
import Data.List (intercalate, findIndex)
import qualified Data.Map as M
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as L
import Data.Char (ord)
import Data.Generics
import Data.Word
import Data.Int
import qualified JVM.Builder as J
import JVM.Assembler
import JVM.ClassFile
import JVM.Exceptions
import qualified Java.Lang
import qualified Java.IO
import Language.Pascal.Types
import Language.Pascal.JVM.Types
import Language.Pascal.JVM.Builtin
instructionsByType :: [(FieldType, TypeInstructions)]
instructionsByType = [
(IntType, TypeInstructions ILOAD ISTORE IALOAD IASTORE
IRETURN (Just IF)
(Just IADD) (Just ISUB) (Just IMUL) (Just IDIV)),
(BoolType, TypeInstructions ILOAD ISTORE IALOAD IASTORE
IRETURN (Just IF)
Nothing Nothing Nothing Nothing)
]
getInstruction :: Throws (Located GeneratorError) e => String -> FieldType -> (TypeInstructions -> a) -> GenerateJvm e a
getInstruction msg t fn = do
let msg' = "Unsupported " ++ msg ++ ": " ++ show t
case lookup t instructionsByType of
Nothing -> failCheck GeneratorError msg'
Just i -> return $ fn i
getInstruction' :: Throws (Located GeneratorError) e => String -> FieldType -> (TypeInstructions -> Maybe a) -> GenerateJvm e a
getInstruction' msg t fn = do
x <- getInstruction msg t fn
let msg' = "Unsupported " ++ msg ++ ": " ++ show t
case x of
Nothing -> failCheck GeneratorError msg'
Just i -> return i
getJvmType :: Type -> FieldType
getJvmType TInteger = IntType
getJvmType TBool = BoolType
getJvmType (TRecord (Just name) _) = ObjectType name
getJvmType x = error $ "Unsupported type: " ++ show x
toBS :: String -> L.ByteString
toBS str = L.fromStrict $ B.pack $ map (fromIntegral . ord) str
instance Throws (Located GeneratorError) e => Checker (GenerateJvm e) where
type GeneralError (GenerateJvm e) = GeneratorError
enterContext c = do
st <- get
put $ st {currentContext = c : currentContext st}
dropContext = do
st <- get
case currentContext st of
[] -> failCheck GeneratorError "Internal error: empty context on dropContext!"
(_:xs) -> put $ st {currentContext = xs}
failCheck constructor msg = do
cxs <- gets currentContext
let loc = ErrorLoc 0 0 (if null cxs
then Unknown
else head cxs)
GenerateJvm $ lift $ J.Generate $ throw $ Located loc $ constructor msg
-- runCodeGen :: GenerateJvm e () -> [Instruction]
runCodeGen name gen = J.generated $ execState go J.emptyGState
where
go = do
x <- tryEMT (J.runGenerate $ runStateT (unJvm gen) (emptyGCState name))
case x of
Right result -> return result
Left err -> fail $ "code generator: " ++ show err
generateJvm :: String -> GenerateJvm e () -> J.Generate e ()
generateJvm name gen = evalStateT (unJvm gen) (emptyGCState name)
-- | Get full name of current context
getContextString :: GenerateJvm e String
getContextString = do
cxs <- gets (map contextId . filter isProgramPart . currentContext)
return $ intercalate "_" (reverse cxs)
where
isProgramPart (ForLoop _ _) = False
isProgramPart _ = True
newLabel :: GenerateJvm e String
newLabel = do
last <- gets lastLabel
modify $ \st -> st {lastLabel = last + 1}
return $ "dummy__" ++ show last
pushConst :: Lit -> GenerateJvm e ()
pushConst (LInteger i) = liftG $ J.i8 LDC1 (CInteger $ fromIntegral i)
pushConst (LBool b) = liftG $ J.i8 LDC1 (CInteger $ if b then 1 else 0)
pushConst (LString s) = liftG $ J.loadString s
getSymbolType :: Throws (Located GeneratorError) e => Id -> SymbolTable -> GenerateJvm e Type
getSymbolType name table = do
case lookupSymbol name table of
Nothing -> failCheck GeneratorError $ "Unknown symbol: " ++ name
Just symbol -> return $ symbolType symbol
getSymbol :: Throws (Located GeneratorError) e => Id -> SymbolTable -> GenerateJvm e Symbol
getSymbol name table = do
case lookupSymbol name table of
Nothing -> failCheck GeneratorError $ "Unknown symbol: " ++ name
Just symbol -> return symbol
loadVariable :: Throws (Located GeneratorError) e => Id -> SymbolTable -> GenerateJvm e ()
loadVariable name table = do
symbol <- getSymbol name table
case symbolConstValue symbol of
Just const -> pushConst const
Nothing -> if symbolContext symbol == Outside
then loadGlobal name (getJvmType $ symbolType symbol)
else loadLocal (symbolIndex symbol) (getJvmType $ symbolType symbol)
loadLocal :: Throws (Located GeneratorError) e => Int -> FieldType -> GenerateJvm e ()
loadLocal idx t = do
instruction <- getInstruction "local variable type" t tiLoad
prog <- gets programName
liftG $
J.i0 $ instruction $ fromIntegral (idx+1)
loadGlobal :: Throws (Located GeneratorError) e => Id -> FieldType -> GenerateJvm e ()
loadGlobal name t = do
prog <- gets programName
liftG $ do
J.i0 $ ALOAD_ I0
J.getField (toBS prog) (NameType (toBS name) t)
getReturnSignature :: Type -> ReturnSignature
getReturnSignature TVoid = ReturnsVoid
getReturnSignature t = Returns $ getJvmType t
getFunctionSig :: Throws (Located GeneratorError) e => Id -> SymbolTable -> GenerateJvm e MethodSignature
getFunctionSig name table = do
t <- getSymbolType name table
case t of
TFunction argTypes retType ->
return $ MethodSignature (map getJvmType argTypes)
(Returns $ getJvmType retType)
_ -> failCheck GeneratorError $ "Invalid function type: " ++ show t
getProcedureSig :: Throws (Located GeneratorError) e => Id -> SymbolTable -> GenerateJvm e MethodSignature
getProcedureSig name table = do
t <- getSymbolType name table
case t of
TFunction argTypes TVoid ->
return $ MethodSignature (map getJvmType argTypes) ReturnsVoid
_ -> failCheck GeneratorError $ "Invalid procedure type: " ++ show t
instance (CodeGen a) => CodeGen [a] where
generate list = forM_ list generate
instance (CodeGen (a TypeAnn)) => CodeGen (a :~ TypeAnn) where
generate = generate . content
instance CodeGen (Expression :~ TypeAnn) where
generate e@(content -> Variable name) = do
loadVariable name (getActualSymbols e)
generate e@(content -> ArrayItem name ix) = do
loadVariable name (getActualSymbols e)
generate ix
let t = typeOfA e
instruction <- getInstruction "array type" (getJvmType t) tiLoadArray
liftG $ J.i0 instruction
generate e@(content -> RecordField base field) = do
baseType <- getSymbolType base (getActualSymbols e)
case baseType of
TRecord (Just name) fields -> do
case lookup field fields of
Nothing -> failCheck GeneratorError $ "Unknown record field: " ++ base ++ "." ++ field
Just fieldType -> do
loadVariable base (getActualSymbols e)
liftG $
J.getField (toBS base) (NameType (toBS field) (getJvmType fieldType))
_ -> failCheck GeneratorError $ "Invalid record type: " ++ show baseType
generate e@(content -> Literal x) = pushConst x
generate e@(content -> Call name args) = do
case lookupBuiltin name of
Nothing -> do
prog <- gets programName
liftG $ J.aload_ I0
generate args
sig <- getFunctionSig name (getActualSymbols e)
liftG $ J.invokeVirtual (toBS prog) $ NameType (toBS name) sig
Just builtin -> builtin args
generate e@(content -> Op op x y) = do
generate x
generate y
let fn = case op of
Add -> tiAdd
Sub -> tiSub
Mul -> tiMul
Div -> tiDiv
_ -> error $ "Unsupported binary operation: " ++ show op
let t = getJvmType (typeOfA e)
instruction <- getInstruction' "expression type" t fn
liftG $ J.i0 instruction
generateCondition (content -> Op op x y) label
| op `elem` [IsGT, IsLT, IsEQ, IsNE] = do
generate x
generate y
let op' = case op of
IsGT -> C_GT
IsLT -> C_LT
IsNE -> C_NE
IsEQ -> C_EQ
liftG $ IF_ICMP op' `J.useLabel` label
generateCondition expr label = do
generate expr
liftG $ IF C_NE `J.useLabel` label
assign :: (Throws (Located GeneratorError) e, CodeGen val) => LValue :~ TypeAnn -> val -> GenerateJvm e ()
assign e@(content -> LVariable name) value = do
prog <- gets programName
symbol <- getSymbol name (getActualSymbols e)
let t = getJvmType $ symbolType symbol
if symbolContext symbol == Outside
then do
liftG $ J.aload_ I0
generate value
let nt = NameType (toBS name) t
liftG $ J.putField (toBS prog) nt
else do
generate value
instruction <- getInstruction "variable type" t tiStore
liftG $ J.i0 $ instruction (fromIntegral $ symbolIndex symbol + 1)
instance CodeGen (Statement :~ TypeAnn) where
generate e@(content -> Assign lvalue expr) = do
assign lvalue expr
generate e@(content -> Procedure name args) = do
case lookupBuiltin name of
Nothing -> do
liftG $ J.aload_ I0
generate args
prog <- gets programName
sig <- getProcedureSig name (getActualSymbols e)
liftG $ J.invokeVirtual (toBS prog) $ NameType (toBS name) sig
Just builtin -> builtin args
generate e@(content -> Return expr) = do
generate expr
(InFunction _ retType:_) <- gets currentContext
let t = getJvmType retType
instruction <- getInstruction "return value type" t tiReturn
liftG $ J.i0 instruction
generate (content -> Exit) = do
liftG $ J.i0 RETURN
generate (content -> IfThenElse condition ifStatements elseStatements) = do
trueLabel <- newLabel
endIf <- newLabel
let t = getJvmType (typeOfA condition)
generateCondition condition trueLabel
generate elseStatements
liftG $ GOTO `J.useLabel` endIf
liftG $ J.setLabel trueLabel
generate ifStatements
liftG $ J.setLabel endIf
instance CodeGen (Function TypeAnn) where
generate (Function {..}) = do
inContext (InFunction fnName fnResultType) $ do
let argTypes = map (symbolType . content) fnFormalArgs
argSignature = map getJvmType argTypes
retSignature = getReturnSignature fnResultType
(J.newMethod [ACC_PUBLIC] (toBS fnName) argSignature retSignature $ do
J.setMaxLocals (fromIntegral $ length fnVars + 1)
J.setStackSize 20
generate fnBody
J.i0 RETURN )
`catchG`
(\(e :: UnresolvedLabel) -> fail $ "Internal error: " ++ show e)
return ()
instance CodeGen (Program :~ TypeAnn) where
generate (content -> Program {..}) = do
prog <- gets (toBS . programName)
inContext Outside $ do
forM_ progVariables $ \var -> do
let t = getJvmType (symbolType $ content var)
liftG $ J.newField [ACC_PUBLIC] (toBS $ symbolName $ content var) t
forM_ progFunctions $ \fn ->
generate fn
init <- (J.newMethod [ACC_PUBLIC] (toBS "<init>") [] ReturnsVoid $ do
J.setStackSize 1
J.aload_ I0
J.invokeSpecial Java.Lang.object Java.Lang.objectInit
J.i0 RETURN )
`catchG`
(\(e :: UnresolvedLabel) -> fail $ "Internal error: " ++ show e)
realmain <- (J.newMethod [ACC_PUBLIC] (toBS "realmain") [J.arrayOf Java.Lang.stringClass] ReturnsVoid $ do
J.setStackSize 20
generate progBody
J.i0 RETURN)
`catchG`
(\(e :: UnresolvedLabel) -> fail $ "Internal error: " ++ show e)
(J.newMethod [ACC_PUBLIC, ACC_STATIC] (toBS "main") [J.arrayOf Java.Lang.stringClass] ReturnsVoid $ do
J.setStackSize 22
liftG $ do
J.new prog
J.dup
J.invokeSpecial prog init
J.aload_ I0
J.invokeVirtual prog realmain
J.i0 RETURN )
`catchG`
(\(e :: UnresolvedLabel) -> fail $ "Internal error: " ++ show e)
return ()
| portnov/simple-pascal-compiler | spc-jvm/Language/Pascal/JVM/CodeGen.hs | lgpl-3.0 | 12,563 | 0 | 23 | 3,241 | 4,221 | 2,033 | 2,188 | 289 | 4 |
{-# LANGUAGE NamedFieldPuns,RecordWildCards #-}
module StearnsWharf.XML.XmlLoads where
import qualified Data.Map as Map
import qualified Text.XML.Light as X
import qualified StearnsWharf.XML.Common as XC
import qualified StearnsWharf.Loads as L
import qualified StearnsWharf.Nodes as N
type LoadDef = (String,L.Load)
type LoadVectors = (Double,Double)
type LoadMap = Map.Map String L.Load
data LoadDirection = LDY | LDX deriving Eq
findDistLoads :: X.Element -> LoadDirection -> LoadVectors
findDistLoads el loadDir = result
where y' = XC.xmlAttr ys el
result = case y' of Nothing -> (y1, y2)
where y1 = maybeLoad ys1
y2 = maybeLoad ys2
Just v -> (y1,y1)
where y1 = read v
maybeLoad v = maybe 0.0 (\p -> read p) $ XC.xmlAttr v el
(ys,ys1,ys2) | loadDir == LDY = ("y","y1","y2")
| otherwise = ("x","x1","x2")
loadDef :: X.Element -> LoadDef
loadDef el = (lid, (L.Load y1 y2 x1 x2 loadfactor))
where Just lid = XC.xmlAttr "id" el
Just loadfactor = XC.xmlAttr "f" el >>= Just . read
(y1,y2) = findDistLoads el $ LDY
(x1,x2) = findDistLoads el $ LDX
createLoads :: X.Element -> LoadMap
createLoads doc = Map.fromList loadDefs
where xmlloads = XC.xmlElements "load" doc
loadDefs = map loadDef xmlloads
createPointLoad :: N.NodeMap -> X.Element -> L.PointLoad
createPointLoad nm el = L.PointLoad v node ang f
where Just f = XC.xmlAttr "f" el >>= Just . read
Just v = XC.xmlAttr "v" el >>= Just . read
Just ang = XC.xmlAttr "ang" el >>= Just . read
Just nid = XC.xmlAttr "node" el
Just node = Map.lookup nid nm
createPointLoads :: N.NodeMap -> X.Element -> [L.PointLoad]
createPointLoads nm el = map createPointLoad' xmlploads
where xmlploads = X.findElements (X.unqual "pointload") el
createPointLoad' = createPointLoad nm
| baalbek/stearnswharf | src/StearnsWharf/XML/XmlLoads.hs | lgpl-3.0 | 2,033 | 0 | 12 | 588 | 676 | 358 | 318 | 43 | 2 |
module Lib where
{-
example follows Henderson (2002)
- constructing images out of recursively-subdivided images
- each image may be modified by a spatial transformation
Basic Building Blocks
-}
data Tile
-- terminal constructors: haskell and church
haskell :: Tile
haskell = undefined
church :: Tile
church = undefined
color
:: Double -- ^ red
-> Double -- ^ green
-> Double -- ^ blue
-> Double -- ^ alpha
-> Tile
color = undefined
{-
each color should be in closed interval [0,1].
nothing in the typesystem requires this to be the case, so we will need to constrain it with a law:
∀ (r :: Double) (g :: Double) (b :: Double)
(a :: Double).
color r g b a =
color (clamp 0 1 r)
(clamp 0 1 g)
(clamp 0 1 b)
(clamp 0 1 a)
All terms in an algebra are built from
- terminal constructors, and
- inductive constructors
- ones which "derive" new terms based on existing terms
-}
-- inductive
-- rotate 90 degrees C(lock)W(ise)
cw :: Tile -> Tile
cw = undefined
-- rotate 90 degrees C(ounter)C(lock)W(ise)
ccw :: Tile -> Tile
ccw = undefined
{-
-- laws
∀ (t :: Tile).
cw (cw (cw (cw t))) = t
∀ (t :: Tile).
ccw (cw t) = t
∀ (t :: Tile).
cw (ccw t) = t
-- equational reasoning
ccw t
= (via cw/cw/cw/cw)
ccw (cw (cw (cw (cw t))))
= (via ccw/cw)
cw (cw (cw t))
-}
-- flip H(orizontally)
flipH :: Tile -> Tile
flipH = undefined
{-
∀ (t :: Tile).
flipH (flipH t) = t
∀ (t :: Tile).
  flipH (cw (cw (flipH t))) = cw (cw t)
-}
{-
Exercise
Prove flipH . cw^{2*n} . flipH = cw^{2*n}, where the ^ operation means repeated composition. For example, cw^4 = cw . cw . cw . cw.
Horizontally flipping a clockwise rotation is equivalent to rotating counterclockwise a horizontal flip.
This law takes cw to ccw under the flipH transformation:
∀ (t :: Tile).
flipH (cw t) = ccw (flipH t)
-}
-- flipV(ertically)
flipV :: Tile -> Tile
flipV = undefined
{-
can derive it from cw, ccw and flipH
-- its own inverse
∀ (t :: Tile).
flipV (flipV t) = t
-- can derive it from cw, ccw and flipH
∀ (t :: Tile).
flipV t = ccw (flipH (cw t))
∀ (t :: Tile).
flipV (flipH t) = cw (cw t)
Exercise
Derive the fact that flipV is its own inverse, using any of the other laws we’ve given for our algebra.
Solution
flipV (flipV t)
= (via flipV)
flipV (ccw (flipH (cw t)))
= (via flipV)
ccw (flipH (cw (ccw (flipH (cw t)))))
= (via cw/ccw)
ccw (flipH (flipH (cw t)))
= (via flipH/flipH)
ccw (cw t)
= (via ccw/cw)
t
Exercise
Derive a proof that flipV . flipH = cw . cw
Solution
flipV (flipH t)
= (via flipV)
ccw (flipH (cw (flipH t)))
= (via ccw)
cw (cw (cw (flipH (cw (flipH t)))))
= (via x-symmetry)
cw (cw (flipH (ccw (cw (flipH t)))))
= (via ccw/cw)
cw (cw (flipH (flipH t)))
= (via flipH/flipH)
cw (cw t)
composing multiple tiles together
- every operation in algebra must take valid inputs to valid outputs
- tiles are always square
 - Simply putting one square tile beside another would result in a rectangular image, which would be INVALID
- to maintain closure, must subdivide square into two rectangular halves, then fill each half, stretching tiles to cover space
-}
beside :: Tile -> Tile -> Tile
beside = undefined
above :: Tile -> Tile -> Tile
above = undefined
quad :: Tile -> Tile -> Tile -> Tile -> Tile
quad = undefined
swirl :: Tile -> Tile
swirl = undefined
behind :: Tile -> Tile -> Tile
behind = undefined
{-
∀ (t1 :: Tile) (t2 :: Tile).
flipH (beside t1 t2) = beside (flipH t2) (flipH t1)
Exercise
Prove flipH (flipH (beside t1 t2)) = beside t1 t2 in two separate ways.
∀ (t1 :: Tile) (t2 :: Tile).
above t1 t2 = cw (beside (ccw t1) (ccw t2))
Intuitively, we can also rewrite an above of besides as a beside of aboves, so long as we swap the top-right and bottom-left tiles when we do so.
∀ (a :: Tile) (b :: Tile) (c :: Tile) (d :: Tile).
above (beside a b) (beside c d) =
beside (above a c) (above b d)
∀ (a :: Tile) (b :: Tile) (c :: Tile) (d :: Tile).
above (beside a b) (beside c d) = quad a b c d
As an even more special case, we can rotate one tile as we move through a quad, creating a sort of swirl effect as in figure 18. This operation is given by:
∀ (t :: Tile).
quad t (cw t) (ccw t) (cw (cw t)) = swirl t
The color combinator has the property that it is unaffected by cw and flipH:
∀ (r :: Double) (g :: Double) (b :: Double)
(a :: Double).
cw (color r g b a) = color r g b a
∀ (r :: Double) (g :: Double) (b :: Double)
(a :: Double).
flipH (color r g b a) = color r g b a
-- color r g b 0 is a right-identity for behind
∀ (t :: Tile) (r :: Double) (g :: Double) (b :: Double).
behind t (color r g b 0) = t
empty :: Tile
∀ (r :: Double) (g :: Double) (b :: Double).
color r g b 0 = empty
What it means for two tiles to be “equal”:
 - it cannot be definitional equality, because the law t = cw (cw (cw (cw t))) equates terms that are not syntactically equal
- two tiles are equal IFF they render to equal images : equal matrices of pixels
Semantics of tile algebra freely subdivide space, so images generated by it can have arbitrarily precise levels of detail.
-}
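-- A sketch of the observational equality of tiles described above
-- (illustrative only: it presupposes an Eq instance for Color, which is
-- left commented out below):
--
-- > tileEq :: Int -> Int -> Tile -> Tile -> Bool
-- > tileEq w h t1 t2 = rasterize w h t1 == rasterize w h t2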
-- OBSERVATION of algebra
-- function “out” of tile algebra.
rasterize
:: Int -- ^ resulting width
-> Int -- ^ resulting height
-> Tile
-> [[Color]] -- ^ pixels in row-major order
rasterize = undefined
data Color
--instance Eq Color
{-
two tiles are equal IFF they produce the same image under rasterize
∀ (t1 :: Tile) (t2 :: Tile).
(∀ (w :: Int) (h :: Int).
rasterize w h t1 == rasterize w h t2) => t1 = t2
Laws that constrain the observation of tile algebra.
E.g., the flipV operation moves the bottom row of pixels to the top; specifically, it should reverse the order of the rows.
∀ (t :: Tile) (w :: Int) (h :: Int).
rasterize w h (flipV t) = reverse (rasterize w h t)
flipH should flip the pixels within each row:
∀ (t :: Tile) (w :: Int) (h :: Int).
rasterize w h (flipH t) =
fmap reverse (rasterize w h t)
To put one tile above another, split height in half, then concatenate rows of top raster with bottom.
Attention must be paid to the height computations because integers are not always evenly divisible;
here we will make the arbitrary decision that the bottom raster should soak up the extra row of pixels.
∀ (t1 :: Tile) (t2 :: Tile) (w :: Int) (h :: Int).
rasterize w h (above t1 t2) =
rasterize w (div h 2) t1 <>
rasterize w (h - div h 2) t2
We can put tiles beside one another in a similar fashion, by gluing together each row:
first convert each raster to column-major order, glue the columns together, and then convert back.
The transpose :: [[a]] -> [[a]] function can do this major-order shifting.
Here too, decide by fiat that the right-most tile absorbs the extra pixels if necessary.
There is an argument to be made here that perhaps we should “blend” the middle column, but as we will see in chapter 2.1.4, this approach doesn’t generalize nicely.
∀ (t1 :: Tile) (t2 :: Tile) (w :: Int) (h :: Int).
rasterize w h (beside t1 t2) =
transpose $
transpose (rasterize (div w 2) h t1) <>
transpose (rasterize (w - div w 2) h t2)
The clockwise cw operation requires us to rotate our rasterized matrix. I didn’t know of any built-in function to perform this operation, so I experimented until I found fmap reverse . transpose which works, though admittedly in
-}
| haroldcarr/learn-haskell-coq-ml-etc | haskell/book/2021-05-06-Algebra_Driven_Design-Sandy_Maguire/src/Lib.hs | unlicense | 7,382 | 0 | 9 | 1,696 | 258 | 157 | 101 | -1 | -1 |
-- from http://learnyouahaskell.com/functionally-solving-problems
data Section = Section { getA :: Int, getB :: Int, getC :: Int } deriving (Show)
type RoadSystem = [Section]
data Label = A | B | C deriving (Show)
type Path = [(Label, Int)]
roadStep :: (Path, Path) -> Section -> (Path, Path)
roadStep (pathA, pathB) (Section a b c) =
let priceA = sum $ map snd pathA
priceB = sum $ map snd pathB
forwardPriceToA = priceA + a
crossPriceToA = priceB + b + c
forwardPriceToB = priceB + b
crossPriceToB = priceA + a + c
newPathToA = if forwardPriceToA <= crossPriceToA
then (A, a) : pathA
else (C, c) : (B, b) : pathB
newPathToB = if forwardPriceToB <= crossPriceToB
then (B, b) : pathB
else (C, c) : (A, a) : pathA
in (newPathToA, newPathToB)
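-- A worked example (illustrative): starting from empty paths,
--   roadStep ([], []) (Section 50 10 30)
-- gives ([(C,30),(B,10)], [(B,10)]): going forward to A costs 50 while
-- crossing over from B costs 10 + 30 = 40, so the cheaper path to A is B
-- then C; the direct road to B (cost 10) is the cheapest way to reach B.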
optimalPath :: RoadSystem -> Path
optimalPath roadSystem =
let (bestAPath, bestBPath) = foldl roadStep ([],[]) roadSystem
in if sum (map snd bestAPath) <= sum (map snd bestBPath)
then reverse bestAPath
else reverse bestBPath
heathrowToLondon :: RoadSystem
heathrowToLondon = [Section 50 10 30, Section 5 90 20, Section 40 2 25, Section 10 8 0]
main = putStrLn $ show $ optimalPath heathrowToLondon
| dkandalov/katas | haskell/shortest-path/shortest-path.hs | unlicense | 1,331 | 0 | 12 | 390 | 467 | 259 | 208 | 28 | 3 |
module Week2.LogAnalysis where
import Data.Char (isSpace)
import Data.List (foldl')
import Data.Maybe (fromMaybe, listToMaybe)
import Week2.Log
-- Ex. 1
parseMessage :: String -> LogMessage
parseMessage line = case line of
'I':rest -> fromMaybe unknown $ maybeReadInfo rest
'W':rest -> fromMaybe unknown $ maybeReadWarn rest
'E':rest -> fromMaybe unknown $ maybeReadErr rest
_ -> unknown
where
unknown = Unknown line
maybeReadInfo = maybeReadTime Info
maybeReadWarn = maybeReadTime Warning
maybeReadErr :: String -> Maybe LogMessage
maybeReadErr str = do
(sev, rest) <- listToMaybe $ reads str
maybeReadTime (Error sev) rest
maybeReadTime :: MessageType -> String -> Maybe LogMessage
maybeReadTime level str = do
(time, msg) <- listToMaybe $ reads str
return $ LogMessage level time $ dropWhile isSpace msg
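-- Illustrative examples (sample log lines, not taken from the real data set):
--   parseMessage "E 2 562 help help" == LogMessage (Error 2) 562 "help help"
--   parseMessage "I 29 la la la"     == LogMessage Info 29 "la la la"
--   parseMessage "This is not it"    == Unknown "This is not it"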
parse :: String -> [LogMessage]
parse = map parseMessage . lines
-- Ex. 2
insert :: LogMessage -> MessageTree -> MessageTree
insert (Unknown _) tree = tree
insert msg@(LogMessage _ _ _) Leaf = Node Leaf msg Leaf
insert msg@(LogMessage _ time _) (Node left root@(LogMessage _ rTime _) right)
| time < rTime = Node (insert msg left) root right
| otherwise = Node left root (insert msg right)
-- you happy compiler?
insert _ (Node _ (Unknown _) _) = error "The tree shouldn't contain Unknown records"
-- Ex. 3
build :: [LogMessage] -> MessageTree
build = foldl' (flip insert) Leaf
-- Ex. 4
inOrder :: MessageTree -> [LogMessage]
inOrder Leaf = []
inOrder (Node left root right) = inOrder left ++ [root] ++ inOrder right
-- Ex. 5
whatWentWrong :: [LogMessage] -> [String]
whatWentWrong = map getMsg . inOrder . build . filter (isSevereError)
where
getMsg (LogMessage _ _ msg) = msg
getMsg _ = "" -- for the Unknown case again
isSevereError (LogMessage (Error sev) _ _) = sev >= 50
isSevereError _ = False
| raphaelnova/cis194 | src/Week2/LogAnalysis.hs | unlicense | 1,934 | 0 | 11 | 421 | 682 | 347 | 335 | 42 | 4 |
module Cryptography.CaesarCipher.BruteBreaker
( bruteBreak
, bruteBreakIO
) where
import Cryptography
import Cryptography.CaesarCipher
bruteBreak :: Alphabet -> String -> [(Int, String)]
bruteBreak alphabet str = zip allPossibleKeys (map (\s -> decode s alphabet str) allPossibleKeys)
where allPossibleKeys = [1..alphabetLength alphabet]
bruteBreakIO :: Alphabet -> String -> IO ()
bruteBreakIO alphabet str = mapM_ (\(key, result) -> putStrLn $ show key ++ ": " ++ result) $ bruteBreak alphabet str
| dmitmel/goiteens-hw-in-haskell | Utils/Cryptography/CaesarCipher/BruteBreaker.hs | apache-2.0 | 527 | 0 | 12 | 95 | 167 | 90 | 77 | 10 | 1 |
module Metrics.BoundedDistance
( BoundedDistance(..)
) where
import Algebra.Semiring
data BoundedDistance = BD Int
deriving(Eq)
instance Show BoundedDistance where
show (BD k)
| ( k < intMu ) = show k
| otherwise = "µ"
instance Semiring (BoundedDistance) where
add (BD x) (BD y) = BD (min x y)
zero = mu
mul (BD x) (BD y)
| (x + y < intMu) = BD (x + y)
| otherwise = mu
unit = (BD 0)
lub (BD x) (BD y)
| (max x y < intMu) = BD (max x y)
| otherwise = mu
mu :: BoundedDistance
mu = (BD intMu)
intMu :: Int
intMu = 4
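-- A small illustrative example (relies on intMu = 4 as defined above): 'add'
-- keeps the smaller distance, while 'mul' saturates at µ once the sum of
-- distances reaches the bound, so the pair below shows as (1,µ).
exampleDistances :: (BoundedDistance, BoundedDistance)
exampleDistances = (add (BD 1) (BD 2), mul (BD 2) (BD 3))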
| sdynerow/Semirings-Library | haskell/Metrics/BoundedDistance.hs | apache-2.0 | 565 | 0 | 11 | 157 | 287 | 146 | 141 | 23 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
-- | This is a fairly simple module that just connects the smtp-mail package
-- with the tls package. Hopefully this will make it easier to connect to, say,
-- GMail.
--
-- This is just this SO answer, http://stackoverflow.com/a/13634590/34864,
-- wrapped in a module and packaged up.
module Network.Mail.SMTP.TLS
( sendMailTls
, sendMailTls'
, ciphers
, tlsParams
) where
import Control.Applicative
import Control.Monad (unless, forM_)
import Crypto.Random (CPRG)
import qualified Crypto.Random.AESCtr as RNG
import qualified Data.ByteString as B
import qualified Data.ByteString.Base64 as B64
import qualified Data.ByteString.Lazy as BL
import qualified Data.ByteString.Lazy.Char8 as BCL
import Data.List (isPrefixOf)
import Data.Monoid
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import Network
import Network.Mail.Mime
import Network.Mail.SMTP
import Network.Socket
import Network.TLS
import Network.TLS.Extra
import System.IO
import Text.Printf
ciphers :: [Cipher]
ciphers = [ cipher_AES128_SHA1
, cipher_AES256_SHA1
, cipher_RC4_128_MD5
, cipher_RC4_128_SHA1
]
tlsParams :: TLSParams
tlsParams = defaultParamsClient { pCiphers = ciphers }
write :: Handle -> String -> IO ()
write h = hPrintf h "%s\r\n"
-- printf ">>> %s\n" cmd
-- hFlush stdout
waitFor :: Handle -> String -> IO ()
waitFor h str = do
ln <- hGetLine h
-- putStrLn $ "<<< " <> ln
unless (str `isPrefixOf` ln) (waitFor h str)
hFlush stdout
class Writeable a where
toCommand :: a -> BL.ByteString
toDebug :: a -> String
instance Writeable String where
toCommand = BL.fromChunks
. (:[])
. TE.encodeUtf8
. T.pack
. printf "%s\r\n"
toDebug = id
instance Writeable B.ByteString where
toCommand = BL.fromChunks . (:["\r\n"])
toDebug = T.unpack . TE.decodeUtf8
instance Writeable BCL.ByteString where
toCommand = (<> crlf)
toDebug = toDebug . mconcat . BCL.toChunks
tlsWrite :: Writeable a => Context -> a -> IO ()
tlsWrite ctx cmd = do
sendData ctx $ toCommand cmd
contextFlush ctx
-- printf ">>> %s\n" $ toDebug cmd
-- hFlush stdout
crlf :: BL.ByteString
crlf = BCL.pack "\r\n"
-- TODO: Add timeout
tlsWaitFor :: Context -> T.Text -> IO ()
tlsWaitFor ctx str = do
lns <- T.lines . TE.decodeUtf8 <$> recvData ctx
-- forM_ lns $ printf . T.unpack . ("<<< " <>) . (<> "\n")
-- hFlush stdout
case filter (T.isPrefixOf str) lns of
[] -> tlsWaitFor ctx str
_ -> return ()
tlsWriteWait :: Writeable a => Context -> a -> T.Text -> IO ()
tlsWriteWait ctx cmd waitFor = tlsWrite ctx cmd >> tlsWaitFor ctx waitFor
sendMailTls :: HostName -> UserName -> Password -> Mail -> IO ()
sendMailTls host = sendMailTls' host 587
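-- A usage sketch (the host, credentials and mail value below are placeholders,
-- not defaults of this module):
--
-- > sendMailTls "smtp.gmail.com" "user@gmail.com" "app-password" someMail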
b64 :: String -> B.ByteString
b64 = B64.encode . B.pack . map (toEnum . fromEnum)
-- TODO: option to pass in TLSParams
-- TODO: run in EitherT.
writeAddress :: Context -> T.Text -> Address -> IO ()
writeAddress ctx cmd Address{..} =
tlsWriteWait ctx (T.unpack $ cmd <> ":<" <> addressEmail <> ">") "250"
sendMailTls' :: HostName -> Int -> UserName -> Password -> Mail -> IO ()
sendMailTls' host port user passwd mail = do
g <- RNG.makeSystem
let pn = PortNumber $ fromIntegral port
h <- connectTo host pn
hSetBuffering h LineBuffering
write h "EHLO"
waitFor h "250-STARTTLS"
write h "STARTTLS"
waitFor h "220"
ctx <- contextNewOnHandle h tlsParams g
let sendLine = tlsWrite ctx
-- putStrLn "handshake"
handshake ctx
-- putStrLn "login"
tlsWriteWait ctx ("EHLO" :: String) "250"
tlsWriteWait ctx ("AUTH LOGIN" :: String) "334"
tlsWriteWait ctx (b64 user) "334"
tlsWriteWait ctx (b64 passwd) "235"
-- putStrLn "renderAndSend"
mailbs <- renderMail' mail
writeAddress ctx "MAIL FROM" $ mailFrom mail
let rcpts = concatMap (\f -> f mail) [mailTo, mailCc, mailBcc]
forM_ rcpts $ writeAddress ctx "RCPT TO"
tlsWriteWait ctx ("DATA" :: String) "354"
mapM_ sendLine . concatMap BCL.toChunks $ split mailbs
mapM_ sendLine $ BCL.toChunks dot
tlsWaitFor ctx "250"
-- putStrLn "bye"
bye ctx
where
split = map (padDot . stripCR) . BCL.split '\n'
-- remove \r at the end of a line
stripCR s = if cr `BL.isSuffixOf` s then BL.init s else s
-- duplicate . at the start of a line
padDot s = if dot `BL.isPrefixOf` s then dot <> s else s
cr = BCL.pack "\r"
dot = BCL.pack "."
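-- A hedged usage sketch (not part of the original file): sending a prebuilt
-- 'Mail' value through GMail's submission port 587. The host, credentials and
-- the 'mail' argument are placeholders for illustration, not values defined
-- in this module.
--
-- > example :: Mail -> IO ()
-- > example = sendMailTls "smtp.gmail.com" "[email protected]" "app-password"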
| erochest/tls-smtp | Network/Mail/SMTP/TLS.hs | apache-2.0 | 5,103 | 0 | 13 | 1,492 | 1,324 | 692 | 632 | 111 | 3 |
import qualified NLP.Skladnica.Extract as E
main :: IO ()
main = do
E.mapMWEs
-- "./data/skladnica-small/"
-- "./data/skladnica-medium/"
"./data/skladnica/"
-- "./data/skladnica/NKJP_1M_7121900001/morph_17-p/"
"./data/walenty/verbs/walenty_2015_05_verbs_verified.txt"
"./data/walenty/expand.txt"
"./data/sejf/SEJF-1.1-dlcf.dic"
"./data/nkjp-small"
-- "./data/nkjp-annex"
| kawu/skladnica-with-walenty | tmp/test2.hs | bsd-2-clause | 413 | 0 | 8 | 68 | 45 | 26 | 19 | 9 | 1 |
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-|
Module : Numeric.AERN.RefinementOrder.Arbitrary
Description : random generation of tuples with various relation constraints
Copyright : (c) Michal Konecny
License : BSD3
Maintainer : [email protected]
Stability : experimental
Portability : portable
Random generation of tuples with various relation constraints.
This module is hidden and reexported via its parent RefinementOrder.
-}
module Numeric.AERN.RefinementOrder.Arbitrary where
import Prelude hiding (EQ, LT, GT)
import Numeric.AERN.Basics.PartialOrdering
import Numeric.AERN.Basics.Arbitrary
import Data.Maybe
import qualified Data.Map as Map
import qualified Data.Set as Set
import Test.QuickCheck
import Test.Framework (testGroup, Test)
import Test.Framework.Providers.QuickCheck2 (testProperty)
import Numeric.AERN.Misc.QuickCheck
import System.IO.Unsafe
class AreaHasBoundsConstraints t where
areaSetOuterBound :: t -> (Area t) -> (Area t)
areaSetInnerBound :: t -> (Area t) -> (Area t)
{-|
Comparison with the ability to randomly generate
pairs and triples of its own elements that are in
a specific order relation (e.g. LT or NC).
This is to help with checking properties that
make sense only for pairs in a certain relation
where such pairs are rare.
-}
class (ArbitraryWithArea t) => ArbitraryOrderedTuple t where
{-| generator of tuples that satisfy the given relation requirements
and area restriction,
nothing if in this structure there are no tuples satisfying these requirements -}
arbitraryTupleInAreaRelatedBy ::
(Ord ix, Show ix) =>
(Area t) ->
[ix]
{-^ how many elements should be generated and with what names -} ->
[((ix, ix),[PartialOrdering])]
{-^ required orderings for some pairs of elements -} ->
Maybe (Gen [t]) {-^ generator for tuples if the requirements make sense -}
{-| generator of tuples that satisfy the given relation requirements,
nothing if in this structure there are no tuples satisfying these requirements -}
arbitraryTupleRelatedBy ::
(Ord ix, Show ix) =>
[ix]
{-^ how many elements should be generated and with what names -} ->
[((ix, ix),[PartialOrdering])]
{-^ required orderings for some pairs of elements -} ->
Maybe (Gen [t]) {-^ generator for tuples if the requirements make sense -}
arbitraryTuple ::
Int {-^ how many elements should be generated -} ->
Maybe (Gen [t]) {-^ generator for tuples if the requirements make sense -}
arbitraryTuple n = arbitraryTupleRelatedBy [1..n] []
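-- A hedged usage sketch (not part of the original module): for an instance
-- type @T@, a strictly increasing chain e1 < e2 < e3 can be requested by
-- naming the elements 1, 2, 3 and constraining the pairs (1,2) and (2,3);
-- 'Nothing' signals that no tuple in @T@ can satisfy the constraints.
--
-- > chainGen :: Maybe (Gen [T])
-- > chainGen = arbitraryTupleRelatedBy [1,2,3::Int] [((1,2),[LT]), ((2,3),[LT])]
--
-- The pair and triple helpers below are thin wrappers over this interface.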
arbitraryPairRelatedBy ::
(ArbitraryOrderedTuple t) => PartialOrdering -> Maybe (Gen (t,t))
arbitraryPairRelatedBy rel =
case arbitraryTupleRelatedBy [1,2] [((1,2),[rel])] of
Nothing -> Nothing
Just gen -> Just $
do
[e1,e2] <- gen
return (e1,e2)
arbitraryPairInAreaRelatedBy ::
(ArbitraryOrderedTuple t) =>
Area t ->
PartialOrdering ->
Maybe (Gen (t,t))
arbitraryPairInAreaRelatedBy area rel =
case arbitraryTupleInAreaRelatedBy area [1,2] [((1,2),[rel])] of
Nothing -> Nothing
Just gen -> Just $
do
[e1,e2] <- gen
return (e1,e2)
arbitraryTripleRelatedBy ::
(ArbitraryOrderedTuple t) =>
(PartialOrdering, PartialOrdering, PartialOrdering) -> Maybe (Gen (t,t,t))
arbitraryTripleRelatedBy (r1, r2, r3) =
case arbitraryTupleRelatedBy [1,2,3] constraints of
Nothing -> Nothing
Just gen -> Just $
do
[e1,e2,e3] <- gen
return (e1, e2, e3)
where
constraints = [((1,2),[r1]), ((2,3),[r2]), ((1,3),[r3])]
arbitraryTripleInAreaRelatedBy ::
(ArbitraryOrderedTuple t) =>
Area t ->
(PartialOrdering, PartialOrdering, PartialOrdering) ->
Maybe (Gen (t,t,t))
arbitraryTripleInAreaRelatedBy area (r1, r2, r3) =
case arbitraryTupleInAreaRelatedBy area [1,2,3] constraints of
Nothing -> Nothing
Just gen -> Just $
do
[e1,e2,e3] <- gen
return (e1, e2, e3)
where
constraints = [((1,2),[r1]), ((2,3),[r2]), ((1,3),[r3])]
{-| type for randomly generating single elements using the distribution of the 'ArbitraryOrderedTuple' instance -}
newtype UniformlyOrderedSingleton t = UniformlyOrderedSingleton t deriving (Show)
{-| type for randomly generating pairs of unrelated elements using the distribution of the 'ArbitraryOrderedTuple' instance -}
data TwoUniformlyOrderedSingletons t = TwoUniformlyOrderedSingletons (t,t) deriving (Show)
{-| type for randomly generating triples of unrelated elements using the distribution of the 'ArbitraryOrderedTuple' instance -}
data ThreeUniformlyOrderedSingletons t = ThreeUniformlyOrderedSingletons (t,t,t) deriving (Show)
{-| type for generating pairs distributed in such a way that all ordering relations
permitted by this structure have similar probabilities of occurrence -}
data UniformlyOrderedPair t = UniformlyOrderedPair (t,t) deriving (Show)
data TwoUniformlyOrderedPairs t = TwoUniformlyOrderedPairs ((t,t),(t,t)) deriving (Show)
data ThreeUniformlyOrderedPairs t = ThreeUniformlyOrderedPairs ((t,t),(t,t),(t,t)) deriving (Show)
data LEPair t = LEPair (t,t) deriving (Show)
data TwoLEPairs t = TwoLEPairs ((t,t),(t,t)) deriving (Show)
data ThreeLEPairs t = ThreeLEPairs ((t,t),(t,t),(t,t)) deriving (Show)
{-| type for generating triples distributed in such a way that all ordering relation combinations
permitted by this structure have similar probabilities of occurrence -}
data UniformlyOrderedTriple t = UniformlyOrderedTriple (t,t,t) deriving (Show)
instance (ArbitraryOrderedTuple t) => Arbitrary (UniformlyOrderedSingleton t) where
arbitrary =
do
[elem] <- gen
return $ UniformlyOrderedSingleton elem
where
Just gen = arbitraryTupleRelatedBy [1] []
instance
(ArbitraryOrderedTuple t, a ~ Area t)
=>
ArbitraryWithParam (UniformlyOrderedSingleton t) a
where
arbitraryWithParam area =
do
[elem] <- gen
return $ UniformlyOrderedSingleton elem
where
Just gen = arbitraryTupleInAreaRelatedBy area [1::Int] []
instance
(ArbitraryOrderedTuple t, a ~ Area t)
=>
ArbitraryWithParam (TwoUniformlyOrderedSingletons t) a
where
arbitraryWithParam area =
do
(UniformlyOrderedSingleton e1) <- arbitraryWithParam area
(UniformlyOrderedSingleton e2) <- arbitraryWithParam area
return $ TwoUniformlyOrderedSingletons (e1,e2)
instance
(ArbitraryOrderedTuple t, a ~ Area t)
=>
ArbitraryWithParam (ThreeUniformlyOrderedSingletons t) a
where
arbitraryWithParam area =
do
(UniformlyOrderedSingleton e1) <- arbitraryWithParam area
(UniformlyOrderedSingleton e2) <- arbitraryWithParam area
(UniformlyOrderedSingleton e3) <- arbitraryWithParam area
return $ ThreeUniformlyOrderedSingletons (e1,e2,e3)
instance
(ArbitraryOrderedTuple t)
=>
Arbitrary (UniformlyOrderedPair t) where
arbitrary =
do
gen <- elements gens
pair <- gen
return $ UniformlyOrderedPair pair
where
gens = catMaybes $ map arbitraryPairRelatedBy partialOrderingVariants
instance
(ArbitraryOrderedTuple t, a ~ Area t)
=>
ArbitraryWithParam (UniformlyOrderedPair t) a
where
arbitraryWithParam area =
do
gen <- elements gens
pair <- gen
return $ UniformlyOrderedPair pair
where
gens = catMaybes $ map (arbitraryPairInAreaRelatedBy area) partialOrderingVariants
instance
(ArbitraryOrderedTuple t)
=>
Arbitrary (LEPair t)
where
arbitrary =
do
gen <- elements gens
pair <- gen
return $ LEPair pair
where
gens = catMaybes $ map arbitraryPairRelatedBy [LT, LT, LT, EQ]
instance
(ArbitraryOrderedTuple t, a ~ Area t)
=>
ArbitraryWithParam (TwoUniformlyOrderedPairs t) a
where
arbitraryWithParam area =
do
(UniformlyOrderedPair p1) <- arbitraryWithParam area
(UniformlyOrderedPair p2) <- arbitraryWithParam area
return $ TwoUniformlyOrderedPairs (p1,p2)
instance
(ArbitraryOrderedTuple t, a ~ Area t)
=>
ArbitraryWithParam (ThreeUniformlyOrderedPairs t) a
where
arbitraryWithParam area =
do
(UniformlyOrderedPair p1) <- arbitraryWithParam area
(UniformlyOrderedPair p2) <- arbitraryWithParam area
(UniformlyOrderedPair p3) <- arbitraryWithParam area
return $ ThreeUniformlyOrderedPairs (p1,p2,p3)
instance
(ArbitraryOrderedTuple t, a ~ Area t)
=>
ArbitraryWithParam (LEPair t) a
where
arbitraryWithParam area =
do
gen <- elements gens
pair <- gen
return $ LEPair pair
where
gens = catMaybes $ map (arbitraryPairInAreaRelatedBy area) [LT, LT, LT, EQ]
instance
(ArbitraryOrderedTuple t, a ~ Area t)
=>
ArbitraryWithParam (TwoLEPairs t) a
where
arbitraryWithParam area =
do
(LEPair p1) <- arbitraryWithParam area
(LEPair p2) <- arbitraryWithParam area
return $ TwoLEPairs (p1,p2)
instance
(ArbitraryOrderedTuple t, a ~ Area t)
=>
ArbitraryWithParam (ThreeLEPairs t) a
where
arbitraryWithParam area =
do
(LEPair p1) <- arbitraryWithParam area
(LEPair p2) <- arbitraryWithParam area
(LEPair p3) <- arbitraryWithParam area
return $ ThreeLEPairs (p1,p2,p3)
instance
(ArbitraryOrderedTuple t)
=>
Arbitrary (UniformlyOrderedTriple t) where
arbitrary =
do
gen <- elements gens
triple <- gen
return $ UniformlyOrderedTriple triple
where
gens = catMaybes $ map arbitraryTripleRelatedBy partialOrderingVariantsTriples
instance
(ArbitraryOrderedTuple t, a ~ Area t)
=>
ArbitraryWithParam (UniformlyOrderedTriple t) a
where
arbitraryWithParam area =
do
gen <- elements gens
triple <- gen
return $ UniformlyOrderedTriple triple
where
gens = catMaybes $ map (arbitraryTripleInAreaRelatedBy area) partialOrderingVariantsTriples
propArbitraryOrderedPair ::
(ArbitraryOrderedTuple t) =>
(t -> t -> PartialOrdering) -> PartialOrdering -> Bool
propArbitraryOrderedPair compare rel =
case arbitraryPairRelatedBy rel of
Nothing -> True
Just gen ->
and $ map relOK theSample
where
theSample = unsafePerformIO $ sample' gen
relOK (e1, e2) = compare e1 e2 == rel
propArbitraryOrderedTriple ::
(ArbitraryOrderedTuple t) =>
(t -> t -> PartialOrdering) -> (PartialOrdering, PartialOrdering, PartialOrdering) -> Bool
propArbitraryOrderedTriple compare rels@(r1,r2,r3) =
case arbitraryTripleRelatedBy rels of
Nothing -> True
Just gen ->
and $ map relOK theSample
where
theSample = unsafePerformIO $ sample' $ gen
relOK (e1, e2, e3) =
and [compare e1 e2 == r1, compare e2 e3 == r2, compare e1 e3 == r3]
testsArbitraryTuple ::
(Arbitrary t,
ArbitraryOrderedTuple t) =>
(String, t, t -> t -> PartialOrdering) -> Test
testsArbitraryTuple (name, sample, compare) =
testGroup (name ++ " arbitrary ordered") $
[
testProperty "pairs" (propArbitraryOrderedPair compare)
,
testProperty "triples" (propArbitraryOrderedTriple compare)
]
| michalkonecny/aern | aern-order/src/Numeric/AERN/RefinementOrder/Arbitrary.hs | bsd-3-clause | 11,974 | 0 | 14 | 3,092 | 2,968 | 1,588 | 1,380 | 239 | 2 |
module Control.Monad.Syntax.Two where
(==<<) :: Monad m =>
(a -> b -> m c)
-> m a
-> b -> m c
(==<<) mf x b = x >>= (`mf` b)
infixl 1 ==<<
(=.<<) :: Monad m =>
(a -> b -> m c)
-> m b
-> a -> m c
(=.<<) mf x a = mf a =<< x
infixl 1 =.<<
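-- A hedged usage sketch (not part of the original module): '==<<' feeds a
-- monadic value into the first argument of a binary monadic function, '=.<<'
-- into the second; 'writeFile' and 'getLine' are merely illustrative.
--
-- > writeFile ==<< getLine $ "contents" -- getLine >>= \path -> writeFile path "contents"
-- > writeFile =.<< getLine $ "out.txt"  -- getLine >>= \body -> writeFile "out.txt" body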
| athanclark/composition-extra | src/Control/Monad/Syntax/Two.hs | bsd-3-clause | 288 | 0 | 10 | 117 | 155 | 84 | 71 | 13 | 1 |
{-# LANGUAGE FlexibleContexts, FlexibleInstances, MultiParamTypeClasses #-}
module Opaleye.Internal.RunQuery where
import Control.Applicative (Applicative, pure, (*>), (<*>), liftA2)
import qualified Database.PostgreSQL.Simple.Cursor as PGSC (Cursor)
import Database.PostgreSQL.Simple.Internal (RowParser)
import qualified Database.PostgreSQL.Simple.FromField as PGS
import Database.PostgreSQL.Simple.FromField
(FieldParser, fromField, pgArrayFieldParser)
import Database.PostgreSQL.Simple.FromRow (fromRow, fieldWith)
import Database.PostgreSQL.Simple.Types (fromPGArray, Only(..))
import Opaleye.Column (Column)
import Opaleye.Internal.Column (Nullable)
import qualified Opaleye.Internal.PackMap as PackMap
import qualified Opaleye.Column as C
import qualified Opaleye.Internal.Unpackspec as U
import qualified Opaleye.PGTypes as T
import qualified Opaleye.Internal.PGTypes as IPT (strictDecodeUtf8)
import qualified Data.Profunctor as P
import Data.Profunctor (dimap)
import qualified Data.Profunctor.Product as PP
import Data.Profunctor.Product (empty, (***!))
import qualified Data.Profunctor.Product.Default as D
import qualified Data.Aeson as Ae
import qualified Data.CaseInsensitive as CI
import qualified Data.Text as ST
import qualified Data.Text.Lazy as LT
import qualified Data.ByteString as SBS
import qualified Data.ByteString.Lazy as LBS
import qualified Data.Time as Time
import qualified Data.Scientific as Sci
import qualified Data.String as String
import Data.UUID (UUID)
import GHC.Int (Int32, Int64)
-- { Only needed for postgresql-simple FieldParsers
import Control.Applicative ((<$>))
import Database.PostgreSQL.Simple.FromField
(ResultError(UnexpectedNull, Incompatible), typeInfo, returnError)
import qualified Database.PostgreSQL.Simple.TypeInfo as TI
import qualified Database.PostgreSQL.Simple.Range as PGSR
import Data.Typeable (Typeable)
-- }
-- | A 'QueryRunnerColumn' @pgType@ @haskellType@ encodes how to turn
-- a value of Postgres type @pgType@ into a value of Haskell type
-- @haskellType@. For example a value of type 'QueryRunnerColumn'
-- 'T.PGText' 'String' encodes how to turn a 'T.PGText' result from the
-- database into a Haskell 'String'.
--
-- \"'QueryRunnerColumn' @pgType@ @haskellType@\" corresponds to
-- postgresql-simple's \"'FieldParser' @haskellType@\".
-- This is *not* a Product Profunctor because it is the only way I
-- know of to get the instance generation to work for non-Nullable and
-- Nullable types at once.
-- I can no longer remember what the above comment means, but it might
-- be that we can't add nullability to a RowParser, only to a
-- FieldParser, so we have to have some type that we know contains
-- just a FieldParser.
data QueryRunnerColumn pgType haskellType =
QueryRunnerColumn (U.Unpackspec (Column pgType) ()) (FieldParser haskellType)
instance Functor (FromField u) where
fmap f ~(QueryRunnerColumn u fp) = QueryRunnerColumn u ((fmap . fmap . fmap) f fp)
type FromField = QueryRunnerColumn
-- | A 'QueryRunner' specifies how to convert Postgres values (@columns@)
-- into Haskell values (@haskells@). Most likely you will never need
-- to create one of these or handle one directly. It will be provided
-- for you by the 'D.Default' 'QueryRunner' instance.
--
-- \"'QueryRunner' @columns@ @haskells@\" corresponds to
-- postgresql-simple's \"'RowParser' @haskells@\". \"'Default'
-- 'QueryRunner' @columns@ @haskells@\" corresponds to
-- postgresql-simple's \"@FromRow@ @haskells@\".
data QueryRunner columns haskells =
QueryRunner (U.Unpackspec columns ())
(columns -> RowParser haskells)
-- We never actually look at the columns except to see
-- its "type" in the case of a sum profunctor
(columns -> Bool)
-- Have we actually requested any columns? If we
-- asked for zero columns then the SQL generator will
-- have to put a dummy 0 into the SELECT statement,
-- since we can't select zero columns. In that case we
-- have to make sure we read a single Int.
--
-- NB this does have to be a function of 'columns'
-- because we have a `SumProfunctor` instance. For some
-- values of 'columns' there may be zero columns and for
-- other values one or more, for example, 'Maybe (Column
-- PGInt4)' has no columns when it is Nothing and one
-- column when it is Just.
type FromFields = QueryRunner
fieldQueryRunnerColumn :: PGS.FromField haskell => FromField pgType haskell
fieldQueryRunnerColumn = fieldParserQueryRunnerColumn fromField
fieldParserQueryRunnerColumn :: FieldParser haskell -> FromField pgType haskell
fieldParserQueryRunnerColumn = QueryRunnerColumn (P.rmap (const ()) U.unpackspecColumn)
queryRunner :: FromField a b -> FromFields (Column a) b
queryRunner qrc = QueryRunner u (const (fieldWith fp)) (const True)
where QueryRunnerColumn u fp = qrc
queryRunnerColumnNullable :: FromField a b
-> FromField (Nullable a) (Maybe b)
queryRunnerColumnNullable qr =
QueryRunnerColumn (P.lmap C.unsafeCoerceColumn u) (fromField' fp)
where QueryRunnerColumn u fp = qr
fromField' :: FieldParser a -> FieldParser (Maybe a)
fromField' _ _ Nothing = pure Nothing
fromField' fp' f bs = fmap Just (fp' f bs)
-- { Instances for automatic derivation
instance QueryRunnerColumnDefault a b =>
QueryRunnerColumnDefault (Nullable a) (Maybe b) where
queryRunnerColumnDefault = queryRunnerColumnNullable queryRunnerColumnDefault
instance QueryRunnerColumnDefault a b =>
D.Default QueryRunner (Column a) b where
def = queryRunner queryRunnerColumnDefault
-- }
-- { Instances that must be provided once for each type. Instances
-- for Nullable are derived automatically from these.
-- | A 'QueryRunnerColumnDefault' @pgType@ @haskellType@ represents
-- the default way to turn a @pgType@ result from the database into a
-- Haskell value of type @haskellType@.
--
-- \"'QueryRunnerColumnDefault' @pgType@ @haskellType@\" corresponds
-- to postgresql-simple's \"'FromField' @haskellType@\".
--
-- Creating an instance of 'QueryRunnerColumnDefault' for your own types is
-- necessary for retrieving those types from the database.
--
-- You should use one of the three methods below for writing a
-- 'QueryRunnerColumnDefault' instance.
--
-- 1. If you already have a 'FromField' instance for your @haskellType@, use
-- 'fieldQueryRunnerColumn'. (This is how most of the built-in instances are
-- defined.)
--
-- 2. If you don't have a 'FromField' instance, use
-- 'Opaleye.RunQuery.queryRunnerColumn' if possible. See the documentation for
-- 'Opaleye.RunQuery.queryRunnerColumn' for an example.
--
-- 3. If you have a more complicated case, but not a 'FromField' instance,
-- write a 'FieldParser' for your type and use 'fieldParserQueryRunnerColumn'.
-- You can also add a 'FromField' instance using this.
class QueryRunnerColumnDefault pgType haskellType where
queryRunnerColumnDefault :: QueryRunnerColumn pgType haskellType
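-- A hedged sketch (not part of the original module) of method (1) above, for
-- a hypothetical wrapper type that already has a postgresql-simple
-- 'PGS.FromField' instance; 'UserId' and the column type 'T.PGInt4' are
-- assumptions made purely for illustration.
--
-- > newtype UserId = UserId Int
-- >
-- > instance PGS.FromField UserId where
-- >   fromField f mdata = UserId <$> fromField f mdata
-- >
-- > instance QueryRunnerColumnDefault T.PGInt4 UserId where
-- >   queryRunnerColumnDefault = fieldQueryRunnerColumn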
instance QueryRunnerColumnDefault sqlType haskellType
=> D.Default FromField sqlType haskellType where
def = queryRunnerColumnDefault
instance QueryRunnerColumnDefault T.PGNumeric Sci.Scientific where
queryRunnerColumnDefault = fieldQueryRunnerColumn
instance QueryRunnerColumnDefault T.PGInt4 Int where
queryRunnerColumnDefault = fieldQueryRunnerColumn
instance QueryRunnerColumnDefault T.PGInt4 Int32 where
queryRunnerColumnDefault = fieldQueryRunnerColumn
instance QueryRunnerColumnDefault T.PGInt8 Int64 where
queryRunnerColumnDefault = fieldQueryRunnerColumn
instance QueryRunnerColumnDefault T.PGText String where
queryRunnerColumnDefault = fieldQueryRunnerColumn
instance QueryRunnerColumnDefault T.PGFloat8 Double where
queryRunnerColumnDefault = fieldQueryRunnerColumn
instance QueryRunnerColumnDefault T.PGBool Bool where
queryRunnerColumnDefault = fieldQueryRunnerColumn
instance QueryRunnerColumnDefault T.PGUuid UUID where
queryRunnerColumnDefault = fieldQueryRunnerColumn
instance QueryRunnerColumnDefault T.PGBytea SBS.ByteString where
queryRunnerColumnDefault = fieldQueryRunnerColumn
instance QueryRunnerColumnDefault T.PGBytea LBS.ByteString where
queryRunnerColumnDefault = fieldQueryRunnerColumn
instance QueryRunnerColumnDefault T.PGText ST.Text where
queryRunnerColumnDefault = fieldQueryRunnerColumn
instance QueryRunnerColumnDefault T.PGText LT.Text where
queryRunnerColumnDefault = fieldQueryRunnerColumn
instance QueryRunnerColumnDefault T.PGDate Time.Day where
queryRunnerColumnDefault = fieldQueryRunnerColumn
instance QueryRunnerColumnDefault T.PGTimestamptz Time.UTCTime where
queryRunnerColumnDefault = fieldQueryRunnerColumn
instance QueryRunnerColumnDefault T.PGTimestamp Time.LocalTime where
queryRunnerColumnDefault = fieldQueryRunnerColumn
instance QueryRunnerColumnDefault T.PGTimestamptz Time.ZonedTime where
queryRunnerColumnDefault = fieldQueryRunnerColumn
instance QueryRunnerColumnDefault T.PGTime Time.TimeOfDay where
queryRunnerColumnDefault = fieldQueryRunnerColumn
instance QueryRunnerColumnDefault T.PGCitext (CI.CI ST.Text) where
queryRunnerColumnDefault = fieldQueryRunnerColumn
instance QueryRunnerColumnDefault T.PGCitext (CI.CI LT.Text) where
queryRunnerColumnDefault = fieldQueryRunnerColumn
instance QueryRunnerColumnDefault T.PGJson String where
queryRunnerColumnDefault = fieldParserQueryRunnerColumn jsonFieldParser
instance QueryRunnerColumnDefault T.PGJson Ae.Value where
queryRunnerColumnDefault = fieldQueryRunnerColumn
instance QueryRunnerColumnDefault T.PGJsonb String where
queryRunnerColumnDefault = fieldParserQueryRunnerColumn jsonbFieldParser
instance QueryRunnerColumnDefault T.PGJsonb Ae.Value where
queryRunnerColumnDefault = fieldQueryRunnerColumn
-- No CI String instance since postgresql-simple doesn't define FromField (CI String)
arrayColumn :: Column (T.PGArray a) -> Column a
arrayColumn = C.unsafeCoerceColumn
instance (Typeable b, QueryRunnerColumnDefault a b) =>
QueryRunnerColumnDefault (T.PGArray a) [b] where
queryRunnerColumnDefault = QueryRunnerColumn (P.lmap arrayColumn c) ((fmap . fmap . fmap) fromPGArray (pgArrayFieldParser f))
where QueryRunnerColumn c f = queryRunnerColumnDefault
-- }
instance (Typeable b, PGS.FromField b, QueryRunnerColumnDefault a b) =>
QueryRunnerColumnDefault (T.PGRange a) (PGSR.PGRange b) where
queryRunnerColumnDefault = fieldQueryRunnerColumn
-- Boilerplate instances
instance Functor (FromFields c) where
fmap f (QueryRunner u r b) = QueryRunner u ((fmap . fmap) f r) b
-- TODO: Seems like this one should be simpler!
instance Applicative (FromFields c) where
pure = flip (QueryRunner (P.lmap (const ()) PP.empty)) (const False)
. pure
. pure
QueryRunner uf rf bf <*> QueryRunner ux rx bx =
QueryRunner (P.dimap (\x -> (x,x)) (const ()) (uf PP.***! ux)) ((<*>) <$> rf <*> rx) (liftA2 (||) bf bx)
instance P.Profunctor FromFields where
dimap f g (QueryRunner u r b) =
QueryRunner (P.lmap f u) (P.dimap f (fmap g) r) (P.lmap f b)
instance PP.ProductProfunctor FromFields where
empty = PP.defaultEmpty
(***!) = PP.defaultProfunctorProduct
instance PP.SumProfunctor FromFields where
f +++! g = QueryRunner (P.rmap (const ()) (fu PP.+++! gu))
(PackMap.eitherFunction fr gr)
(either fb gb)
where QueryRunner fu fr fb = f
QueryRunner gu gr gb = g
-- }
-- { Allow @postgresql-simple@ conversions from JSON types to 'String'
jsonFieldParser, jsonbFieldParser :: FieldParser String
jsonFieldParser = jsonFieldTypeParser (String.fromString "json")
jsonbFieldParser = jsonFieldTypeParser (String.fromString "jsonb")
-- typenames, not type Oids are used in order to avoid creating
-- a dependency on 'Database.PostgreSQL.LibPQ'
--
-- Eventually we want to move this to postgresql-simple
--
-- https://github.com/tomjaguarpaw/haskell-opaleye/issues/329
jsonFieldTypeParser :: SBS.ByteString -> FieldParser String
jsonFieldTypeParser jsonTypeName field mData = do
ti <- typeInfo field
if TI.typname ti == jsonTypeName
then convert
else returnError Incompatible field "types incompatible"
where
convert = case mData of
Just bs -> pure $ IPT.strictDecodeUtf8 bs
_ -> returnError UnexpectedNull field ""
-- }
prepareRowParser :: FromFields columns haskells -> columns -> RowParser haskells
prepareRowParser (QueryRunner _ rowParser nonZeroColumns) cols =
if nonZeroColumns cols
then rowParser cols
else (fromRow :: RowParser (Only Int)) *> rowParser cols
-- If we are selecting zero columns then the SQL
-- generator will have to put a dummy 0 into the
-- SELECT statement, since we can't select zero
-- columns. In that case we have to make sure we
-- read a single Int.
-- | Cursor within a transaction.
data Cursor haskells = EmptyCursor | Cursor (RowParser haskells) PGSC.Cursor
| WraithM/haskell-opaleye | src/Opaleye/Internal/RunQuery.hs | bsd-3-clause | 13,296 | 0 | 15 | 2,340 | 2,343 | 1,314 | 1,029 | 168 | 3 |
module Limonad.Templates.Shortcuts where
import Limonad.Templates.Types
import Limonad.Templates.Parser
import Limonad.Templates.Eval
renderString :: Env -> String -> IO String
renderString env = evaluate env . parseTemplate
renderFile :: Env -> FilePath -> IO String
renderFile env f = readFile f >>= renderString env | davbaumgartner/limonad | Limonad/Templates/Shortcuts.hs | bsd-3-clause | 328 | 0 | 7 | 50 | 92 | 49 | 43 | 8 | 1 |
module Cassandra.QQ (schema) where
import Data.Generics
import Language.Haskell.TH.Quote
import Language.Haskell.TH
import Cassandra.Schema
schema :: QuasiQuoter
schema = QuasiQuoter { quoteExp = quoteSchemaExp
, quotePat = undefined
, quoteType = undefined
, quoteDec = undefined
}
quoteSchemaExp s = do
loc <- location
let pos = ( loc_filename loc
, fst (loc_start loc)
, snd (loc_start loc))
theSchema <- parseSchema pos s
case checkSchema theSchema of
Right _ -> dataToExpQ (const Nothing) theSchema
Left msg -> fail msg
| bch29/cassandra-th | src/Cassandra/QQ.hs | bsd-3-clause | 652 | 0 | 13 | 204 | 178 | 94 | 84 | 19 | 2 |
module Bertrand.Shell
(Shell,
ShellDesc(..),
ShellStyle(..),
-- ShellCommand,
CommandFunc,
shell,
shellDesc,
shellStyle,
get,
put,
modify,
outputStr,
outputStrLn
) where
import Prelude hiding (putChar, putStr, putStrLn)
import qualified System.IO as IO
import Control.Monad
import qualified Control.Monad.State as ST
import qualified Control.Monad.Trans as MT
import Data.Char
import Data.List
import System.Console.Haskeline hiding (outputStr, outputStrLn)
import Debug.Trace
type Shell s = ST.StateT (ShellDesc s, s) (InputT IO)
runShell :: Shell s a -> ShellDesc s -> s -> IO (a, (ShellDesc s, s))
runShell sh sd s = runInputT defaultSettings $ ST.runStateT sh (sd, s)
-- data ShellST = ShellST [String]
-- shellST = ShellST []
data ShellDesc s = ShellDesc {commands :: [(String, CommandFunc s)],
evalFunc :: String -> Shell s (),
prompt :: s -> String,
style :: ShellStyle }
shellDesc = ShellDesc {commands = [],
evalFunc = outputStrLn,
prompt = const "> ",
style = shellStyle }
data ShellStyle = ShellStyle {startText :: String,
quitText :: String,
commandPrefix :: Char }
shellStyle = ShellStyle {startText = "",
quitText = "",
commandPrefix = ':' }
type Document = String
type CommandFunc s = [String] -> Shell s ()
--------------------------------------------------------------------------------
shell :: ShellDesc s -> s -> IO ()
shell sd s = void $ runShell sh sd s
where
sh :: Shell s ()
sh = do
io $ IO.hSetBuffering IO.stdin IO.NoBuffering
io $ IO.hSetEcho IO.stdin False
(sd, s) <- ST.get
outputStrLn $ startText $ style sd
roop
roop :: Shell s ()
roop = do
(sd, s) <- ST.get
m <- lift $ getInputLine $ prompt sd s
case m of
Nothing -> return ()
Just (c:cs) | c == commandPrefix (style sd)
-> case words cs of
[] -> do
maybe (return ())
($ tail [])
(lookup "help" $ commands sd)
roop
s:_ | s `isPrefixOf` "quit"
-> outputStrLn $ quitText $ style sd
xs -> do
maybe (outputStrLn "unknown command")
(\(_,f) -> f $ tail xs)
(find (\(s,_) -> head xs `isPrefixOf` s) (commands sd))
roop
Just cs -> do
evalFunc sd cs
roop
get :: Shell s s
get = do
(_, s) <- ST.get
return s
put :: s -> Shell s ()
put s = do
(sd, _) <- ST.get
ST.put (sd, s)
modify :: (s -> s) -> Shell s ()
modify f = do
(sd, s) <- ST.get
ST.put (sd, f s)
lift :: InputT IO a -> Shell s a
lift = ST.lift
io :: IO a -> Shell s a
io = MT.lift . ST.lift
outputChar :: Char -> Shell s ()
outputChar = io . IO.putChar
flush :: Shell s ()
flush = io $ IO.hFlush IO.stdout
outputStr :: String -> Shell s ()
outputStr = io . IO.putStr
outputStrLn :: String -> Shell s ()
outputStrLn = io . IO.putStrLn
insertAt :: Int -> a -> [a] -> [a]
insertAt i y xs = let (as,bs) = splitAt i xs
in as ++ (y:bs)
deleteAt :: Int -> [a] -> [a]
deleteAt i xs = let (as,bs) = splitAt i xs
in init as ++ bs
-- tmap :: (a -> b, c -> d) -> (a, c) -> (b, d)
-- tmap (f, g) (a, b) = (f a, g b)
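-- A hedged usage sketch (not part of the original module): a minimal echo
-- shell whose state is the list of lines entered so far; the field values are
-- illustrative overrides of 'shellDesc'.
--
-- > main :: IO ()
-- > main = shell shellDesc { prompt   = const "echo> "
-- >                        , evalFunc = \l -> modify (++ [l]) >> outputStrLn l
-- >                        } []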
| fujiy00/bertrand | src/Bertrand/Shell.hs | bsd-3-clause | 3,979 | 0 | 24 | 1,673 | 1,285 | 686 | 599 | 103 | 5 |
module Properties.JsonProperties
( jsonProps
) where
import Data.Aeson
import Test.Tasty
import Test.Tasty.QuickCheck
import Network.Syncthing.Internal
import Properties.JsonArbitrary
import Properties.JsonInstances
type EitherDeviceErrorId = Either DeviceError Device
genProp name prop = testProperty testName prop
where
testName = name ++ " == decode . encode"
prop_json :: (Eq a, FromJSON a, ToJSON a) => a -> Bool
prop_json x = Just x == (decode . encode $ x)
jsonProps :: TestTree
jsonProps = testGroup "JSON Parsers"
[ genProp "Ping" (prop_json :: Ping -> Bool)
, genProp "Version" (prop_json :: Version -> Bool)
, genProp "Completion" (prop_json :: Completion -> Bool)
, genProp "CacheEntry" (prop_json :: CacheEntry -> Bool)
, genProp "Connection" (prop_json :: Connection -> Bool)
, genProp "Connections" (prop_json :: Connections -> Bool)
, genProp "Model" (prop_json :: Model -> Bool)
, genProp "Upgrade" (prop_json :: Upgrade -> Bool)
, genProp "Ignore" (prop_json :: Ignore -> Bool)
, genProp "FileInfo" (prop_json :: FileInfo -> Bool)
, genProp "DBFile" (prop_json :: DBFile -> Bool)
, genProp "Need" (prop_json :: Need -> Bool)
, genProp "Sync" (prop_json :: Sync -> Bool)
, genProp "DeviceId" (prop_json :: EitherDeviceErrorId -> Bool)
, genProp "SystemMsg" (prop_json :: SystemMsg -> Bool)
, genProp "VersioningConfig" (prop_json :: VersioningConfig -> Bool)
, genProp "FolderConfig" (prop_json :: FolderConfig -> Bool)
, genProp "GuiConfig" (prop_json :: GuiConfig -> Bool)
, genProp "OptionsConfig" (prop_json :: OptionsConfig -> Bool)
, genProp "DeviceConfig" (prop_json :: DeviceConfig -> Bool)
, genProp "Config" (prop_json :: Config -> Bool)
, genProp "Error" (prop_json :: Error -> Bool)
, genProp "Errors" (prop_json :: Errors -> Bool)
, genProp "System" (prop_json :: System -> Bool)
, genProp "DirTree" (prop_json :: DirTree -> Bool)
, genProp "UsageReport" (prop_json :: UsageReport -> Bool)
, genProp "DeviceInfo" (prop_json :: DeviceInfo -> Bool)
, genProp "FolderInfo" (prop_json :: FolderInfo -> Bool)
, genProp "LastFile" (prop_json :: LastFile -> Bool)
]
| jetho/syncthing-hs | tests/Properties/JsonProperties.hs | bsd-3-clause | 2,541 | 0 | 9 | 757 | 673 | 373 | 300 | 44 | 1 |
module Data.CfgTests (tests) where
import qualified Data.Cfg.BnfTests
import qualified Data.Cfg.CyclicTests
import qualified Data.Cfg.EpsilonProductionsTests
import qualified Data.Cfg.FirstSetTests
import qualified Data.Cfg.FollowSetTests
import qualified Data.Cfg.FreeCfgTests
import qualified Data.Cfg.LeftFactorTests
import qualified Data.Cfg.LeftRecursionTests
import qualified Data.Cfg.LookaheadSetTests
import qualified Data.Cfg.ReachableTests
import Test.Framework(Test, testGroup)
tests :: Test
tests = testGroup "Data.Cfg" [
Data.Cfg.BnfTests.tests,
Data.Cfg.CyclicTests.tests,
Data.Cfg.FirstSetTests.tests,
Data.Cfg.EpsilonProductionsTests.tests,
Data.Cfg.FollowSetTests.tests,
Data.Cfg.FreeCfgTests.tests,
Data.Cfg.LeftFactorTests.tests,
Data.Cfg.LeftRecursionTests.tests,
Data.Cfg.LookaheadSetTests.tests,
Data.Cfg.ReachableTests.tests
]
| nedervold/context-free-grammar | tests/Data/CfgTests.hs | bsd-3-clause | 896 | 0 | 7 | 102 | 179 | 123 | 56 | 24 | 1 |
{-# LANGUAGE TemplateHaskell #-}
module Appoint.IssueStatus where
import Database.Persist.TH
data IssueStatus = Open | Closed
deriving (Show, Read, Eq)
derivePersistField "IssueStatus"
| rob-b/appoint | src/Appoint/IssueStatus.hs | bsd-3-clause | 192 | 0 | 6 | 28 | 44 | 25 | 19 | 6 | 0 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PatternGuards #-}
module Mafia.Include
( getIncludeDirs
) where
import qualified Data.Text as T
import Mafia.Cabal
import Mafia.IO
import Mafia.Path
import Mafia.Error
import Mafia.P
import System.IO (IO)
import Control.Monad.Trans.Bifunctor (firstT)
import Control.Monad.Trans.Either (EitherT)
getIncludeDirs :: EitherT MafiaError IO [Path]
getIncludeDirs = do
packageDB <- firstT MafiaCabalError getPackageDB
subdirs <- getDirectoryListing (RecursiveDepth 0) packageDB
let packages = filter (extension ".conf") subdirs
concat <$> mapM readIncludeDirs packages
readIncludeDirs :: File -> EitherT MafiaError IO [Path]
readIncludeDirs package = do
contents <- readUtf8 package
return $
case contents of
Nothing -> []
Just txt -> concatMap parseLine $ T.lines txt
parseLine :: Text -> [Path]
parseLine line
| (i:is) <- T.words line
, T.toLower i == "include-dirs:"
= is
| otherwise
= []
| ambiata/mafia | src/Mafia/Include.hs | bsd-3-clause | 1,101 | 0 | 13 | 262 | 306 | 159 | 147 | 34 | 2 |
module Main where
import Control.Monad
import Data.List.Split
import Data.Monoid
import System.Environment
import System.Process
import Latex
import Rep
main :: IO ()
main = do
args <- getArgs
if length args < 2 then
putStrLn "Usage: crossword-helper $CROSS_FILE $OUTPUT_PREFIX"
else do
let [filePath, outputName] = args
rawData <- readFile filePath
let blankCrossword = constructCrossword (splitOn "\n" rawData)
filledCrossword <- fillAll blankCrossword
let puzzleFile = (outputName ++ ".tex")
let solutionFile = (outputName ++ "-solution.tex")
writeFile puzzleFile (latexUnsolved filledCrossword)
writeFile solutionFile (latexSolution filledCrossword)
void $ system $ "open " <> puzzleFile
void $ system $ "open " <> solutionFile
| oswynb/Crossword-Helper | src/Main.hs | bsd-3-clause | 857 | 0 | 15 | 221 | 225 | 111 | 114 | 24 | 2 |
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE JavaScriptFFI #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
-----------------------------------------------------------------------------
-- |
-- Module : Program.View.GroundMapView
-- Copyright : (c) Artem Chirkin
-- License : MIT
--
-- Maintainer : Artem Chirkin <[email protected]>
-- Stability : experimental
--
--
--
-----------------------------------------------------------------------------
module Program.View.GroundMapView
( GroundMapView (..)
, createGroundMapView, drawGroundMapView
) where
import JsHs.Types
import JsHs.WebGL
import JsHs.JSString
import Control.Concurrent.Chan
import Data.List (sortOn)
import Data.IORef
import qualified Data.JSString as JSString
import Control.Concurrent (forkIO)
import Data.Function ((&))
import Control.Monad ((<=<), forever, join)
import Data.Geometry
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
import Program.View
import GHCJS.Concurrent
import SmallGL.WritableVectors
import JsHs.Array
import qualified Data.Geometry.Structure.PointSet as PS
viewDistance :: GLfloat
viewDistance = 200
data GroundMapCell = GroundMapCell
{ gmcVertexBuffer :: !WebGLBuffer
, gmcMapTexture :: !WebGLTexture
}
data GroundMapView = GroundMapView
{ gmvLonLatCenter :: !(Vector 2 GLfloat)
, gmvLocalCenter :: !(Vector 3 GLfloat)
, gmvTileCenter :: !(Int,Int)
, gmvCellWidth :: !GLfloat
, gmvZoomLevel :: !Int
, gmvTiles :: !(Map (Int,Int) (IORef (Maybe GroundMapCell)))
, gmvMapUrl :: !JSString
}
createGroundMapView :: WebGLRenderingContext
-> JSString
-> Int -- ^ zoom level
-> (GLfloat, Vector2 GLfloat) -- ^ view scale and shift
-> Vector 3 GLfloat -- ^ longitude, latitude, altitude of origin
-> IO GroundMapView
createGroundMapView gl mapUrl zoomlvl (vscale, vshift) lonlatalt = do
let gmv = GroundMapView (vector2 lon0 lat0) pos0 (xtile0,ytile0) tileWidth zoomlvl Map.empty mapUrl
loadingchannel <- newChan
tiles <- Map.fromList <$> mapM (\p -> (,) p <$> createGroundMapCell loadingchannel gl gmv p)
( sortOn (\(i,j) -> (i - xtile0)*(i - xtile0) + (j - ytile0)*(j - ytile0))
[(xtile0+i,ytile0+j) | i <- [- nTiles .. nTiles -1], j <- [- nTiles .. nTiles -1]]
)
_ <- forkIO . forever . Control.Monad.join $ readChan loadingchannel
return $ gmv {gmvTiles = tiles}
where
(lon, lat, _) = unpackV3 lonlatalt
-- set up the center point to real center of the tile
(xtile0,ytile0) = zoomLonLat2xy zoomlvl (lon,lat)
(lon0, lat0) = zoomXY2LonLat zoomlvl (xtile0,ytile0)
(lon1, lat1) = zoomXY2LonLat zoomlvl (xtile0+1,ytile0+1)
-- a transform from WGS'84 to our local coordinates
wgs2metric x = broadcastVector vscale *
(js_useWGS84toUTMTransform (js_createWGS84toUTMTransform lon lat) x - resizeVector vshift)
-- get center positions in local metric system
pos0 = wgs2metric (vector3 lon0 lat0 0)
pos1 = wgs2metric (vector3 lon1 lat1 0)
tileWidth = normL2 (pos1 - pos0) / sqrt 2
nTiles = ceiling $ viewDistance / tileWidth / 2
createGroundMapCell :: Chan (IO ())
-> WebGLRenderingContext
-> GroundMapView
-> (Int, Int) -- ^ tile x and y
-> IO (IORef (Maybe GroundMapCell))
createGroundMapCell loadingchannel gl GroundMapView{..} tilexy@(x,y) = do
buf <- createBuffer gl
bindBuffer gl gl_ARRAY_BUFFER buf
bufferData gl gl_ARRAY_BUFFER arrayBuffer gl_STATIC_DRAW
gmc <- newIORef Nothing
_ <- forkIO $ do
img <- createTex gmvMapUrl gmvZoomLevel tilexy
writeChan loadingchannel . withoutPreemption $ do
tex <- initTexture gl (Left img)
writeIORef gmc . Just $ GroundMapCell buf tex
return gmc
where
arrayBuffer = packPoints (groundPoints (gmvLocalCenter + vector3 xx yy 0) gmvCellWidth)
groundNormals
groundTexCoords
xx = (gmvCellWidth *) . fromIntegral $ x - fst gmvTileCenter
yy = (gmvCellWidth *) . fromIntegral $ snd gmvTileCenter - y
groundPoints :: Vector 3 GLfloat -> GLfloat -> PS.PointArray 3 GLfloat
groundPoints p side = fromList
[ p + vector3 0 (-side) 0
, p + vector3 side (-side) 0
, p + vector3 0 0 0
, p + vector3 side 0 0
]
groundNormals :: PS.PointArray 3 GLbyte
groundNormals = fromList
[ vector3 0 0 maxBound
, vector3 0 0 maxBound
, vector3 0 0 maxBound
, vector3 0 0 maxBound
]
groundTexCoords :: PS.PointArray 2 GLushort
groundTexCoords = fromList
[ vector2 minBound minBound
, vector2 maxBound minBound
, vector2 minBound maxBound
, vector2 maxBound maxBound
]
drawGroundMapView :: WebGLRenderingContext
-> (GLuint,GLuint,GLuint)
-> GroundMapView -> IO ()
drawGroundMapView gl locs gmv = do
depthMask gl False
mapM_ (drawGroundMapCell gl locs <=< readIORef) $ gmvTiles gmv
depthMask gl True
drawGroundMapCell :: WebGLRenderingContext
-> (GLuint,GLuint,GLuint)
-> Maybe GroundMapCell -> IO ()
drawGroundMapCell _ _ Nothing = return ()
drawGroundMapCell gl (ploc,_,tloc) (Just GroundMapCell {..}) = do
bindTexture gl gl_TEXTURE_2D gmcMapTexture
bindBuffer gl gl_ARRAY_BUFFER gmcVertexBuffer
vertexAttribPointer gl ploc 3 gl_FLOAT False 20 0
--vertexAttribPointer gl nloc 3 gl_BYTE True 20 12
vertexAttribPointer gl tloc 2 gl_UNSIGNED_SHORT True 20 16
drawArrays gl gl_TRIANGLE_STRIP 0 4
foreign import javascript interruptible
"var osmImg = new Image(); osmImg.addEventListener('load', function(){$c(osmImg)}); osmImg.crossOrigin = 'anonymous'; osmImg.src = $1;"
js_createTex :: JSString -> IO TexImageSource
createTex :: JSString -> Int -> (Int,Int) -> IO TexImageSource
createTex urlPat zoom (xtile,ytile)
= js_createTex
$ urlPat
& JSString.replace "${z}" (pack $ show zoom)
& JSString.replace "${x}" (pack $ show xtile)
& JSString.replace "${y}" (pack $ show ytile)
zoomLonLat2xy :: Int -> (Float, Float) -> (Int, Int)
zoomLonLat2xy zoom (lon, lat) = (xtile, ytile)
where
n = 2 ^ zoom
xtile = round $ n * ((lon + 180) / 360)
ytile = round $ n * (1 - (log(tan(lat * pi / 180) + 1/cos(lat * pi / 180)) / pi)) / 2
zoomXY2LonLat :: Int -> (Int,Int) -> (Float, Float)
zoomXY2LonLat zoom (xtile, ytile) = (lon, lat)
where
n = 2 ^ zoom
lon = fromIntegral xtile / n * 360.0 - 180.0
lat = atan(sinh(pi * (1 - 2 * fromIntegral ytile / n))) * 180 / pi
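-- Hedged note (not part of the original file): these are the standard
-- Web-Mercator ("slippy map") tile formulas, x = n*(lon+180)/360 and
-- y = n*(1 - asinh(tan lat)/pi)/2 with n = 2^zoom, using the identity
-- log(tan t + 1/cos t) = asinh(tan t). Note that 'round' is used where the
-- usual convention floors, so 'zoomXY2LonLat zoom . zoomLonLat2xy zoom'
-- returns a tile corner within one tile width of the input coordinate.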
foreign import javascript unsafe "gm$createWGS84toUTMTransform($1, $2)"
js_createWGS84toUTMTransform :: Float -> Float -> JSVal
foreign import javascript unsafe "$1($2)"
js_useWGS84toUTMTransform :: JSVal -> Vector n GLfloat -> Vector n GLfloat
| achirkin/ghcjs-modeler | src/Program/View/GroundMapView.hs | bsd-3-clause | 7,109 | 15 | 21 | 1,714 | 2,062 | 1,095 | 967 | -1 | -1 |
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
module Main where
import Prolog2
instance TwoD String where
next = (++ " next")
down = (++ " down")
fact1, fact2 :: Fact String String
fact1 = const [Con "likes", Con "wallace", Con "cheese"]
fact2 = const [Con "likes", Con "grommit", Con "cheese"]
fact3 = const [Con "likes", Con "wendolene", Con "sheep"]
fact4 sc = [Con "likes", Var sc "X", Var sc "Z"]
fact5 sc = [Con "likes", Var sc "Y", Var sc "Z"]
rule1 :: Rule String String
rule1 = Rule fact1 [] [] []
rule2 = Rule fact2 [] [] []
rule25 = Rule fact3 [] [] []
rule3 = Rule (\sc -> [Con "friends", Var sc "X", Var sc "Y"])
-- [NotUnify (Var "" "X") (Var "" "Y")] [fact4, fact5] []
[] [fact4, fact5] [\sc -> [Con "du", Var sc "X", Var sc "Y"]]
rule4 = Rule (\sc -> [Con "du", Var sc "D", Var sc "D"]) [] [] []
rules = [rule1, rule2, rule25, rule3, rule4]
simpleRule = [rule1, rule2, srule3]
srule3 = Rule (\sc -> [Con "friends", Var sc "X", Var sc "Y"])
[] [\sc -> [Con "likes", Var sc "X", Var sc "Y"]] []
-- bug1 = ask "" (\sc -> [Con "friends", Var sc "Who", Var sc "What"]) simpleRule
q1, q2 :: Fact String String
q1 sc = [Con "likes", Var sc "X", Con "cheese"]
q2 sc = [Con "friends", Con "wallace", Con "grommit"]
q3 sc = [Con "friends", Con "wallace", Con "wallace"]
q4 sc = [Con "friends", Var sc "Who", Con "grommit"]
q5 sc = [Con "frineds", Var sc "X", Var sc "Y"]
q6 sc = [Con "frineds", Var sc "V", Var sc "W"]
q7 sc = [Con "likes", Var sc "Who", Var sc "What"]
rules2 = [rule21, rule22]
rule21 = rule4
rule22 = Rule (\sc -> [Con "brode", Var sc "da", Var sc "de"])
[] [\sc -> [Con "du", Var sc "da", Con "cinfo"]
-- ] []
, \sc -> [Con "du", Var sc "de", Con "tirxu"]] []
patfuFact1 :: Fact String String
patfuFact1 sc = [Con "patfu", Con "zeb", Con "jon.bois.sr"]
patfuFact2 sc = [Con "patfu", Con "jon.bois.sr", Con "jon.bois.jr"]
patfuRule1 :: Rule String String
patfuRule1 = Rule patfuFact1 [] [] []
patfuRule2 = Rule patfuFact2 [] [] []
patfuRule3 = Rule (\sc -> [Con "dzena", Var sc "X", Var sc "Y"])
[] [\sc -> [Con "patfu", Var sc "X", Var sc "Y"]] []
patfuRule4 = Rule (\sc -> [Con "dzena", Var sc "da", Var sc "de"])
[] [ \sc -> [Con "patfu", Var sc "da", Var sc "di"],
\sc -> [Con "dzena", Var sc "di", Var sc "de"]] []
patfuRules = [patfuRule1, patfuRule2, patfuRule3, patfuRule4]
duRule = Rule (\sc -> [Con "du", Var sc "X", Var sc "X"]) [] [] []
listFact sc = [Con "du", list1 sc, list2]
list1 sc = List [Var sc "X", Var sc "Y", Var sc "Z"]
list2 = List [Con "1", Con "2", Con "3"]
| YoshikuniJujo/lojysamban | src/testProlog.hs | bsd-3-clause | 2,562 | 14 | 9 | 515 | 1,295 | 681 | 614 | 53 | 1 |