code (stringlengths 5 to 1.03M) | repo_name (stringlengths 5 to 90) | path (stringlengths 4 to 158) | license (stringclasses, 15 values) | size (int64, 5 to 1.03M) | n_ast_errors (int64, 0 to 53.9k) | ast_max_depth (int64, 2 to 4.17k) | n_whitespaces (int64, 0 to 365k) | n_ast_nodes (int64, 3 to 317k) | n_ast_terminals (int64, 1 to 171k) | n_ast_nonterminals (int64, 1 to 146k) | loc (int64, -1 to 37.3k) | cycloplexity (int64, -1 to 1.31k)
---|---|---|---|---|---|---|---|---|---|---|---|---|
{- Generated from "srcLoc.duck" automatically; do not edit! -}
{-# LINE 1 "srcLoc.duck" #-}
module Gen.SrcLoc where
{-# LINE 1 "srcLoc.duck" #-}
import Memory
{-# LINE 3 "srcLoc.duck" #-}
data SrcLoc = SrcNone{srcFile :: !String}
| SrcLoc{srcFile :: !String, srcLine :: !Int, srcCol :: !Int}
| SrcRng{srcFile :: !String, srcLine :: !Int, srcCol :: !Int,
srcEndLine :: !Int, srcEndCol :: !Int}
{-# LINE 3 "srcLoc.duck" #-}
instance Convert SrcLoc where
{-# LINE 3 "srcLoc.duck" #-}
value (SrcNone a) = valCons 0 [value a]
{-# LINE 3 "srcLoc.duck" #-}
value (SrcLoc a b c) = valCons 1 [value a, value b, value c]
{-# LINE 3 "srcLoc.duck" #-}
value (SrcRng a b c d e)
= valCons 2 [value a, value b, value c, value d, value e]
{-# LINE 3 "srcLoc.duck" #-}
unsafeUnvalue val
= case unsafeTag val of
0 -> SrcNone (unsafeUnvalue (unsafeUnvalCons val))
1
-> let {-# LINE 5 "srcLoc.duck" #-}
(a, b, c) = unsafeUnvalCons val
in SrcLoc (unsafeUnvalue a) (unsafeUnvalue b) (unsafeUnvalue c)
2
-> let {-# LINE 6 "srcLoc.duck" #-}
(a, b, c, d, e) = unsafeUnvalCons val
in
SrcRng (unsafeUnvalue a) (unsafeUnvalue b) (unsafeUnvalue c)
(unsafeUnvalue d)
(unsafeUnvalue e)
_ -> error "bad tag in unsafeUnvalue SrcLoc"
{-# LINE 8 "srcLoc.duck" #-}
data Loc a = L !SrcLoc !a
{-# LINE 8 "srcLoc.duck" #-}
instance (Convert a) => Convert (Loc a) where
{-# LINE 8 "srcLoc.duck" #-}
value (L a b) = valCons 0 [value a, value b]
{-# LINE 8 "srcLoc.duck" #-}
unsafeUnvalue val
= let {-# LINE 9 "srcLoc.duck" #-}
(a, b) = unsafeUnvalCons val
in L (unsafeUnvalue a) (unsafeUnvalue b)
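-- Illustrative note (not part of the generated file): assuming the usual
-- round-trip contract of the Convert class used in this codebase, e.g.
--
--   unsafeUnvalue (value (SrcLoc "f.duck" 1 2)) :: SrcLoc
--
-- rebuilds the original constructor from its tag (here 1) and its fields.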
| girving/duck | duck/Gen/SrcLoc.hs | bsd-3-clause | 2,036 | 0 | 13 | 749 | 531 | 277 | 254 | 37 | 0 |
{-# LANGUAGE OverloadedStrings #-}
module Spec.Logic
( logicSpecs
) where
import Prelude hiding (lookup)
import Base
import Logic
import Models (true)
import Universe
import Util (toSqlKey)
import Test.Hspec
import Spec.Helpers
import Control.Monad.State (runState)
runU :: State Universe a -> (a, Universe)
runU = flip runState empty
check' :: Universe -> Int64 -> Int64 -> Maybe TValueId
check' u s p = lookup (toSqlKey s) (toSqlKey p) u
insertTheorem' :: Int64 -> Implication PropertyId -> State Universe ()
insertTheorem' = insertTheorem . toSqlKey
logicSpecs :: Spec
logicSpecs = do
it "allows direct assertions" $ do
let (_,u) = runU $ do
assertTrait' $ (1,1) ~. True
check' u 1 1 `shouldBe` Just true
it "can deduce when traits are added" $ do
let (proofs,u) = runU $ do
insertTheorem' 1 $ (1 ==. True) =>. (2 ==. True)
assertTrait' $ (1,1) ~. True
check' u 1 2 `shouldBe` Just true
length proofs `shouldBe` 1
it "can chain deductions" $ do
let (proofs,u) = runU $ do
insertTheorem' 1 $ (1 ==. True) =>. (2 ==. True)
insertTheorem' 2 $ (2 ==. True) =>. (3 ==. True)
assertTrait' $ (1,1) ~. True
check' u 1 3 `shouldBe` Just true
length proofs `shouldBe` 2
it "can assert conjunctions" $ do
let f = (1 ==. True) =>. ((2 ==. True) &&. (3 ==. True))
(_,u) = runU $ do
insertTheorem' 1 f
assertTrait' $ (1,1) ~. True
check' u 1 2 `shouldBe` Just true
check' u 1 3 `shouldBe` Just true
it "can assert disjunctions (if most parts are known)" $ do
let f = (1 ==. True) =>. ((2 ==. True) ||. (3 ==. True))
(_,u) = runU $ do
insertTheorem' 1 f
_ <- assertTrait' $ (1,2) ~. False
assertTrait' $ (1,1) ~. True
check' u 1 3 `shouldBe` Just true
| jamesdabbs/pi-base-2 | test/Spec/Logic.hs | bsd-3-clause | 1,849 | 0 | 19 | 509 | 758 | 388 | 370 | 51 | 1 |
module Sexy.Instances.DesnocMay () where
import Sexy.Instances.DesnocMay.List ()
import Sexy.Instances.DesnocMay.Maybe ()
| DanBurton/sexy | src/Sexy/Instances/DesnocMay.hs | bsd-3-clause | 123 | 0 | 4 | 11 | 30 | 21 | 9 | 3 | 0 |
module Language.LaTeX.Builder.Rotating (pkg, turn) where
import Language.LaTeX.Types
import qualified Language.LaTeX.Builder.Internal as BI
pkg :: PackageName
pkg = BI.pkgName "rotating"
turn :: Int -> LatexItem -> LatexItem
turn i = BI.latexEnvironment "turn" [BI.packageDependency pkg, BI.mandatory (BI.num i)]
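-- Illustrative sketch only: a hypothetical use of 'turn'. 'someFigure' is an
-- assumed LatexItem built elsewhere; the document must also carry the package
-- dependency recorded in 'pkg'.
--
--   turned :: LatexItem
--   turned = turn 45 someFigure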
| np/hlatex | Language/LaTeX/Builder/Rotating.hs | bsd-3-clause | 316 | 0 | 10 | 39 | 99 | 57 | 42 | 7 | 1 |
{-# LINE 1 "Data.Void.hs" #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE EmptyCase #-}
{-# LANGUAGE Safe #-}
{-# LANGUAGE StandaloneDeriving #-}
-----------------------------------------------------------------------------
-- |
-- Copyright : (C) 2008-2014 Edward Kmett
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : Edward Kmett <[email protected]>
-- Stability : provisional
-- Portability : portable
--
-- A logically uninhabited data type, used to indicate that a given
-- term should not exist.
--
-- @since 4.8.0.0
----------------------------------------------------------------------------
module Data.Void
( Void
, absurd
, vacuous
) where
import Control.Exception
import Data.Data
import Data.Ix
import GHC.Generics
-- | Uninhabited data type
--
-- @since 4.8.0.0
data Void deriving (Generic)
deriving instance Data Void
instance Eq Void where
_ == _ = True
instance Ord Void where
compare _ _ = EQ
-- | Reading a 'Void' value is always a parse error, considering
-- 'Void' as a data type with no constructors.
instance Read Void where
readsPrec _ _ = []
instance Show Void where
showsPrec _ = absurd
instance Ix Void where
range _ = []
index _ = absurd
inRange _ = absurd
rangeSize _ = 0
instance Exception Void
-- | Since 'Void' values logically don't exist, this witnesses the
-- logical reasoning tool of \"ex falso quodlibet\".
--
-- @since 4.8.0.0
absurd :: Void -> a
absurd a = undefined -- case a of {}
-- | If 'Void' is uninhabited then any 'Functor' that holds only
-- values of type 'Void' is holding no values.
--
-- @since 4.8.0.0
vacuous :: Functor f => f Void -> f a
vacuous = fmap absurd
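-- Illustrative additions (a sketch, not part of the original module): two
-- common downstream uses of 'absurd' and 'vacuous'. The names below are
-- hypothetical.
fromRightOnly :: Either Void a -> a
fromRightOnly = either absurd id  -- the 'Left' branch can never be taken

relabel :: Maybe Void -> Maybe Int
relabel = vacuous                 -- a functor full of 'Void' holds no values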
| phischu/fragnix | builtins/base/Data.Void.hs | bsd-3-clause | 1,767 | 0 | 7 | 355 | 258 | 150 | 108 | -1 | -1 |
module HSync.Server.Types where
import HSync.Server.Import.NoFoundation
--------------------------------------------------------------------------------
data AccessOptionType = ByAnonimous | ByPassword | ByUser
deriving (Show,Read,Eq,Ord)
instance PathPiece AccessOptionType where
toPathPiece = toPathPieceShow
fromPathPiece = fromPathPieceRead
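-- Illustrative note (assuming toPathPieceShow / fromPathPieceRead are the
-- usual Show/Read based helpers from the NoFoundation import):
--
--   toPathPiece ByPassword   ~> "ByPassword"
--   fromPathPiece "ByUser"   ~> Just ByUser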
| noinia/hsync-server | src/HSync/Server/Types.hs | bsd-3-clause | 379 | 0 | 6 | 60 | 65 | 39 | 26 | 7 | 0 |
{-# LANGUAGE ForeignFunctionInterface #-}
{-|
Module : Finance.Blpapi.Impl.ServiceImpl
Description : FFI for Service
Copyright : Bloomberg Finance L.P.
License : MIT
Maintainer : [email protected]
Stability : experimental
Portability : *nix, windows
-}
module Finance.Blpapi.Impl.ServiceImpl where
import Control.Monad
import Foreign hiding (unsafePerformIO)
import Foreign.C.String
import Finance.Blpapi.Impl.ErrorImpl
import Finance.Blpapi.Impl.RequestImpl
import Finance.Blpapi.Impl.SchemaImpl
import Finance.Blpapi.Service
newtype OperationImpl = OperationImpl (Ptr OperationImpl)
newtype ServiceImpl = ServiceImpl (Ptr ServiceImpl)
foreign import ccall safe "blpapi_service.h blpapi_Operation_name"
blpapi_Operation_name :: Ptr OperationImpl -> IO CString
foreign import ccall safe "blpapi_service.h blpapi_Operation_description"
blpapi_Operation_description :: Ptr OperationImpl -> IO CString
foreign import ccall safe "blpapi_service.h blpapi_Operation_requestDefinition"
blpapi_Operation_requestDefinition :: Ptr OperationImpl
-> Ptr (Ptr SchemaDefinitionImpl)
-> IO Int
foreign import ccall safe
"blpapi_service.h blpapi_Operation_numResponseDefinitions"
blpapi_Operation_numResponseDefinitions :: Ptr OperationImpl -> IO Int
foreign import ccall safe
"blpapi_service.h blpapi_Operation_responseDefinition"
blpapi_Operation_responseDefinition :: Ptr OperationImpl
-> Ptr (Ptr SchemaDefinitionImpl)
-> Int
-> IO Int
foreign import ccall safe "blpapi_service.h blpapi_Service_name"
blpapi_Service_name :: Ptr ServiceImpl -> IO CString
foreign import ccall safe "blpapi_service.h blpapi_Service_description"
blpapi_Service_description :: Ptr ServiceImpl -> IO CString
foreign import ccall safe "blpapi_service.h blpapi_Service_numOperations"
blpapi_Service_numOperations :: Ptr ServiceImpl -> IO Int
foreign import ccall safe "blpapi_service.h blpapi_Service_numEventDefinitions"
blpapi_Service_numEventDefinitions :: Ptr ServiceImpl -> IO Int
foreign import ccall safe "blpapi_service.h blpapi_Service_addRef"
blpapi_Service_addRef :: Ptr ServiceImpl -> IO Int
foreign import ccall safe "blpapi_service.h &blpapi_Service_release"
blpapi_Service_release :: FunPtr (Ptr ServiceImpl -> IO ())
foreign import ccall safe
"blpapi_service.h blpapi_Service_authorizationServiceName"
blpapi_Service_authorizationServiceName :: Ptr ServiceImpl -> IO CString
foreign import ccall safe "blpapi_service.h blpapi_Service_getOperationAt"
blpapi_Service_getOperationAt :: Ptr ServiceImpl
-> Ptr (Ptr OperationImpl)
-> Int
-> IO Int
foreign import ccall safe
"blpapi_service.h blpapi_Service_getEventDefinitionAt"
blpapi_Service_getEventDefinitionAt :: Ptr ServiceImpl
-> Ptr (Ptr SchemaDefinitionImpl)
-> Int
-> IO Int
foreign import ccall safe "blpapi_service.h blpapi_Service_createRequest"
blpapi_Service_createRequest :: Ptr ServiceImpl
-> Ptr (Ptr RequestImpl)
-> CString
-> IO Int
foreign import ccall safe
"blpapi_service.h blpapi_Service_createAuthorizationRequest"
blpapi_Service_createAuthorizationRequest :: Ptr ServiceImpl
-> Ptr (Ptr RequestImpl)
-> CString
-> IO Int
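-- The conversion helpers below repeatedly use the same FFI pattern for the
-- handle-returning accessors: allocate an out-parameter with 'alloca', call
-- the C function, check the returned status with 'failIfBadErrorCode', then
-- 'peek' the filled-in handle and convert it to its Haskell representation.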
convertOperationImpl :: Ptr OperationImpl -> IO Operation
convertOperationImpl ptr = do
name <- blpapi_Operation_name ptr >>= peekCString
des <- blpapi_Operation_description ptr >>= peekCString
req <- alloca $ \reqHandle -> do
rc <- blpapi_Operation_requestDefinition ptr reqHandle
failIfBadErrorCode rc
imp <- peek reqHandle
convertSchemaDefinitionImpl imp
num <- blpapi_Operation_numResponseDefinitions ptr
responseList <- forM [0..(num-1)] (responseDefintion ptr)
return $ Operation name des req responseList
where
responseDefintion p ind = alloca $ \resHandle -> do
rc <- blpapi_Operation_responseDefinition p resHandle ind
failOrConvert rc resHandle
failOrConvert :: Int -> Ptr (Ptr SchemaDefinitionImpl) -> IO SchemaDefinition
failOrConvert rc resHandle = do
failIfBadErrorCode rc
imp <- peek resHandle
convertSchemaDefinitionImpl imp
convertServiceImpl :: Ptr ServiceImpl -> IO Service
convertServiceImpl ptr = do
name <- blpapi_Service_name ptr >>= peekCString
des <- blpapi_Service_description ptr >>= peekCString
authServiceName <- blpapi_Service_authorizationServiceName ptr
>>= peekCString
numOp <- blpapi_Service_numOperations ptr
numSer <- blpapi_Service_numEventDefinitions ptr
opList <- forM [0..(numOp-1)] $ opDefinition ptr
evList <- forM [0..(numSer-1)] $ evDefinition ptr
return $ Service name des authServiceName opList evList
where
opDefinition p ind = alloca $ \resHandle -> do
rc <- blpapi_Service_getOperationAt p resHandle ind
failIfBadErrorCode rc
imp <- peek resHandle
convertOperationImpl imp
evDefinition p ind = alloca $ \resHandle -> do
rc <- blpapi_Service_getEventDefinitionAt p resHandle ind
failOrConvert rc resHandle
| bitemyapp/blpapi-hs | src/Finance/Blpapi/Impl/ServiceImpl.hs | mit | 5,975 | 0 | 13 | 1,752 | 1,091 | 530 | 561 | 99 | 1 |
module Main where
import Test.Hspec
import Spec
main :: IO ()
main = hspec spec
| dzotokan/minions-api | test/Main.hs | mit | 82 | 0 | 6 | 17 | 30 | 17 | 13 | 5 | 1 |
{-# LANGUAGE TypeOperators, TypeFamilies, GADTs, KindSignatures #-}
{-# LANGUAGE ExistentialQuantification, ScopedTypeVariables, PatternGuards #-}
{-# LANGUAGE MagicHash, ConstraintKinds, ViewPatterns, MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleContexts, FlexibleInstances #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE UndecidableInstances #-} -- see below
{-# OPTIONS_GHC -Wall -fno-warn-orphans #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-} -- TEMP
-- {-# OPTIONS_GHC -fno-warn-unused-binds #-} -- TEMP
----------------------------------------------------------------------
-- |
-- Module : LambdaCCC.Lambda
-- Copyright : (c) 2013 Tabula, Inc.
-- License : BSD3
--
-- Maintainer : [email protected]
-- Stability : experimental
--
-- Statically typed lambda expressions
----------------------------------------------------------------------
module LambdaCCC.Lambda
( Name
, V(..), Pat(..), E(..)
, occursVP, occursVE, occursPE
, (@^), lam, lett
, (#), caseEither
, var#, lamv#, varPat#, asPat#, casev#
, reifyE, evalE
, vars, vars2
, xor -- from Prim -- TODO: maybe remove
, intL
, EP, appP, lamP, lettP , varP#, lamvP#, letvP#, casevP#, eitherEP,reifyOopsEP#
, reprEP, abstEP, ifEP, bottomEP, loopEP
-- , coerceEP
, evalEP, reifyEP, kPrimEP, kLit -- , oops
, IfCirc, if', CircuitLoopKon
) where
import Data.Functor ((<$>))
import Control.Applicative (Applicative(..),liftA2)
import Control.Arrow ((&&&))
import Data.Typeable (Typeable)
import Data.Coerce (Coercible,coerce)
import Data.Maybe (fromMaybe,catMaybes,listToMaybe)
-- import Data.Coerce (Coercible,coerce)
import Text.Printf (printf)
import Debug.Trace (trace)
import Unsafe.Coerce (unsafeCoerce) -- TEMP
import GHC.Pack (unpackCString#)
import GHC.Prim (Addr#)
import Data.Proof.EQ
import TypeUnary.Nat (IsNat(..),Nat(..))
import TypeUnary.Vec (Vec(..),Z,S)
import Circat.Category (Rep,HasRep(..),RepCat(..),LoopCat(..))
import Circat.Prim
import Circat.Classes
import Circat.Circuit ((:>))
import LambdaCCC.Misc hiding (Eq'(..), (==?))
import LambdaCCC.ShowUtils
-- Whether to sugar during show, including 'let'
#define Sugared
-- Whether to simplify during construction
#define Simplify
-- | Variable names
type Name = String
-- | Typed variable. Phantom
data V a = V Name
instance Show (V a) where
showsPrec _ (V n) = showString n
varName :: V a -> Name
varName (V name) = name
instance Eq (V a) where (==) = (====)
-- instance Eq1' V where
-- (====) = (===)
-- instance Eq' (V a) (V b) where
-- V a === V b = a == b
instance Eq1' V where
V a ==== V b = a == b
infixr 1 :$
infixr 8 :@
-- | Lambda patterns
data Pat :: * -> * where
UnitPat :: Pat Unit
VarPat :: V a -> Pat a
(:$) :: Pat a -> Pat b -> Pat (a :* b)
(:@) :: Pat a -> Pat a -> Pat a
-- ZeroPat :: Pat (Nat Z)
-- SuccPat :: Pat (Nat m) -> Pat (Nat (S m))
-- NOTE: ":@" is named to suggest "as patterns", but is more general ("and patterns").
-- TODO: Rename UnitPat and VarPat to PUnit and PVar
instance Eq1' Pat where
UnitPat ==== UnitPat = True
VarPat v ==== VarPat v' = v ==== v'
(a :$ b) ==== (a' :$ b') = a ==== a' && b ==== b'
(a :@ b) ==== (a' :@ b') = a ==== a' && b ==== b'
-- ZeroPat ==== ZeroPat = True
-- SuccPat m ==== SuccPat m' = m ==== m'
_ ==== _ = False
instance Eq (Pat a) where (==) = (====)
instance Show (Pat a) where
showsPrec _ UnitPat = showString "()"
showsPrec p (VarPat v) = showsPrec p v
showsPrec p (a :$ b) = showsPair p a b
showsPrec p (a :@ b) = showsOp2' "@" (8,AssocRight) p a b
-- showsPrec _ ZeroPat = showString "Zero"
-- showsPrec p (SuccPat m) = showsApp1 "Succ" p m
-- | Does a variable occur in a pattern?
occursVP :: V a -> Pat b -> Bool
occursVP _ UnitPat = False
occursVP v (VarPat v') = varName v == varName v'
occursVP v (a :$ b) = occursVP v a || occursVP v b
occursVP v (a :@ b) = occursVP v a || occursVP v b
-- occursVP _ ZeroPat = False
-- occursVP v (SuccPat m) = occursVP v m
-- TODO: Pull v out of the recursion.
{-
-- | Does any variable from the first pattern occur in the second?
occursPP :: Pat a -> Pat b -> Bool
occursPP UnitPat _ = False
occursPP (VarPat v) q = occursVP v q
occursPP (PairPat a b) q = occursPP a q || occursPP b q
occursPP (AndPat a b) q = occursPP a q || occursPP b q
-}
#ifdef Simplify
-- | Substitute in a pattern
substVP :: V a -> Pat a -> Unop (Pat b)
substVP v p = substIn
where
substIn :: Unop (Pat c)
substIn UnitPat = UnitPat
substIn (VarPat ((v ===?) -> Just Refl)) = p
substIn (a :$ b) = substIn a :$ substIn b
substIn (a :@ b) = substIn a :@ substIn b
-- substIn ZeroPat = ZeroPat
-- substIn (SuccPat m) = SuccPat (substIn m)
substIn q@(VarPat _) = q
#endif
infixl 9 :^
-- | Lambda expressions
data E :: (* -> *) -> (* -> *) where
Var :: forall p a . V a -> E p a
ConstE :: forall p a . p a -> E p a
(:^) :: forall p b a . E p (a :=> b) -> E p a -> E p b
Lam :: forall p a b . Pat a -> E p b -> E p (a :=> b)
Either :: forall p a b c. E p (a -> c) -> E p (b -> c) -> E p (a :+ b -> c)
Loop :: forall p a b s. CircuitLoopKon s =>
E p (a :* s -> b :* s) -> E p (a -> b)
type CircuitLoopKon s = LoopKon (:>) s
-- CoerceE :: forall p a b . (Typeable a, Typeable b, Coercible a b) =>
-- E p a -> E p b
-- The explicit universals come from ghci's ":ty" command with ":set
-- -fprint-explicit-foralls", so that I can get the order right when
-- constructing Core programmatically.
-- | A variable occurs freely in an expression
occursVE :: V a -> E p b -> Bool
occursVE v@(V name) = occ
where
occ :: E p c -> Bool
occ (Var (V name')) = name == name'
occ (ConstE {}) = False
occ (f :^ e) = occ f || occ e
occ (Lam p e) = not (occursVP v p) && occ e
occ (Either f g) = occ f || occ g
occ (Loop h) = occ h
-- occ (CoerceE e) = occ e
-- | Some variable in a pattern occurs freely in an expression
occursPE :: Pat a -> E p b -> Bool
occursPE UnitPat = pure False
occursPE (VarPat v) = occursVE v
occursPE (p :$ q) = liftA2 (||) (occursPE p) (occursPE q)
occursPE (p :@ q) = liftA2 (||) (occursPE p) (occursPE q)
-- occursPE ZeroPat = pure False
-- occursPE (SuccPat m) = occursPE m
-- I've placed the quantifiers explicitly to reflect what I learned from GHCi
-- (In GHCi, use ":set -fprint-explicit-foralls" and ":ty (:^)".)
-- When I said "forall a b" in (:^), GHC swapped them back. Oh well.
instance Eq1' p => Eq1' (E p) where
Var v ==== Var v' = v ==== v'
ConstE x ==== ConstE x' = x ==== x'
(f :^ a) ==== (f' :^ a') = a ==== a' && f ==== f'
Lam p e ==== Lam p' e' = p ==== p' && e ==== e'
-- CoerceE e ==== CoerceE e' = e ==== e'
_ ==== _ = False
-- instance Eq1' p => Eq' (E p a) (E p b) where
-- (===) = (====)
instance Eq1' p => Eq (E p a) where (==) = (====)
{-
varT :: HasTy a => Name -> E p a
varT nm = Var (V nm typ)
constT :: HasTy a => Prim a -> E p a
constT p = ConstE p typ
var# :: forall a. Addr# -> Ty a -> E p a
var# addr ty = Var (V (unpackCString# addr) ty)
varPat# :: forall a. Addr# -> Ty a -> Pat a
varPat# addr ty = VarPat (V (unpackCString# addr) ty)
asPat# :: forall a. Addr# -> Pat a -> Pat a
asPat# addr pat = varPat# addr (patTy pat) :@ pat
-}
infixl 9 @^
-- | Smart application
(@^) :: forall b a p . E p (a :=> b) -> E p a -> E p b
#ifdef Simplify
-- ...
#endif
f @^ a = f :^ a
#ifdef Simplify
{-
patToE :: Pat a -> E p a
patToE UnitPat = ConstE (LitP ()) Unit
patToE (VarPat v) = Var v
patToE (PairPat p q) | HasTy <- patHasTy p, HasTy <- patHasTy q
= patToE p # patToE q
patToE (AndPat _ _) = error "patToE: AndPat not yet handled"
-}
-- Instead, generate *all* expressions for a pattern, forking at an AndPat.
patToEs :: PrimBasics p => Pat a -> [E p a]
patToEs UnitPat = pure $ ConstE unitP
patToEs (VarPat v) = pure $ Var v
patToEs (p :$ q) = liftA2 (#) (patToEs p) (patToEs q)
patToEs (p :@ q) = patToEs p ++ patToEs q
-- patToEs ZeroPat = error "patToEs: not yet handling ZeroPat"
-- patToEs (SuccPat _) = error "patToEs: not yet handling SuccPat"
-- patToEs ZeroPat = pure $ ConstE zeroP
-- patToEs (SuccPat m) = pure $ ConstE (succP m)
-- Will there really be term-level versions of singletons?
#endif
-- TODO: watch out for repeated (++)
lam :: (PrimBasics p, Eq1' p) =>
Pat a -> E p b -> E p (a -> b)
#ifdef Simplify
-- Eta-reduction
-- lam p (f :^ u) | Just Refl <- patTy p `tyEq` expTy u
-- , u == patToE p
-- , not (p `occursPE` f)
-- = f
lam p (f :^ u) | Refl : _ <- catMaybes ((u ===?) <$> patToEs p)
, not (p `occursPE` f)
= f
-- TODO: Look for more efficient implementation rather than generate expressions
-- and test for equality.
-- Re-nest lambda patterns
lam p (Lam q w :^ Var v) | occursVP v p && not (occursVE v w) =
lam (substVP v q p) w
#endif
lam p body = Lam p body
{-
lamv# :: forall a b. Addr# -> Ty a -> E p b -> E p (a -> b)
lamv# addr ty body = lam (VarPat (V (unpackCString# addr) ty)) body
-}
-- | Let expression (beta redex)
lett :: forall a b p. (PrimBasics p, Eq1' p) =>
Pat a -> E p a -> E p b -> E p b
lett UnitPat _ body = body -- Warning: may gain termination.
lett pat e body = lam pat body @^ e
infixr 1 #
(#) :: PrimBasics p => E p a -> E p b -> E p (a :* b)
-- (ConstE Exl :^ p) # (ConstE Exr :^ p') | ... = ...
a # b = ConstE pairP @^ a @^ b
-- Handle surjectivity in @^ rather than here.
eitherE :: forall p a c b . E p (a -> c) -> E p (b -> c) -> E p (a :+ b -> c)
eitherE = Either -- for now
-- The order a c b matches either
-- | Encode a case expression on 'Left' & 'Right'.
caseEither :: forall p a b c . (PrimBasics p, Eq1' p) =>
Pat a -> E p c -> Pat b -> E p c -> E p (a :+ b) -> E p c
caseEither p u q v ab = (lam p u `eitherE` lam q v) @^ ab
-- coerceE :: forall p a b . (Typeable a, Typeable b, Coercible a b) => E p a -> E p b
-- coerceE = CoerceE
instance (HasOpInfo prim, Show' prim, PrimBasics prim, Eq1' prim)
=> Show (E prim a) where
#ifdef Sugared
-- showsPrec p (Either (Lam q a) (Lam r b) :^ ab) =
-- showParen (p > 0) $
-- showString "case " . showsPrec 0 ab . showString " of { "
-- . showsPrec 0 q . showString " -> " . showsPrec 0 a . showString " ; "
-- . showsPrec 0 r . showString " -> " . showsPrec 0 b . showString " } "
showsPrec p (Lam q body :^ rhs) = -- beta redex as "let"
showParen (p > 0) $
showString "let " . showsPrec 0 q . showString " = " . showsPrec 0 rhs
. showString " in " . showsPrec 0 body
showsPrec p (ConstE ((==== pairP) -> True) :^ u :^ v)
= showsPair p u v
#endif
showsPrec p (ConstE prim :^ u :^ v) | Just (OpInfo op fixity) <- opInfo prim =
showsOp2' op fixity p u v
showsPrec _ (Var (V n)) = showString n
showsPrec p (ConstE c) = showsPrec' p c
showsPrec p (u :^ v) = showsApp p u v
showsPrec p (Lam q e) =
showParen (p > 0) $
showString "\\ " . showsPrec 0 q . showString " -> " . showsPrec 0 e
showsPrec p (Either f g) = showsOp2' "|||" (2,AssocRight) p f g
showsPrec p (Loop h) = showsApp1 "loop" p h
-- showsPrec p (CoerceE e) = showsApp1 "coerce" p e
-- TODO: Multi-line pretty printer with indentation
data OpInfo = OpInfo String Fixity
class HasOpInfo p where
opInfo :: p a -> Maybe OpInfo
instance HasOpInfo Prim where
opInfo MulP = Just $ OpInfo "*" (7,AssocLeft )
opInfo AddP = Just $ OpInfo "+" (6,AssocLeft )
opInfo SubP = Just $ OpInfo "-" (6,AssocLeft )
opInfo AndP = Just $ OpInfo "&&" (3,AssocRight)
opInfo OrP = Just $ OpInfo "||" (2,AssocRight)
opInfo XorP = Just $ OpInfo "`xor`" (2,AssocRight)
opInfo EqP = Just $ OpInfo "==" (4,AssocNone )
opInfo NeP = Just $ OpInfo "/=" (4,AssocNone )
opInfo LtP = Just $ OpInfo "<" (4,AssocNone )
opInfo GtP = Just $ OpInfo ">" (4,AssocNone )
opInfo LeP = Just $ OpInfo "<=" (4,AssocNone )
opInfo GeP = Just $ OpInfo ">=" (4,AssocNone )
opInfo _ = Nothing
-- | Single variable binding
data Bind = forall a. Bind (V a) a
-- | Variable environment
type Env = [Bind]
reifyOops# :: Addr# -> E p a
reifyOops# addr = error ("reifyE: not handled: " ++ unpackCString# addr)
reifyE :: a -> E p a
reifyE _ = error (printf "reifyE: Oops -- not eliminated.")
{-# NOINLINE reifyE #-} -- to give reify/eval rules a chance
{-# RULES
"reifyE/evalE" forall e. reifyE (evalE e) = e
-- "evalE/reifyE" forall x. evalE (reifyE x) = x
"reifyEP/evalEP" forall e. reifyEP (evalEP e) = e
-- "evalEP/reifyEP" forall x. evalEP (reifyEP x) = x
#-}
-- We evaluate *closed* expressions (no free variables)
instance (HasOpInfo p, Show' p, EvalableP p, Eq1' p, PrimBasics p) =>
Evalable (E p a) where
type ValT (E p a) = a
eval = evalE
evalE :: (HasOpInfo p, Show' p, EvalableP p, Eq1' p, PrimBasics p) =>
E p a -> a
evalE e = trace ("evalE: " ++ show e) $
eval' e [] -- provide empty environment
-- TODO: Rework so that eval' can work independently of env. Will save repeated
-- evals.
-- Expression evaluation requires a binding environment. In other words,
-- expressions evaluate to a function from environments.
eval' :: (HasOpInfo p, Show' p, EvalableP p) =>
E p a -> Env -> a
#if 1
eval' (Var v) env = fromMaybe (error $ "eval': unbound variable: " ++ show v) $
lookupVar v env
eval' (ConstE p) _ = evalP p
eval' (u :^ v) env = (eval' u env) (eval' v env)
eval' (Lam p e) env = \ x -> eval' e (extendEnv p x env)
eval' (Either f g) env = eval' f env `either` eval' g env
eval' (Loop h) env = loop (eval' h env)
-- eval' (CoerceE e) env = coerce (eval' e env)
#else
-- More efficiently, traverse the expression just once, even under lambdas:
eval' (Var v) = fromMaybe (error $ "eval': unbound variable: " ++ show v) .
lookupVar v
eval' (ConstE p) = const (evalP p)
eval' (u :^ v) = eval' u <*> eval' v
eval' (Lam p e) = (fmap.fmap) (eval' e) (flip (extendEnv p))
eval' (Either f g) = liftA2 either (eval' f) (eval' g)
-- eval' (CoerceE e) = coerce (eval' e)
-- Derivation of Lam case:
--
-- \ env -> \ x -> eval' e (extendEnv p x env)
-- == \ env -> \ x -> eval' e (flip (extendEnv p) env x)
-- == \ env -> eval' e . flip (extendEnv p) env
-- == \ env -> fmap (eval' e) (flip (extendEnv p) env)
-- == fmap (eval' e) . flip (extendEnv p)
-- == (fmap.fmap) (eval' e) (flip (extendEnv p))
#endif
extendEnv :: Pat b -> b -> (Env -> Env)
extendEnv UnitPat () = id
extendEnv (VarPat vb) b = (Bind vb b :)
extendEnv (p :$ q) (a,b) = extendEnv q b . extendEnv p a
extendEnv (p :@ q) b = extendEnv q b . extendEnv p b
-- extendEnv ZeroPat Zero = id
-- extendEnv (SuccPat q) (Succ m) = extendEnv q m
-- TODO: Rewrite extendEnv so that it examines the pattern just once,
-- independently from the value.
lookupVar :: forall a. V a -> Env -> Maybe a
lookupVar va = listToMaybe . catMaybes . map check
where
check :: Bind -> Maybe a
check (Bind vb b) | Just Refl <- va ===? vb = Just b
| otherwise = Nothing
-- Oh, hm. I'm using a difference (Hughes) list representation. extendEnv maps
-- UnitPat, VarPat, and PairPat to mempty, singleton, and mappend, respectively.
--
-- TODO: adopt another representation, such as Seq. Replace the explicit
-- recursion in lookupVar with a fold or something. It's almost a mconcat.
vars :: Name -> (Pat a, E p a)
vars = (VarPat &&& Var) . V
-- vars n = (VarPat v, Var v) where v = V n typ
vars2 :: (Name,Name) -> (Pat (a,b), (E p a,E p b))
vars2 (na,nb) = (ap :$ bp, (ae,be))
where
(ap,ae) = vars na
(bp,be) = vars nb
{--------------------------------------------------------------------
Rules
--------------------------------------------------------------------}
kPrim :: p a -> E p a
kPrim = ConstE
kLit :: HasLit a => a -> EP a
kLit = kPrim . litP
-- Temporary monomorphic specialization of kLit, until I know how to synthesize
-- dictionaries.
intL :: Int -> EP Int
intL = kLit
-- TODO: change the following rules back to reifyE
#if 0
{-# RULES
"reify/not" reifyEP not = kPrim NotP
"reify/(&&)" reifyEP (&&) = kPrim AndP
"reify/(||)" reifyEP (||) = kPrim OrP
"reify/xor" reifyEP xor = kPrim XorP
"reify/(+)" reifyEP (+) = kPrim AddP
"reify/(-)" reifyEP (-) = kPrim SubP
"reify/(*)" reifyEP (*) = kPrim MulP
"reify/exl" reifyEP fst = kPrim ExlP
"reify/exr" reifyEP snd = kPrim ExrP
"reify/pair" reifyEP (,) = kPrim PairP
"reify/inl" reifyEP Left = kPrim InlP
"reify/inr" reifyEP Right = kPrim InrP
"reify/if" reifyEP muxB = kPrim CondBP
"reify/()" reifyEP () = kLit ()
"reify/false" reifyEP False = kLit False
"reify/true" reifyEP True = kLit True
-- TODO: Why reify/unPair' and not reify/unVecZ & reify/unVecS ?
-- TODO: trees
-- -- This one avoids currying
-- "reify/(a:<as)" forall a as. reifyEP (a:<as) = reifyEP (toVecS' (a,as))
-- HACK/experiment.
-- Doesn't fire when the 2 is let-abstracted.
-- TODO: Fix worthLet in Reify.
-- "reify/square" reifyEP (^2) = reifyEP square
#-}
#endif
-- square :: Num a => Unop a
-- square a = a * a
-- For literals, I'd like to say
--
-- "reify/lit" forall a x. HasLit a => reifyEP x = kLit x
--
-- but I don't think GHC likes constraints here.
{-# RULES
"True/xor" forall b. True `xor` b = not b
"xor/True" forall a. a `xor` True = not a
"False/xor" forall b. False `xor` b = b
"xor/False" forall a. a `xor` False = a
"0 + a" forall a. 0 + a = a
"a + 0" forall a. a + 0 = a
#-}
#if 0
-- "reify/if-bool" reifyEP cond = reifyEP condBool
-- "reify/if-pair" reifyEP cond = reifyEP condPair
condBool :: (Bool,(Bool,Bool)) -> Bool
condBool (i,(e,t)) = (i && t) || (not i && e)
-- TODO:
-- Maybe ditch condBool in favor of mux on (->).
-- Or maybe define condBool = eval CondBP.
-- Hm. Might need dictionary argument.
-- condBool (i,(t,e)) = if i then t else e
condPair :: (Bool,((a,b),(a,b))) -> (a,b)
condPair (a,((b',b''),(c',c''))) = (cond (a,(b',c')),cond (a,(b'',c'')))
#endif
#if 0
-- TODO: if-splitting has gone through a few incarnations. Re-examine, and
-- prune away unused code.
{-# RULES
"if/pair" forall a b c b' c'.
ifThenElse a (b,c) (b',c') = (ifThenElse a b c,ifThenElse a b' c')
"condPair" forall q. cond q = condPair q
#-}
{-# RULES
"if-split" forall a b c.
ifThenElse a b c = (ifThenElse a (fst b) (fst c),ifThenElse a (fst b) (fst c))
#-}
#endif
{--------------------------------------------------------------------
Constructors that take Addr#, for ReifyLambda
--------------------------------------------------------------------}
var# :: forall a p. p ~ Prim =>
Addr# -> E p a
var# addr = Var (V (unpackCString# addr))
varPat# :: forall a. Addr# -> Pat a
varPat# addr = VarPat (V (unpackCString# addr))
asPat# :: forall a. Addr# -> Pat a -> Pat a
asPat# addr pat = varPat# addr :@ pat
lamv# :: forall a b p. (PrimBasics p, Eq1' p, p ~ Prim) =>
Addr# -> E p b -> E p (a -> b)
lamv# addr body = lam (VarPat (V (unpackCString# addr))) body
letv# :: forall p a b. (PrimBasics p, Eq1' p) =>
Addr# -> E p a -> E p b -> E p b
letv# addr body = lett (varPat# addr) body
casev# :: forall a b c p. (PrimBasics p, Eq1' p, p ~ Prim) =>
Addr# -> E p c -> Addr# -> E p c -> E p (a :+ b) -> E p c
casev# a q b = caseEither (varPat# a) q (varPat# b)
-- TODO: Drop the p ~ Prim constraints, and tweak ReifyLambda to pass in Prim as
-- a type argument.
{--------------------------------------------------------------------
Less polymorphic versions temporarily
--------------------------------------------------------------------}
type EP = E Prim
appP :: forall b a . EP (a :=> b) -> EP a -> EP b
appP = (@^)
lamP :: forall a b. Pat a -> EP b -> EP (a -> b)
lamP = lam
lettP :: forall a b. Pat a -> EP a -> EP b -> EP b
lettP = lett
varP# :: Addr# -> EP a
varP# = var#
lamvP# :: forall a b. Addr# -> EP b -> EP (a -> b)
lamvP# = lamv#
letvP# :: forall a b. Addr# -> EP a -> EP b -> EP b
letvP# = letv#
casevP# :: forall a b c. Addr# -> EP c -> Addr# -> EP c -> EP (a :+ b) -> EP c
casevP# = casev#
eitherEP :: forall a c b . EP (a -> c) -> EP (b -> c) -> EP (a :+ b -> c)
eitherEP = eitherE
-- The order a c b matches 'either'
-- coerceEP :: forall a b . (Typeable a, Typeable b, Coercible a b) => EP a -> EP b
-- coerceEP = coerceE
abstEP :: forall a. HasRep a => forall a'. Rep a ~ a' => EP (a' -> a)
abstEP = kPrim AbstP
reprEP :: forall a. HasRep a => forall a'. Rep a ~ a' => EP (a -> a')
reprEP = kPrim ReprP
-- The odd signatures of abstEP and reprEP are to match those of the abst and
-- repr methods.
ifEP :: forall a. IfCat (:>) a => EP (Bool :* (a :* a) -> a)
ifEP = kPrim IfP
bottomEP :: forall a. BottomCat (:>) a => EP a
bottomEP = kPrim BottomP @^ ConstE unitP
loopEP :: forall a b s. CircuitLoopKon s => EP (a :* s -> b :* s) -> EP (a -> b)
loopEP = Loop
evalEP :: EP a -> a
evalEP = evalE
{-# NOINLINE evalEP #-}
reifyEP :: a -> EP a
reifyEP = reifyE
{-# NOINLINE reifyEP #-}
-- If reifyEP doesn't get inlined, change the reifyE prim rules below to
-- reifyEP.
kPrimEP :: Prim a -> EP a
kPrimEP = kPrim
-- oops :: EP a
-- oops = kPrim OopsP
reifyOopsEP# :: Addr# -> EP a
reifyOopsEP# = reifyOops#
{--------------------------------------------------------------------
Move elsewhere
--------------------------------------------------------------------}
-- I'm experimenting with dropping Eq' in favor of Eq1' (and renaming the latter).
-- instance Eq1' Prim where (====) = (===)
instance Eq1' Prim where
LitP a ==== LitP b = a ==== b
NotP ==== NotP = True
AndP ==== AndP = True
OrP ==== OrP = True
XorP ==== XorP = True
NegateP ==== NegateP = True
AddP ==== AddP = True
SubP ==== SubP = True
MulP ==== MulP = True
EqP ==== EqP = True
NeP ==== NeP = True
LtP ==== LtP = True
GtP ==== GtP = True
LeP ==== LeP = True
GeP ==== GeP = True
ExlP ==== ExlP = True
ExrP ==== ExrP = True
InlP ==== InlP = True
InrP ==== InrP = True
PairP ==== PairP = True
#if 0
CondBP ==== CondBP = True
CondIP ==== CondIP = True
#else
IfP ==== IfP = True
#endif
AbstP ==== AbstP = True
ReprP ==== ReprP = True
BottomP ==== BottomP = True
_ ==== _ = False
instance Eq1' Lit where
UnitL x ==== UnitL y = x == y
BoolL x ==== BoolL y = x == y
IntL x ==== IntL y = x == y
DoubleL x ==== DoubleL y = x == y
_ ==== _ = False
type IfCirc = IfCat (:>)
-- Matches ifC from IfCat in Circat.Classes
if' :: forall a. IfCirc a => Bool :* (a :* a) -> a
if' (i,(t,e)) = if i then t else e
{-# NOINLINE if' #-}
#if 0
-- Matches bottom from BottomCat in Circat.Classes
bottom' :: forall a. CircuitBot a => Bool :* (a :* a) -> a
bottom' = error "bottom'"
{-# NOINLINE bottom' #-}
#endif
| conal/lambda-ccc | src/LambdaCCC/Lambda.hs | bsd-3-clause | 23,605 | 0 | 14 | 6,216 | 6,300 | 3,359 | 2,941 | 323 | 6 |
{-# OPTIONS -fno-warn-type-defaults -fno-warn-unused-binds -fno-warn-orphans #-}
{-# LANGUAGE FlexibleInstances, ExistentialQuantification #-}
module Test.TestParseTime where
import Control.Monad
import Data.Char
import Data.Ratio
import Data.Time
import Data.Time.Calendar.OrdinalDate
import Data.Time.Calendar.WeekDate
import Data.Time.Clock.POSIX
import Test.QuickCheck hiding (Result,reason)
import Test.QuickCheck.Property hiding (result)
import Test.TestUtil hiding (Result)
ntest :: Int
ntest = 1000
type NamedProperty = (String, Property)
testParseTime :: Test
testParseTime = testGroup "testParseTime"
[
readOtherTypesTest,
readTests,
simpleFormatTests,
extests,
particularParseTests,
badParseTests,
defaultTimeZoneTests,
militaryTimeZoneTests,
testGroup "properties" (fmap (\(n,prop) -> testProperty n prop) properties)
]
yearDays :: Integer -> [Day]
yearDays y = [(fromGregorian y 1 1) .. (fromGregorian y 12 31)]
makeExhaustiveTest :: String -> [t] -> (t -> Test) -> Test
makeExhaustiveTest name cases f = testGroup name (fmap f cases)
extests :: Test
extests = testGroup "exhaustive" ([
makeExhaustiveTest "parse %y" [0..99] parseYY,
makeExhaustiveTest "parse %-C %y 1900s" [0,1,50,99] (parseCYY 19),
makeExhaustiveTest "parse %-C %y 2000s" [0,1,50,99] (parseCYY 20),
makeExhaustiveTest "parse %-C %y 1400s" [0,1,50,99] (parseCYY 14),
makeExhaustiveTest "parse %C %y 0700s" [0,1,50,99] (parseCYY2 7),
makeExhaustiveTest "parse %-C %y 700s" [0,1,50,99] (parseCYY 7),
makeExhaustiveTest "parse %-C %y 10000s" [0,1,50,99] (parseCYY 100),
makeExhaustiveTest "parse %-C centuries" [20..100] (parseCentury " "),
makeExhaustiveTest "parse %-C century X" [1,10,20,100] (parseCentury "X"),
makeExhaustiveTest "parse %-C century 2sp" [1,10,20,100] (parseCentury " "),
makeExhaustiveTest "parse %-C century 5sp" [1,10,20,100] (parseCentury " ")
] ++
(concat $ fmap
(\y -> [
(makeExhaustiveTest "parse %Y%m%d" (yearDays y) parseYMD),
(makeExhaustiveTest "parse %Y %m %d" (yearDays y) parseYearDayD),
(makeExhaustiveTest "parse %Y %-m %e" (yearDays y) parseYearDayE)
]) [1,4,20,753,2000,2011,10001]))
readTest :: (Eq a,Show a,Read a) => [(a,String)] -> String -> Test
readTest expected target = let
found = reads target
result = diff expected found
name = show target
in pureTest name result
readTestsParensSpaces :: forall a. (Eq a,Show a,Read a) => a -> String -> Test
readTestsParensSpaces expected target = testGroup target
[
readTest [(expected,"")] $ target,
readTest [(expected,"")] $ "("++target++")",
readTest [(expected,"")] $ " ("++target++")",
readTest [(expected," ")] $ " ( "++target++" ) ",
readTest [(expected," ")] $ " (( "++target++" )) ",
readTest ([] :: [(a,String)]) $ "("++target,
readTest [(expected,")")] $ ""++target++")",
readTest [(expected,"")] $ "(("++target++"))",
readTest [(expected," ")] $ " ( ( "++target++" ) ) "
] where
readOtherTypesTest :: Test
readOtherTypesTest = testGroup "read other types"
[
readTestsParensSpaces 3 "3",
readTestsParensSpaces "a" "\"a\""
]
readTests :: Test
readTests = testGroup "read times"
[
readTestsParensSpaces testDay "1912-07-08",
--readTestsParensSpaces testDay "1912-7-8",
readTestsParensSpaces testTimeOfDay "08:04:02"
--,readTestsParensSpaces testTimeOfDay "8:4:2"
] where
testDay = fromGregorian 1912 7 8
testTimeOfDay = TimeOfDay 8 4 2
epoch :: LocalTime
epoch = LocalTime (fromGregorian 1970 0 0) midnight
simpleFormatTests :: Test
simpleFormatTests = testGroup "simple"
[
readsTest [(epoch,"")] "" "",
readsTest [(epoch," ")] "" " ",
readsTest [(epoch,"")] " " " ",
readsTest [(epoch,"")] " " " ",
readsTest [(epoch,"")] "%k" "0",
readsTest [(epoch,"")] "%k" " 0",
readsTest [(epoch,"")] "%m" "01",
readsTest [(epoch," ")] "%m" "01 ",
readsTest [(epoch," ")] " %m" " 01 ",
readsTest [(epoch,"")] " %m" " 01",
-- https://ghc.haskell.org/trac/ghc/ticket/9150
readsTest [(epoch,"")] " %M" " 00",
readsTest [(epoch,"")] "%M " "00 ",
readsTest [(epoch,"")] "%Q" "",
readsTest [(epoch," ")] "%Q" " ",
readsTest [(epoch,"X")] "%Q" "X",
readsTest [(epoch," X")] "%Q" " X",
readsTest [(epoch,"")] "%Q " " ",
readsTest [(epoch,"")] "%Q X" " X",
readsTest [(epoch,"")] "%QX" "X"
] where
readsTest :: (Show a, Eq a, ParseTime a) => [(a,String)] -> String -> String -> Test
readsTest expected formatStr target = let
found = readSTime False defaultTimeLocale formatStr target
result = diff expected found
name = (show formatStr) ++ " of " ++ (show target)
in pureTest name result
spacingTests :: (Show t, Eq t, ParseTime t) => t -> String -> String -> Test
spacingTests expected formatStr target = testGroup "particular"
[
parseTest False (Just expected) formatStr target,
parseTest True (Just expected) formatStr target,
parseTest False (Just expected) (formatStr ++ " ") (target ++ " "),
parseTest True (Just expected) (formatStr ++ " ") (target ++ " "),
parseTest False (Just expected) (" " ++ formatStr) (" " ++ target),
parseTest True (Just expected) (" " ++ formatStr) (" " ++ target),
parseTest True (Just expected) ("" ++ formatStr) (" " ++ target),
parseTest True (Just expected) (" " ++ formatStr) (" " ++ target)
]
particularParseTests :: Test
particularParseTests = testGroup "particular"
[
spacingTests epoch "%Q" "",
spacingTests epoch "%Q" ".0",
spacingTests epoch "%k" " 0",
spacingTests epoch "%M" "00",
spacingTests epoch "%m" "01",
spacingTests (TimeZone 120 False "") "%z" "+0200",
spacingTests (TimeZone 120 False "") "%Z" "+0200",
spacingTests (TimeZone (-480) False "PST") "%Z" "PST"
]
badParseTests :: Test
badParseTests = testGroup "bad"
[
parseTest False (Nothing :: Maybe Day) "%Y" ""
]
parseYMD :: Day -> Test
parseYMD day = case toGregorian day of
(y,m,d) -> parseTest False (Just day) "%Y%m%d" ((show y) ++ (show2 m) ++ (show2 d))
parseYearDayD :: Day -> Test
parseYearDayD day = case toGregorian day of
(y,m,d) -> parseTest False (Just day) "%Y %m %d" ((show y) ++ " " ++ (show2 m) ++ " " ++ (show2 d))
parseYearDayE :: Day -> Test
parseYearDayE day = case toGregorian day of
(y,m,d) -> parseTest False (Just day) "%Y %-m %e" ((show y) ++ " " ++ (show m) ++ " " ++ (show d))
-- | 1969 - 2068
expectedYear :: Integer -> Integer
expectedYear i | i >= 69 = 1900 + i
expectedYear i = 2000 + i
show2 :: (Show n,Integral n) => n -> String
show2 i = (show (div i 10)) ++ (show (mod i 10))
parseYY :: Integer -> Test
parseYY i = parseTest False (Just (fromGregorian (expectedYear i) 1 1)) "%y" (show2 i)
parseCYY :: Integer -> Integer -> Test
parseCYY c i = parseTest False (Just (fromGregorian ((c * 100) + i) 1 1)) "%-C %y" ((show c) ++ " " ++ (show2 i))
parseCYY2 :: Integer -> Integer -> Test
parseCYY2 c i = parseTest False (Just (fromGregorian ((c * 100) + i) 1 1)) "%C %y" ((show2 c) ++ " " ++ (show2 i))
parseCentury :: String -> Integer -> Test
parseCentury int c = parseTest False (Just (fromGregorian (c * 100) 1 1)) ("%-C" ++ int ++ "%y") ((show c) ++ int ++ "00")
parseTest :: (Show t, Eq t, ParseTime t) => Bool -> Maybe t -> String -> String -> Test
parseTest sp expected formatStr target =
let
found = parse sp formatStr target
result = diff expected found
name = (show formatStr) ++ " of " ++ (show target) ++ (if sp then " allowing spaces" else "")
in pureTest name result
{-
readsTest :: forall t. (Show t, Eq t, ParseTime t) => Maybe t -> String -> String -> Test
readsTest (Just e) = readsTest' [(e,"")]
readsTest Nothing = readsTest' ([] :: [(t,String)])
-}
enumAdd :: (Enum a) => Int -> a -> a
enumAdd i a = toEnum (i + fromEnum a)
getMilZoneLetter :: Int -> Char
getMilZoneLetter 0 = 'Z'
getMilZoneLetter h | h < 0 = enumAdd (negate h) 'M'
getMilZoneLetter h | h < 10 = enumAdd (h - 1) 'A'
getMilZoneLetter h = enumAdd (h - 10) 'K'
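-- Worked examples of the mapping above (for reference): getMilZoneLetter 0 is
-- 'Z'; 1..9 map to 'A'..'I'; 10..12 map to 'K'..'M' (the letter 'J' is
-- skipped); and (-1)..(-12) map to 'N'..'Y'.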
getMilZone :: Int -> TimeZone
getMilZone hour = TimeZone (hour * 60) False [getMilZoneLetter hour]
testParseTimeZone :: TimeZone -> Test
testParseTimeZone tz = parseTest False (Just tz) "%Z" (timeZoneName tz)
defaultTimeZoneTests :: Test
defaultTimeZoneTests = testGroup "default time zones" (fmap testParseTimeZone (knownTimeZones defaultTimeLocale))
militaryTimeZoneTests :: Test
militaryTimeZoneTests = testGroup "military time zones" (fmap (testParseTimeZone . getMilZone) [-12 .. 12])
parse :: ParseTime t => Bool -> String -> String -> Maybe t
parse sp f t = parseTimeM sp defaultTimeLocale f t
format :: (FormatTime t) => String -> t -> String
format f t = formatTime defaultTimeLocale f t
instance Arbitrary Day where
arbitrary = liftM ModifiedJulianDay $ choose (-313698, 2973483) -- 1000-01-1 to 9999-12-31
instance CoArbitrary Day where
coarbitrary (ModifiedJulianDay d) = coarbitrary d
instance Arbitrary DiffTime where
arbitrary = oneof [intSecs, fracSecs] -- up to 1 leap second
where intSecs = liftM secondsToDiffTime' $ choose (0, 86400)
fracSecs = liftM picosecondsToDiffTime' $ choose (0, 86400 * 10^12)
secondsToDiffTime' :: Integer -> DiffTime
secondsToDiffTime' = fromInteger
picosecondsToDiffTime' :: Integer -> DiffTime
picosecondsToDiffTime' x = fromRational (x % 10^12)
instance CoArbitrary DiffTime where
coarbitrary t = coarbitrary (fromEnum t)
instance Arbitrary TimeOfDay where
arbitrary = liftM timeToTimeOfDay arbitrary
instance CoArbitrary TimeOfDay where
coarbitrary t = coarbitrary (timeOfDayToTime t)
instance Arbitrary LocalTime where
arbitrary = liftM2 LocalTime arbitrary arbitrary
instance CoArbitrary LocalTime where
coarbitrary t = coarbitrary (truncate (utcTimeToPOSIXSeconds (localTimeToUTC utc t)) :: Integer)
instance Arbitrary TimeZone where
arbitrary = liftM minutesToTimeZone $ choose (-720,720)
instance CoArbitrary TimeZone where
coarbitrary tz = coarbitrary (timeZoneMinutes tz)
instance Arbitrary ZonedTime where
arbitrary = liftM2 ZonedTime arbitrary arbitrary
instance CoArbitrary ZonedTime where
coarbitrary t = coarbitrary (truncate (utcTimeToPOSIXSeconds (zonedTimeToUTC t)) :: Integer)
instance Arbitrary UTCTime where
arbitrary = liftM2 UTCTime arbitrary arbitrary
instance CoArbitrary UTCTime where
coarbitrary t = coarbitrary (truncate (utcTimeToPOSIXSeconds t) :: Integer)
-- missing from the time package
instance Eq ZonedTime where
ZonedTime t1 tz1 == ZonedTime t2 tz2 = t1 == t2 && tz1 == tz2
compareResult' :: (Eq a,Show a) => String -> a -> a -> Result
compareResult' extra expected found
| expected == found = succeeded
| otherwise = failed {reason = "expected " ++ (show expected) ++ ", found " ++ (show found) ++ extra}
compareResult :: (Eq a,Show a) => a -> a -> Result
compareResult = compareResult' ""
compareParse :: forall a. (Eq a,Show a,ParseTime a) => a -> String -> String -> Result
compareParse expected fmt text = compareResult' (", parsing " ++ (show text)) (Just expected) (parse False fmt text)
--
-- * tests for debugging failing cases
--
test_parse_format :: (FormatTime t,ParseTime t,Show t) => String -> t -> (String,String,Maybe t)
test_parse_format f t = let s = format f t in (show t, s, parse False f s `asTypeOf` Just t)
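-- Illustrative GHCi use (values assumed, not taken from the suite):
--
--   > test_parse_format "%Y-%m-%d" (fromGregorian 2000 1 2)
--   ("2000-01-02","2000-01-02",Just 2000-01-02)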
--
-- * show and read
--
prop_read_show :: (Read a, Show a, Eq a) => a -> Result
prop_read_show t = compareResult [(t,"")] (reads (show t))
prop_read_show' :: (Read a, Show a, Eq a) => a -> Result
prop_read_show' t = compareResult t (read (show t))
--
-- * special show functions
--
prop_parse_showWeekDate :: Day -> Result
prop_parse_showWeekDate d = compareParse d "%G-W%V-%u" (showWeekDate d)
prop_parse_showGregorian :: Day -> Result
prop_parse_showGregorian d = compareParse d "%Y-%m-%d" (showGregorian d)
prop_parse_showOrdinalDate :: Day -> Result
prop_parse_showOrdinalDate d = compareParse d "%Y-%j" (showOrdinalDate d)
--
-- * fromMondayStartWeek and fromSundayStartWeek
--
prop_fromMondayStartWeek :: Day -> Result
prop_fromMondayStartWeek d =
let (w,wd) = mondayStartWeek d
(y,_,_) = toGregorian d
in compareResult d (fromMondayStartWeek y w wd)
prop_fromSundayStartWeek :: Day -> Result
prop_fromSundayStartWeek d =
let (w,wd) = sundayStartWeek d
(y,_,_) = toGregorian d
in compareResult d (fromSundayStartWeek y w wd)
--
-- * format and parse
--
-- | Helper for defining named properties.
prop_named :: (Arbitrary t, Show t, Testable a)
=> String -> (FormatString s -> t -> a) -> String -> FormatString s -> NamedProperty
prop_named n prop typeName f = (n ++ " " ++ typeName ++ " " ++ show f, property (prop f))
prop_parse_format :: (Eq t, FormatTime t, ParseTime t, Show t) => FormatString t -> t -> Result
prop_parse_format (FormatString f) t = compareParse t f (format f t)
prop_parse_format_named :: (Arbitrary t, Eq t, Show t, FormatTime t, ParseTime t)
=> String -> FormatString t -> NamedProperty
prop_parse_format_named = prop_named "prop_parse_format" prop_parse_format
-- Verify case-insensitivity with upper case.
prop_parse_format_upper :: (Eq t, FormatTime t, ParseTime t, Show t) => FormatString t -> t -> Result
prop_parse_format_upper (FormatString f) t = compareParse t f (map toUpper $ format f t)
prop_parse_format_upper_named :: (Arbitrary t, Eq t, Show t, FormatTime t, ParseTime t)
=> String -> FormatString t -> NamedProperty
prop_parse_format_upper_named = prop_named "prop_parse_format_upper" prop_parse_format_upper
-- Verify case-insensitivity with lower case.
prop_parse_format_lower :: (Eq t, FormatTime t, ParseTime t, Show t) => FormatString t -> t -> Result
prop_parse_format_lower (FormatString f) t = compareParse t f (map toLower $ format f t)
prop_parse_format_lower_named :: (Arbitrary t, Eq t, Show t, FormatTime t, ParseTime t)
=> String -> FormatString t -> NamedProperty
prop_parse_format_lower_named = prop_named "prop_parse_format_lower" prop_parse_format_lower
prop_format_parse_format :: (FormatTime t, ParseTime t, Show t) => FormatString t -> t -> Result
prop_format_parse_format (FormatString f) t = compareResult
(Just (format f t))
(fmap (format f) (parse False f (format f t) `asTypeOf` Just t))
prop_format_parse_format_named :: (Arbitrary t, Show t, FormatTime t, ParseTime t)
=> String -> FormatString t -> NamedProperty
prop_format_parse_format_named = prop_named "prop_format_parse_format" prop_format_parse_format
--
-- * crashes in parse
--
newtype Input = Input String
instance Show Input where
show (Input s) = s
instance Arbitrary Input where
arbitrary = liftM Input $ list cs
where cs = elements (['0'..'9'] ++ ['-',' ','/'] ++ ['a'..'z'] ++ ['A' .. 'Z'])
list g = sized (\n -> choose (0,n) >>= \l -> replicateM l g)
instance CoArbitrary Input where
coarbitrary (Input s) = coarbitrary (sum (map ord s))
prop_no_crash_bad_input :: (Eq t, ParseTime t) => FormatString t -> Input -> Property
prop_no_crash_bad_input fs@(FormatString f) (Input s) = property $
case parse False f s of
Nothing -> True
Just t -> t == t `asTypeOf` formatType fs
where
prop_no_crash_bad_input_named :: (Eq t, ParseTime t)
=> String -> FormatString t -> NamedProperty
prop_no_crash_bad_input_named = prop_named "prop_no_crash_bad_input" prop_no_crash_bad_input
--
--
--
newtype FormatString a = FormatString String
formatType :: FormatString t -> t
formatType _ = undefined
castFormatString :: FormatString a -> FormatString b
castFormatString (FormatString f) = FormatString f
instance Show (FormatString a) where
show (FormatString f) = show f
properties :: [NamedProperty]
properties =
[("prop_fromMondayStartWeek", property prop_fromMondayStartWeek),
("prop_fromSundayStartWeek", property prop_fromSundayStartWeek)]
++ [("prop_read_show Day", property (prop_read_show :: Day -> Result)),
("prop_read_show TimeOfDay", property (prop_read_show :: TimeOfDay -> Result)),
("prop_read_show LocalTime", property (prop_read_show :: LocalTime -> Result)),
("prop_read_show TimeZone", property (prop_read_show :: TimeZone -> Result)),
("prop_read_show ZonedTime", property (prop_read_show :: ZonedTime -> Result)),
("prop_read_show UTCTime", property (prop_read_show :: UTCTime -> Result))]
++ [("prop_parse_showWeekDate", property prop_parse_showWeekDate),
("prop_parse_showGregorian", property prop_parse_showGregorian),
("prop_parse_showOrdinalDate", property prop_parse_showOrdinalDate)]
++ map (prop_parse_format_named "Day") dayFormats
++ map (prop_parse_format_named "TimeOfDay") timeOfDayFormats
++ map (prop_parse_format_named "LocalTime") localTimeFormats
++ map (prop_parse_format_named "TimeZone") timeZoneFormats
++ map (prop_parse_format_named "ZonedTime") zonedTimeFormats
++ map (prop_parse_format_named "UTCTime") utcTimeFormats
++ map (prop_parse_format_upper_named "Day") dayFormats
++ map (prop_parse_format_upper_named "TimeOfDay") timeOfDayFormats
++ map (prop_parse_format_upper_named "LocalTime") localTimeFormats
++ map (prop_parse_format_upper_named "TimeZone") timeZoneFormats
++ map (prop_parse_format_upper_named "ZonedTime") zonedTimeFormats
++ map (prop_parse_format_upper_named "UTCTime") utcTimeFormats
++ map (prop_parse_format_lower_named "Day") dayFormats
++ map (prop_parse_format_lower_named "TimeOfDay") timeOfDayFormats
++ map (prop_parse_format_lower_named "LocalTime") localTimeFormats
++ map (prop_parse_format_lower_named "TimeZone") timeZoneFormats
++ map (prop_parse_format_lower_named "ZonedTime") zonedTimeFormats
++ map (prop_parse_format_lower_named "UTCTime") utcTimeFormats
++ map (prop_format_parse_format_named "Day") partialDayFormats
++ map (prop_format_parse_format_named "TimeOfDay") partialTimeOfDayFormats
++ map (prop_format_parse_format_named "LocalTime") partialLocalTimeFormats
++ map (prop_format_parse_format_named "ZonedTime") partialZonedTimeFormats
++ map (prop_format_parse_format_named "UTCTime") partialUTCTimeFormats
++ map (prop_no_crash_bad_input_named "Day") (dayFormats ++ partialDayFormats ++ failingPartialDayFormats)
++ map (prop_no_crash_bad_input_named "TimeOfDay") (timeOfDayFormats ++ partialTimeOfDayFormats)
++ map (prop_no_crash_bad_input_named "LocalTime") (localTimeFormats ++ partialLocalTimeFormats)
++ map (prop_no_crash_bad_input_named "TimeZone") (timeZoneFormats)
++ map (prop_no_crash_bad_input_named "ZonedTime") (zonedTimeFormats ++ partialZonedTimeFormats)
++ map (prop_no_crash_bad_input_named "UTCTime") (utcTimeFormats ++ partialUTCTimeFormats)
dayFormats :: [FormatString Day]
dayFormats = map FormatString
[
-- numeric year, month, day
"%Y-%m-%d","%Y%m%d","%C%y%m%d","%Y %m %e","%m/%d/%Y","%d/%m/%Y","%Y/%d/%m","%D %C","%F",
-- month names
"%Y-%B-%d","%Y-%b-%d","%Y-%h-%d",
-- ordinal dates
"%Y-%j",
-- ISO week dates
"%G-%V-%u","%G-%V-%a","%G-%V-%A","%G-%V-%w", "%A week %V, %G", "day %V, week %A, %G",
"%G-W%V-%u",
"%f%g-%V-%u","%f%g-%V-%a","%f%g-%V-%A","%f%g-%V-%w", "%A week %V, %f%g", "day %V, week %A, %f%g",
"%f%g-W%V-%u",
-- monday and sunday week dates
"%Y-w%U-%A", "%Y-w%W-%A", "%Y-%A-w%U", "%Y-%A-w%W", "%A week %U, %Y", "%A week %W, %Y"
]
timeOfDayFormats :: [FormatString TimeOfDay]
timeOfDayFormats = map FormatString
[
-- 24 h formats
"%H:%M:%S.%q","%k:%M:%S.%q","%H%M%S.%q","%T.%q","%X.%q","%R:%S.%q",
"%H:%M:%S%Q","%k:%M:%S%Q","%H%M%S%Q","%T%Q","%X%Q","%R:%S%Q",
-- 12 h formats
"%I:%M:%S.%q %p","%I:%M:%S.%q %P","%l:%M:%S.%q %p","%r %q",
"%I:%M:%S%Q %p","%I:%M:%S%Q %P","%l:%M:%S%Q %p","%r %Q"
]
localTimeFormats' :: [FormatString LocalTime]
localTimeFormats' = map FormatString $
concat [ [df ++ " " ++ tf, tf ++ " " ++ df] | FormatString df <- dayFormats,
FormatString tf <- timeOfDayFormats]
localTimeFormats :: [FormatString LocalTime]
localTimeFormats = map FormatString [{-"%Q","%Q ","%QX"-}]
timeZoneFormats :: [FormatString TimeZone]
timeZoneFormats = map FormatString ["%z","%z%Z","%Z%z","%Z"]
zonedTimeFormats :: [FormatString ZonedTime]
zonedTimeFormats = map FormatString
["%a, %d %b %Y %H:%M:%S.%q %z", "%a, %d %b %Y %H:%M:%S%Q %z", "%s.%q %z", "%s%Q %z",
"%a, %d %b %Y %H:%M:%S.%q %Z", "%a, %d %b %Y %H:%M:%S%Q %Z", "%s.%q %Z", "%s%Q %Z"]
utcTimeFormats :: [FormatString UTCTime]
utcTimeFormats = map FormatString
["%s.%q","%s%Q"]
--
-- * Formats that do not include all the information
--
partialDayFormats :: [FormatString Day]
partialDayFormats = map FormatString
[ ]
partialTimeOfDayFormats :: [FormatString TimeOfDay]
partialTimeOfDayFormats = map FormatString
[ ]
partialLocalTimeFormats :: [FormatString LocalTime]
partialLocalTimeFormats = map FormatString
[ ]
partialZonedTimeFormats :: [FormatString ZonedTime]
partialZonedTimeFormats = map FormatString
[
-- %s does not include second decimals
"%s %z",
-- %S does not include second decimals
"%c", "%a, %d %b %Y %H:%M:%S %Z"
]
partialUTCTimeFormats :: [FormatString UTCTime]
partialUTCTimeFormats = map FormatString
[
-- %s does not include second decimals
"%s",
-- %c does not include second decimals
"%c"
]
--
-- * Known failures
--
knownFailures :: [NamedProperty]
knownFailures =
map (prop_format_parse_format_named "Day") failingPartialDayFormats
failingPartialDayFormats :: [FormatString Day]
failingPartialDayFormats = map FormatString
[ -- ISO week dates with two digit year.
-- This can fail in the beginning or the end of a year where
-- the ISO week date year does not match the gregorian year.
"%g-%V-%u","%g-%V-%a","%g-%V-%A","%g-%V-%w", "%A week %V, %g", "day %V, week %A, %g",
"%g-W%V-%u"
]
| hvr/time | test/Test/TestParseTime.hs | bsd-3-clause | 22,343 | 0 | 40 | 4,285 | 6,945 | 3,692 | 3,253 | 403 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module HomeTest
( homeSpecs
) where
import qualified Data.List as L
import TestImport
homeSpecs :: Spec
homeSpecs =
ydescribe "These are some example tests" $ do
yit "loads the index and checks it looks right" $ do
get HomeR
statusIs 200
htmlAllContain "h1" "Hello"
request $ do
setMethod "POST"
setUrl HomeR
addNonce
fileByLabel "Choose a file" "tests/main.hs" "text/plain" -- talk about self-reference
byLabel "What's on the file?" "Some Content"
statusIs 200
printBody
htmlCount ".message" 1
htmlAllContain ".message" "Some Content"
htmlAllContain ".message" "text/plain"
-- This is a simple example of using a database access in a test. The
-- test will succeed for a fresh scaffolded site with an empty database,
-- but will fail on an existing database with a non-empty user table.
yit "leaves the user table empty" $ do
get HomeR
statusIs 200
users <- runDB $ selectList ([] :: [Filter User]) []
assertEqual "user table empty" 0 $ L.length users
| Codas/campaign-next | tests/HomeTest.hs | bsd-3-clause | 1,294 | 0 | 16 | 463 | 216 | 97 | 119 | 28 | 1 |
module Feature.ProxySpec where
import Test.Hspec hiding (pendingWith)
import SpecHelper
import Network.Wai (Application)
import Protolude hiding (get)
spec :: SpecWith Application
spec =
describe "GET / with proxy" $
it "returns a valid openapi spec with proxy" $
validateOpenApiResponse [("Accept", "application/openapi+json")]
| Skyfold/postgrest | test/Feature/ProxySpec.hs | mit | 346 | 0 | 8 | 57 | 78 | 45 | 33 | 10 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE RankNTypes #-}
module Language.Haskell.Refact.Utils.ExactPrint
(
replace
, replaceAnnKey
, copyAnn
, setAnnKeywordDP
, clearPriorComments
, balanceAllComments
, locate
, addEmptyAnn
, addAnnVal
, addAnn
, zeroDP
, setDP
, handleParseResult
, removeAnns
, synthesizeAnns
, addNewKeyword
, addNewKeywords
) where
import qualified GHC as GHC
import qualified Data.Generics as SYB
import Control.Monad
import Language.Haskell.GHC.ExactPrint.Transform
import Language.Haskell.GHC.ExactPrint.Types
import Language.Haskell.GHC.ExactPrint.Utils
import Language.Haskell.Refact.Utils.GhcUtils
import Language.Haskell.Refact.Utils.Monad
import Language.Haskell.Refact.Utils.MonadFunctions
import qualified Data.Map as Map
-- ---------------------------------------------------------------------
-- ++AZ++:TODO: Move this to ghc-exactprint
-- |The annotations are keyed to the constructor, so if we replace a qualified
-- with an unqualified RdrName or vice versa we have to rebuild the key for the
-- appropriate annotation.
replaceAnnKey :: (SYB.Data old,SYB.Data new)
=> GHC.Located old -> GHC.Located new -> Anns -> Anns
replaceAnnKey old new ans =
case Map.lookup (mkAnnKey old) ans of
Nothing -> ans
Just v -> anns'
where
anns1 = Map.delete (mkAnnKey old) ans
anns' = Map.insert (mkAnnKey new) v anns1
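-- A minimal usage sketch (the located names are hypothetical): after
-- rewriting a located qualified 'RdrName' @oldName@ into an unqualified
-- @newName@, carry its annotation across so exact-printing still finds it:
--
-- > anns' = replaceAnnKey oldName newName anns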
-- ---------------------------------------------------------------------
-- ++AZ++ TODO: migrate this to ghc-exactprint
copyAnn :: (SYB.Data old,SYB.Data new)
=> GHC.Located old -> GHC.Located new -> Anns -> Anns
copyAnn old new ans =
case Map.lookup (mkAnnKey old) ans of
Nothing -> ans
Just v -> Map.insert (mkAnnKey new) v ans
-- ---------------------------------------------------------------------
-- | Replaces an old expression with a new expression
replace :: AnnKey -> AnnKey -> Anns -> Maybe Anns
replace old new ans = do
let as = ans
oldan <- Map.lookup old as
newan <- Map.lookup new as
let newan' = Ann
{ annEntryDelta = annEntryDelta oldan
-- , annDelta = annDelta oldan
-- , annTrueEntryDelta = annTrueEntryDelta oldan
, annPriorComments = annPriorComments oldan
, annFollowingComments = annFollowingComments oldan
, annsDP = moveAnns (annsDP oldan) (annsDP newan)
, annSortKey = annSortKey oldan
, annCapturedSpan = annCapturedSpan oldan
}
return ((\anns -> Map.delete old . Map.insert new newan' $ anns) ans)
-- ---------------------------------------------------------------------
-- | Shift the first output annotation into the correct place
moveAnns :: [(KeywordId, DeltaPos)] -> [(KeywordId, DeltaPos)] -> [(KeywordId, DeltaPos)]
moveAnns [] xs = xs
moveAnns ((_, dp): _) ((kw, _):xs) = (kw,dp) : xs
moveAnns _ [] = []
-- ---------------------------------------------------------------------
-- |Change the @DeltaPos@ for a given @KeywordId@ if it appears in the
-- annotation for the given item.
setAnnKeywordDP :: (SYB.Data a) => GHC.Located a -> KeywordId -> DeltaPos -> Transform ()
setAnnKeywordDP la kw dp = modifyAnnsT changer
where
changer ans = case Map.lookup (mkAnnKey la) ans of
Nothing -> ans
Just an -> Map.insert (mkAnnKey la) (an {annsDP = map update (annsDP an)}) ans
update (kw',dp')
| kw == kw' = (kw',dp)
| otherwise = (kw',dp')
-- ---------------------------------------------------------------------
-- |Remove any preceding comments from the given item
clearPriorComments :: (SYB.Data a) => GHC.Located a -> Transform ()
clearPriorComments la = do
edp <- getEntryDPT la
modifyAnnsT $ \ans ->
case Map.lookup (mkAnnKey la) ans of
Nothing -> ans
Just an -> Map.insert (mkAnnKey la) (an {annPriorComments = [] }) ans
setEntryDPT la edp
-- ---------------------------------------------------------------------
balanceAllComments :: SYB.Data a => GHC.Located a -> Transform (GHC.Located a)
balanceAllComments la
-- Must be top-down
= everywhereM' (SYB.mkM inMod
`SYB.extM` inExpr
`SYB.extM` inMatch
`SYB.extM` inStmt
) la
where
inMod :: GHC.ParsedSource -> Transform (GHC.ParsedSource)
inMod m = doBalance m
inExpr :: GHC.LHsExpr GHC.RdrName -> Transform (GHC.LHsExpr GHC.RdrName)
inExpr e = doBalance e
inMatch :: (GHC.LMatch GHC.RdrName (GHC.LHsExpr GHC.RdrName)) -> Transform (GHC.LMatch GHC.RdrName (GHC.LHsExpr GHC.RdrName))
inMatch m = doBalance m
inStmt :: GHC.LStmt GHC.RdrName (GHC.LHsExpr GHC.RdrName) -> Transform (GHC.LStmt GHC.RdrName (GHC.LHsExpr GHC.RdrName))
inStmt s = doBalance s
-- |Balance all comments between adjacent decls, as well as pushing all
-- trailing comments to the right place.
{-
e.g., for
foo = do
return x
where
x = ['a'] -- do
bar = undefined
the "-- do" comment must end up in the trailing comments for "x = ['a']"
-}
doBalance t = do
decls <- hsDecls t
let
go [] = return []
go [x] = return [x]
go (x1:x2:xs) = do
balanceComments x1 x2
go (x2:xs)
_ <- go decls
-- replaceDecls t decls'
unless (null decls) $ moveTrailingComments t (last decls)
return t
--This generates a unique location and wraps the given ast chunk with that location
--Also adds an empty annotation at that location
locate :: (SYB.Data a) => a -> RefactGhc (GHC.Located a)
locate ast = do
loc <- liftT uniqueSrcSpanT
let res = (GHC.L loc ast)
addEmptyAnn res
return res
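-- Usage sketch (@newExpr@ is a hypothetical, freshly constructed AST
-- fragment): wrap it in a unique SrcSpan and then give it an "AnnVal"
-- annotation so it can be exact-printed:
--
-- > do lexpr <- locate newExpr
-- >    addAnnVal lexpr
-- >    return lexpr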
--Adds an empty annotation at the provided location
addEmptyAnn :: (SYB.Data a) => GHC.Located a -> RefactGhc ()
addEmptyAnn a = addAnn a annNone
--Adds an "AnnVal" annotation at the provided location
addAnnVal :: (SYB.Data a) => GHC.Located a -> RefactGhc ()
addAnnVal a = addAnn a valAnn
where valAnn = annNone {annEntryDelta = DP (0,1), annsDP = [(G GHC.AnnVal, DP (0,0))]}
--Adds the given annotation at the provided location
addAnn :: (SYB.Data a) => GHC.Located a -> Annotation -> RefactGhc ()
addAnn a ann = do
currAnns <- fetchAnnsFinal
let k = mkAnnKey a
setRefactAnns $ Map.insert k ann currAnns
--Sets the entry delta position of an ast chunk
setDP :: (SYB.Data a) => DeltaPos -> GHC.Located a -> RefactGhc ()
setDP dp ast = do
currAnns <- fetchAnnsFinal
let k = mkAnnKey ast
mv = Map.lookup k currAnns
case mv of
Nothing -> return ()
Just v -> addAnn ast (v {annEntryDelta = dp})
--Resets the given AST chunk's delta position to zero.
zeroDP :: (SYB.Data a) => GHC.Located a -> RefactGhc ()
zeroDP = setDP (DP (0,0))
--This just pulls out the successful result from an exact print parser or throws an error if the parse was unsuccessful.
handleParseResult :: String -> Either (GHC.SrcSpan, String) (Anns, a) -> RefactGhc (Anns, a)
handleParseResult msg e = case e of
(Left (_, errStr)) -> error $ "The parse from: " ++ msg ++ " with error: " ++ errStr
(Right res) -> return res
-- This creates an empty annotation for every located item where an annotation does not already exist in the given AST chunk
synthesizeAnns :: (SYB.Data a) => a -> RefactGhc a
synthesizeAnns = generic `SYB.ext2M` located
where generic :: SYB.Data a => a -> RefactGhc a
generic a = do
_ <- SYB.gmapM synthesizeAnns a
return a
located :: (SYB.Data b, SYB.Data loc) => GHC.GenLocated loc b -> RefactGhc (GHC.GenLocated loc b)
located b@(GHC.L ss a) = case SYB.cast ss of
Just (s :: GHC.SrcSpan) -> do
--logm $ "Located found: " ++ (show $ toConstr a)
anns <- fetchAnnsFinal
let castRes = (GHC.L s a)
ann = getAnnotationEP castRes anns
--logm $ "Found ann: " ++ show ann
case ann of
Nothing -> do
--logm "No ann found for located item"
let newKey = mkAnnKey castRes
newAnns = Map.insert newKey annNone anns
setRefactAnns newAnns
return ()
_ -> return ()
_ <- SYB.gmapM synthesizeAnns b
return b
Nothing ->
return b
-- This removes all the annotations associated with the given AST chunk.
removeAnns :: (SYB.Data a) => a -> RefactGhc a
removeAnns = generic `SYB.ext2M` located
where generic :: SYB.Data a => a -> RefactGhc a
generic a = do
_ <- SYB.gmapM synthesizeAnns a
return a
located :: (SYB.Data b, SYB.Data loc) => GHC.GenLocated loc b -> RefactGhc (GHC.GenLocated loc b)
located b@(GHC.L ss a) = case SYB.cast ss of
Just (s :: GHC.SrcSpan) -> do
anns <- fetchAnnsFinal
let k = mkAnnKey (GHC.L s a)
logm $ "Deleting ann at: " ++ (show s)
setRefactAnns $ Map.delete k anns
_ <- SYB.gmapM removeAnns b
return b
Nothing -> return b
--This takes in a located ast chunk and adds the provided keyword and delta position into the annsDP list
--If there is not annotation associated with the chunk nothing happens
addNewKeyword :: (SYB.Data a) => (KeywordId, DeltaPos) -> GHC.Located a -> RefactGhc ()
addNewKeyword entry a = do
anns <- liftT getAnnsT
let key = mkAnnKey a
mAnn = Map.lookup key anns
case mAnn of
Nothing -> return ()
(Just ann) -> do
let newAnn = ann{annsDP = (entry:(annsDP ann))}
setRefactAnns $ Map.insert key newAnn anns
addNewKeywords :: (SYB.Data a) => [(KeywordId, DeltaPos)] -> GHC.Located a -> RefactGhc ()
addNewKeywords entries a = mapM_ ((flip addNewKeyword) a) entries
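-- Usage sketch (@litem@ is a hypothetical located item): append one extra
-- keyword/delta pair to whatever annotations the item already carries:
--
-- > addNewKeyword (G GHC.AnnVal, DP (0,1)) litem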
|
RefactoringTools/HaRe
|
src/Language/Haskell/Refact/Utils/ExactPrint.hs
|
bsd-3-clause
| 10,046 | 0 | 21 | 2,569 | 2,824 | 1,445 | 1,379 | 188 | 3 |
{-# OPTIONS_GHC -Wall -fno-warn-unused-do-bind #-}
module Parse.Type where
import Control.Applicative ((<$>),(<*>),(<*))
import Data.List (intercalate)
import Text.Parsec ((<|>), (<?>), char, many, optionMaybe, string, try)
import qualified AST.Type as Type
import qualified AST.Variable as Var
import Parse.Helpers
import qualified Reporting.Annotation as A
import qualified Reporting.Region as R
tvar :: IParser Type.Raw
tvar =
addLocation
(Type.RVar <$> lowVar <?> "a type variable")
tuple :: IParser Type.Raw
tuple =
do (start, types, end) <- located (parens (commaSep expr))
case types of
[t] -> return t
_ -> return (Type.tuple (R.Region start end) types)
record :: IParser Type.Raw
record =
addLocation $
do char '{'
whitespace
rcrd <- extended <|> normal
dumbWhitespace
char '}'
return rcrd
where
normal = flip Type.RRecord Nothing <$> commaSep field
-- extended record types require at least one field
extended =
do ext <- try (addLocation lowVar <* (whitespace >> string "|"))
whitespace
fields <- commaSep1 field
return (Type.RRecord fields (Just (A.map Type.RVar ext)))
field =
do lbl <- rLabel
whitespace >> hasType >> whitespace
(,) lbl <$> expr
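-- Examples of the concrete record syntax accepted above (illustrative):
--
-- > { x : Int, y : String }   -- a plain record type
-- > { r | x : Int }           -- an extended record type (needs at least one field)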
capTypeVar :: IParser String
capTypeVar =
intercalate "." <$> dotSep1 capVar
constructor0 :: IParser Type.Raw
constructor0 =
addLocation $
do name <- capTypeVar
return (Type.RType (Var.Raw name))
term :: IParser Type.Raw
term =
tuple <|> record <|> tvar <|> constructor0
app :: IParser Type.Raw
app =
do start <- getMyPosition
f <- constructor0 <|> try tupleCtor <?> "a type constructor"
args <- spacePrefix term
end <- getMyPosition
case args of
[] -> return f
_ -> return (A.A (R.Region start end) (Type.RApp f args))
where
tupleCtor =
addLocation $
do n <- length <$> parens (many (char ','))
let ctor = "_Tuple" ++ show (if n == 0 then 0 else n+1)
return (Type.RType (Var.Raw ctor))
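-- Worked example of the tuple-constructor naming above: "(,)" contains one
-- comma, so n == 1 and the synthesized name is "_Tuple2"; "(,,)" gives
-- "_Tuple3"; "()" contains no commas and gives "_Tuple0".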
expr :: IParser Type.Raw
expr =
do start <- getMyPosition
t1 <- app <|> term
arr <- optionMaybe $ try (whitespace >> rightArrow)
case arr of
Nothing ->
return t1
Just _ ->
do whitespace
t2 <- expr
end <- getMyPosition
return (A.A (R.Region start end) (Type.RLambda t1 t2))
constructor :: IParser (String, [Type.Raw])
constructor =
(,) <$> (capTypeVar <?> "another type constructor")
<*> spacePrefix term
|
johnpmayer/elm-compiler
|
src/Parse/Type.hs
|
bsd-3-clause
| 2,639 | 0 | 17 | 739 | 910 | 461 | 449 | 81 | 3 |
{-# LANGUAGE CPP #-}
module Set ( Set, member, empty, insert ) where
import Data.Set
#if defined(__GLASGOW_HASKELL__) && __GLASGOW_HASKELL__ < 603
member :: Ord a => a -> Set a -> Bool
member = elementOf
empty :: Set a
empty = emptySet
insert :: Ord a => a -> Set a -> Set a
insert = flip addToSet
#endif
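-- Behaviour is the same on old and new GHCs; a tiny usage sketch:
--
-- > member 3 (insert 3 empty) == True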
|
beni55/alex
|
src/Set.hs
|
bsd-3-clause
| 311 | 0 | 8 | 65 | 101 | 55 | 46 | 3 | 0 |
{-# LANGUAGE ScopedTypeVariables #-}
module Greek where
import Prelude hiding (words)
import qualified Data.ByteString as BS
import qualified Data.Either as Either
import qualified Data.List as List
import qualified Data.Maybe as Maybe
import Data.Map (Map)
import qualified Data.Map.Strict as Map
import qualified Data.Serialize as Serialize
import qualified Data.Set as Set
import Data.Text (Text)
import qualified Data.Text as Text
import qualified Data.Text.IO as Text
import System.FilePath ((</>))
import System.FilePath.Find
import Prepare
import Prepare.Sblgnt.Parser (sblgnt)
import Prepare.Sblgnt.Model (Sblgnt)
import qualified Prepare.Sblgnt.Unify as Sblgnt
import Prepare.Perseus.Paths (perseusShortList)
import Prepare.Perseus.TeiEpidocParser (tei)
import Prepare.Perseus.TeiEpidocModel (Tei)
import qualified Prepare.Perseus.TeiEpidocUnify as Tei
import qualified Primary as Primary
import qualified Prepare.Source.DecomposeWords as Decompose
import qualified Prepare.Source.Output as Output
outputSblgntAgda :: Sblgnt -> IO ()
outputSblgntAgda s = do
let g = Sblgnt.unify s
_ <- printAffixes g
let m = Output.groupModule g
let baseDir = "../agda-primary/src"
mapM_ (Output.writeModule baseDir) (Output.flatModules m)
outputBinaryGroups :: FilePath -> [Primary.Group] -> IO ()
outputBinaryGroups modulesPath gs = do
let encoded = Serialize.encode . Decompose.decomposeGroups $ gs
let path = modulesPath </> "binary-primary/data/groups.data"
_ <- printText ["Writing", Text.pack path]
BS.writeFile path encoded
getWords :: Primary.Content -> [Primary.Word]
getWords (Primary.ContentWord w) = [w]
getWords (Primary.ContentMilestone _) = []
printAffixes :: Primary.Group -> IO ()
printAffixes g = do
let sources = Primary.groupSources g
let contents = concatMap Primary.sourceContents sources
let words = concatMap getWords contents
let prefixes = Set.fromList . fmap Primary.wordPrefix $ words
let suffixes = Set.fromList . fmap Primary.wordSuffix $ words
let printTexts = mapM_ (Text.putStrLn . (\x -> Text.concat ["\"", x, "\""]))
_ <- putStrLn "Prefixes"
_ <- printTexts prefixes
_ <- putStrLn "Suffixes"
_ <- printTexts suffixes
return ()
showResult :: (Sblgnt -> IO ()) -> Either String Sblgnt -> IO ()
showResult _ (Left x) = putStrLn x
showResult f (Right x) = f x
printText :: [Text] -> IO ()
printText = Text.putStrLn . Text.intercalate " "
tryParseTei :: FilePath -> IO (Either String Tei)
tryParseTei xmlPath = loadParse xmlPath tei logBook
showParsingFiles :: [FilePath] -> IO ()
showParsingFiles files = do
results <- mapM (\x -> tryParseTei x >>= \y -> return (y, x)) $ files
_ <- mapM_ (\(x, y) -> putStrLn $ (if x then "✓ " else "× ") ++ y) . List.sort . fmap (\(x, y) -> (Either.isRight x, y)) $ results
putStrLn $ show (length . filter (\(x, _) -> Either.isRight x) $ results) ++ " files parsed"
getPrimaryWords :: Primary.Content -> [Primary.Word]
getPrimaryWords (Primary.ContentWord w) = [w]
getPrimaryWords (Primary.ContentMilestone _) = []
dumpAffixes :: [Primary.Group] -> IO ()
dumpAffixes gs = do
_ <- putStrLn "Prefixes: "
mapM_ (Text.putStrLn . Text.append " ") prefixes
_ <- putStrLn ""
_ <- putStrLn "Suffixes: "
mapM_ (Text.putStrLn . Text.append " ") suffixes
where
prefixes
= Set.fromList
. fmap Primary.wordPrefix
$ words
suffixes
= Set.fromList
. fmap Primary.wordSuffix
$ words
words
= concatMap getPrimaryWords
. concatMap Primary.sourceContents
. concatMap Primary.groupSources
$ gs
dumpInvalidWords :: [Primary.Group] -> IO ()
dumpInvalidWords gs = mapM_ dumpInvalids $ concatMap Primary.groupSources gs
where
dumpInvalids s =
let invalids = getInvalids (concatMap getPrimaryWords $ Primary.sourceContents s)
in
if List.null invalids
then return ()
else do
_ <- Text.putStrLn (Primary.sourceId s)
mapM_ (Text.putStrLn . Text.append " ") invalids
getInvalids
= Set.fromList
. fmap (\(Primary.Word x y z) -> Text.intercalate " || " [x, y, z])
. filter isInvalid
isInvalid (Primary.Word p t s)
= Text.null t
|| not (Text.all isCore t)
|| Text.any isGreekChar p
|| Text.any isGreekChar s
isGreekChar x
= x /= '\x037e' -- Greek question mark
&& ((x >= '\x0370' && x <= '\x03ff')
|| (x >= '\x1f00' && x <= '\x1fff'))
isCore x = isGreekChar x || x == '\x2019' -- apostrophe
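-- Character-class sketch for the predicates above (illustrative): 'α'
-- (U+03B1) and 'ᾶ' (U+1FB6) count as Greek, the Greek question mark
-- (U+037E) is explicitly excluded, and any word whose text contains a
-- Latin letter such as 'a' is reported as invalid.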
loadAllGroups :: FilePath -> IO [Primary.Group]
loadAllGroups dataPath = do
let sblgntFile = dataPath </> "xml-sblgnt/sblgnt.xml"
_ <- printText ["Reading", Text.pack sblgntFile]
sblgntResult <- (fmap . fmap) (Sblgnt.unify) $ loadParse sblgntFile sblgnt emptyLog
let
parseUnify x = do
_ <- putStrLn $ "Reading " ++ x
t <- (fmap . fmap) Tei.unify . tryParseTei $ x
_ <- case t of
Left e -> putStrLn $ " " ++ e
Right r -> Text.putStrLn $ Text.concat
[ " "
, Maybe.maybe "-" id . Primary.sourceAuthor $ r
, " -- "
, Text.intercalate " " . Text.words . Primary.sourceTitle $ r
]
return t
perseusSources <- mapM parseUnify (perseusShortList dataPath)
let perseusGroup = Tei.perseusGroup { Primary.groupSources = Either.rights perseusSources }
let
tryAdd (Left _) xs = xs
tryAdd (Right x) xs = x : xs
let successful = tryAdd sblgntResult [perseusGroup]
return successful
showAllLoadResults :: [FilePath] -> IO ()
showAllLoadResults files = do
let
handleSingle x = do
_ <- putStrLn ""
_ <- putStrLn x
y <- tryParseTei x
case y of
Left e -> putStrLn $ "ERROR: " ++ take 1000 e
Right _ -> putStrLn $ "SUCCESS"
mapM_ handleSingle files
showSingleLoadResult :: FilePath -> IO ()
showSingleLoadResult file =
tryParseTei file >>= \case
Left e -> putStrLn $ e
Right _ -> putStrLn "Success!"
findPerseusFiles :: FilePath -> IO [FilePath]
findPerseusFiles dataPath = do
let perseusDir = dataPath </> "xml-perseus-greek"
perseusFiles <- find always (fileName ~~? "*-grc*.xml") perseusDir
_ <- putStrLn $ (show . length $ perseusFiles) ++ " perseus files"
return perseusFiles
findPapyriFiles :: FilePath -> IO [FilePath]
findPapyriFiles dataPath = do
let papyriDir = dataPath </> "xml-papyri/DDB_EpiDoc_XML/"
find always (fileName ~~? "*.xml") papyriDir
commands :: FilePath -> FilePath -> Map String (IO ())
commands dataPath modulesPath = Map.fromList
[ ("save", loadAllGroups dataPath >>= outputBinaryGroups modulesPath)
, ("dump-affixes", loadAllGroups dataPath >>= dumpAffixes)
, ("dump-invalid-words", loadAllGroups dataPath >>= dumpInvalidWords)
, ("show-parsing", findPerseusFiles dataPath >>= showParsingFiles)
, ("show-all", findPerseusFiles dataPath >>= showAllLoadResults)
, ("show-short-list", showParsingFiles (perseusShortList dataPath))
, ("show-single", showSingleLoadResult (dataPath </> "xml-perseus-greek/data/tlg0032/tlg002/tlg0032.tlg002.perseus-grc2.xml"))
]
|
ancientlanguage/haskell-analysis
|
prepare/app/Greek.hs
|
mit
| 7,028 | 0 | 23 | 1,382 | 2,412 | 1,225 | 1,187 | -1 | -1 |
{-# LANGUAGE BangPatterns
, CPP
, RecordWildCards
, TypeFamilies
, TypeOperators #-}
module BigPixel.BigRGBPixel (
BigRGBPixel (..), BigRGB, BigRGBDelayed
) where
#if __GLASGOW_HASKELL__ < 710
import Control.Applicative ((<$>), (<*>))
#endif
import Vision.Image.RGB.Type
import Data.Int
import Foreign.Storable (Storable (..))
import Foreign.Ptr (castPtr, plusPtr)
import Vision.Image.Class (Pixel (..))
import Vision.Image.Interpolate (Interpolable (..))
import Vision.Image.Type (Manifest, Delayed)
data BigRGBPixel = BigRGBPixel {
rgbRed :: {-# UNPACK #-} !Int16, rgbGreen :: {-# UNPACK #-} !Int16
, rgbBlue :: {-# UNPACK #-} !Int16
} deriving (Eq, Show)
type BigRGB = Manifest BigRGBPixel
type BigRGBDelayed = Delayed BigRGBPixel
instance Storable BigRGBPixel where
sizeOf _ = 3 * sizeOf (undefined :: Int16)
{-# INLINE sizeOf #-}
alignment _ = alignment (undefined :: Int16)
{-# INLINE alignment #-}
-- Each channel is an Int16 (2 bytes) and plusPtr advances in bytes, so the
-- green and blue channels live at byte offsets 2 and 4.
peek !ptr =
let !ptr' = castPtr ptr
in BigRGBPixel <$> peek ptr' <*> peek (ptr' `plusPtr` 2)
<*> peek (ptr' `plusPtr` 4)
{-# INLINE peek #-}
poke !ptr BigRGBPixel { .. } =
let !ptr' = castPtr ptr
in poke ptr' rgbRed >>
poke (ptr' `plusPtr` 2) rgbGreen >>
poke (ptr' `plusPtr` 4) rgbBlue
{-# INLINE poke #-}
instance Pixel BigRGBPixel where
type PixelChannel BigRGBPixel = Int16
pixNChannels _ = 3
{-# INLINE pixNChannels #-}
pixIndex !(BigRGBPixel r _ _) 0 = r
pixIndex !(BigRGBPixel _ g _) 1 = g
pixIndex !(BigRGBPixel _ _ b) _ = b
{-# INLINE pixIndex #-}
instance Interpolable BigRGBPixel where
interpol f a b =
let BigRGBPixel aRed aGreen aBlue = a
BigRGBPixel bRed bGreen bBlue = b
in BigRGBPixel {
rgbRed = f aRed bRed, rgbGreen = f aGreen bGreen
, rgbBlue = f aBlue bBlue
}
{-# INLINE interpol #-}
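-- Usage sketch: 'interpol' combines two pixels channel-wise with a
-- caller-supplied function, e.g. averaging (illustrative values):
--
-- > interpol (\a b -> (a + b) `div` 2) (BigRGBPixel 0 0 0) (BigRGBPixel 100 200 300)
-- > == BigRGBPixel 50 100 150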
|
eklinkhammer/haskell-vision
|
BigPixel/BigRGBPixel.hs
|
mit
| 2,033 | 0 | 12 | 594 | 552 | 303 | 249 | 53 | 0 |
{-# htermination isAlphaNum :: Char -> Bool #-}
|
ComputationWithBoundedResources/ara-inference
|
doc/tpdb_trs/Haskell/full_haskell/Prelude_isAlphaNum_1.hs
|
mit
| 48 | 0 | 2 | 8 | 3 | 2 | 1 | 1 | 0 |
module Bank.Models.Account
( module X
) where
import Bank.Models.Account.CommandHandler as X
import Bank.Models.Account.Commands as X
import Bank.Models.Account.Events as X
import Bank.Models.Account.Projection as X
|
jdreaver/eventful
|
examples/bank/src/Bank/Models/Account.hs
|
mit
| 221 | 0 | 4 | 28 | 49 | 37 | 12 | 6 | 0 |
--
-- This code was created by Jeff Molofee '99 (ported to Haskell GHC 2005)
--
module Main where
import qualified Graphics.UI.GLFW as GLFW
-- everything from here starts with gl or GL
import Graphics.Rendering.OpenGL.Raw
import Graphics.Rendering.GLU.Raw ( gluPerspective )
import Data.Bits ( (.|.) )
import System.Exit ( exitWith, ExitCode(..) )
import Control.Monad ( forever )
initGL :: IO ()
initGL = do
glShadeModel gl_SMOOTH -- enables smooth color shading
glClearColor 0 0 0 0 -- Clear the background color to black
glClearDepth 1 -- enables clearing of the depth buffer
glEnable gl_DEPTH_TEST
glDepthFunc gl_LEQUAL -- type of depth test
glHint gl_PERSPECTIVE_CORRECTION_HINT gl_NICEST
resizeScene :: GLFW.WindowSizeCallback
resizeScene w 0 = resizeScene w 1 -- prevent divide by zero
resizeScene width height = do
glViewport 0 0 (fromIntegral width) (fromIntegral height)
glMatrixMode gl_PROJECTION
glLoadIdentity
gluPerspective 45 (fromIntegral width/fromIntegral height) 0.1 100
glMatrixMode gl_MODELVIEW
glLoadIdentity
glFlush
drawScene :: IO ()
drawScene = do
-- clear the screen and the depth buffer
glClear $ fromIntegral $ gl_COLOR_BUFFER_BIT
.|. gl_DEPTH_BUFFER_BIT
glLoadIdentity -- reset view
glTranslatef (-1.5) 0 (-6.0) --Move left 1.5 Units and into the screen 6.0
-- draw a triangle
glBegin gl_TRIANGLES
glVertex3f 0 1 0 -- top
glVertex3f 1 (-1) 0 -- bottom right
glVertex3f (-1) (-1) 0 -- bottom left
glEnd
glTranslatef 3 0 0 -- move right three units
glBegin gl_QUADS
glVertex3f (-1) 1 0 -- top left
glVertex3f 1 1 0 -- top right
glVertex3f 1 (-1) 0 -- bottom right
glVertex3f (-1) (-1) 0 -- bottom left
glEnd
glFlush
shutdown :: GLFW.WindowCloseCallback
shutdown = do
GLFW.closeWindow
GLFW.terminate
_ <- exitWith ExitSuccess
return True
keyPressed :: GLFW.KeyCallback
keyPressed GLFW.KeyEsc True = shutdown >> return ()
keyPressed _ _ = return ()
main :: IO ()
main = do
True <- GLFW.initialize
-- select type of display mode:
-- Double buffer
-- RGBA color
-- Alpha components supported
-- Depth buffer
let dspOpts = GLFW.defaultDisplayOptions
-- get a 800 x 600 window
{ GLFW.displayOptions_width = 800
, GLFW.displayOptions_height = 600
-- Set depth buffering and RGBA colors
, GLFW.displayOptions_numRedBits = 8
, GLFW.displayOptions_numGreenBits = 8
, GLFW.displayOptions_numBlueBits = 8
, GLFW.displayOptions_numAlphaBits = 8
, GLFW.displayOptions_numDepthBits = 1
-- , GLFW.displayOptions_displayMode = GLFW.Fullscreen
}
-- open a window
True <- GLFW.openWindow dspOpts
-- window starts at upper left corner of the screen
GLFW.setWindowPosition 0 0
GLFW.setWindowTitle "Jeff Molofee's GL Code Tutorial ... NeHe '99"
-- register the function to do all our OpenGL drawing
GLFW.setWindowRefreshCallback drawScene
-- register the function called when our window is resized
GLFW.setWindowSizeCallback resizeScene
-- register the function called when the keyboard is pressed.
GLFW.setKeyCallback keyPressed
GLFW.setWindowCloseCallback shutdown
-- initialize our window.
initGL
-- start event processing engine
forever $ do
drawScene
GLFW.swapBuffers
|
spetz911/progames
|
nehe-tuts-0.2.3/lesson02.hs
|
mit
| 3,600 | 0 | 11 | 969 | 711 | 361 | 350 | 75 | 1 |
{-# LANGUAGE MultiParamTypeClasses, FunctionalDependencies,
FlexibleInstances, UndecidableInstances,
FlexibleContexts,
LambdaCase #-}
module Control.Monad.Activatable.Class (
-- * The 'MonadActivatable' class
MonadActivatable(..),
switching', activateWith, activate,
-- * Activation-related types
ActivationError(..), Switched(..)
) where
import HsToCoq.Util.Functor
import Control.Monad.Error.Class
import Control.Monad.Trans
import Data.Foldable
import qualified Control.Monad.Trans.Activatable as A
import qualified Control.Monad.Trans.Identity as I
import qualified Control.Monad.Trans.Reader as R
import qualified Control.Monad.Trans.Writer.Strict as WS
import qualified Control.Monad.Trans.Writer.Lazy as WL
import qualified Control.Monad.Trans.State.Strict as SS
import qualified Control.Monad.Trans.State.Lazy as SL
import qualified Control.Monad.Trans.RWS.Strict as RWSS
import qualified Control.Monad.Trans.RWS.Lazy as RWSL
import Control.Monad.Trans.Activatable hiding (tryActivate, switching, switching')
-- |The idea is thus:
--
-- @
--                          /\
--                         /  \
--                        /    \  /\
-- ____________________/      \/  \__________________
-- |                   |         |          |
-- basic               activated residual   basic
-- @
--
-- Or, in code:
--
-- @
-- >>> basic = pure 'b'
-- >>> activated = pure ('A','X')
-- >>> cmd = switching' basic activated
-- >>> activatedList = finalizeActivatableT @(Either _) (const Nothing) . sequence
-- >>> activatedList [cmd, cmd, cmd, cmd, cmd]
-- Right "bbbbb"
-- >>> activatedList [cmd, activateWith Just *> cmd, cmd, cmd, cmd]
-- Right "bAXbb"
-- >>> activatedList [cmd, cmd, cmd, activateWith Just *> cmd, cmd]
-- Right "bbbAX"
-- >>> activatedList [cmd, activateWith Just *> cmd, cmd, activateWith Just *> cmd, cmd]
-- Right "bAXAX"
-- >>> activatedList [cmd, activateWith Just *> activateWith Just *> cmd, cmd, cmd, cmd]
-- Left (Just DoubleActivation)
-- >>> activatedList [cmd, activateWith Just *> cmd, activateWith Just *> cmd, cmd, cmd]
-- Left (Just EarlyActivation)
-- >>> activatedList [cmd, cmd, cmd, cmd, activateWith Just *> cmd]
-- Left Nothing
-- @
class Monad m => MonadActivatable x m | m -> x where
tryActivate :: m (Maybe ActivationError)
switching :: m b -> m (a, x) -> m (Switched b a x)
switching' :: MonadActivatable a m => m a -> m (a, a) -> m a
switching' basic activated = switching basic activated <&> \case
Basic b -> b
Activated a -> a
Residual x -> x
activateWith :: MonadActivatable x m => (ActivationError -> m ()) -> m ()
activateWith handleAE = traverse_ handleAE =<< tryActivate
activate :: (MonadError ActivationError m, MonadActivatable x m) => m ()
activate = activateWith throwError
--------------------------------------------------------------------------------
-- Instances
-- See "Instance helpers" below
instance Monad m => MonadActivatable x (ActivatableT x m) where
tryActivate = A.tryActivate
switching = A.switching
instance MonadActivatable x m => MonadActivatable x (I.IdentityT m) where
tryActivate = lift tryActivate
switching (I.IdentityT basic) (I.IdentityT activated) =
I.IdentityT $ switching basic activated
instance MonadActivatable x m => MonadActivatable x (R.ReaderT r m) where
tryActivate = lift tryActivate
switching (R.ReaderT basic) (R.ReaderT activated) =
R.ReaderT $ switching <$> basic <*> activated
instance (MonadActivatable x m, Monoid w) => MonadActivatable x (WS.WriterT w m) where
tryActivate = lift tryActivate
switching (WS.WriterT basic) (WS.WriterT activated) =
WS.WriterT $ lift_switching (switch_pair_strict mempty) push_pair_strict basic activated
instance (MonadActivatable x m, Monoid w) => MonadActivatable x (WL.WriterT w m) where
tryActivate = lift tryActivate
switching (WL.WriterT basic) (WL.WriterT activated) =
WL.WriterT $ lift_switching (switch_pair_lazy mempty) push_pair_lazy basic activated
instance MonadActivatable x m => MonadActivatable x (SS.StateT s m) where
tryActivate = lift tryActivate
switching (SS.StateT basic) (SS.StateT activated) =
SS.StateT $ \s -> lift_switching (switch_pair_strict s) push_pair_strict (basic s) (activated s)
instance MonadActivatable x m => MonadActivatable x (SL.StateT s m) where
tryActivate = lift tryActivate
switching (SL.StateT basic) (SL.StateT activated) =
SL.StateT $ \s -> lift_switching (switch_pair_lazy s) push_pair_lazy (basic s) (activated s)
instance (MonadActivatable x m, Monoid w) => MonadActivatable x (RWSS.RWST r w s m) where
tryActivate = lift tryActivate
switching (RWSS.RWST basic) (RWSS.RWST activated) =
RWSS.RWST $ \r s -> lift_switching (switch_triple_strict s mempty) push_triple_strict (basic r s) (activated r s)
instance (MonadActivatable x m, Monoid w) => MonadActivatable x (RWSL.RWST r w s m) where
tryActivate = lift tryActivate
switching (RWSL.RWST basic) (RWSL.RWST activated) =
RWSL.RWST $ \r s -> lift_switching (switch_triple_lazy s mempty) push_triple_lazy (basic r s) (activated r s)
--------------------------------------------------------------------------------
-- Instance helpers (module-local)
push_pair_lazy :: ((a,x),o) -> ((a,o),x)
push_pair_lazy ~((a,x),o) = ((a,o),x)
{-# INLINE push_pair_lazy #-}
switch_pair_lazy :: o -> Switched (b,o) (a,o) x -> (Switched b a x, o)
switch_pair_lazy o' = \case
Basic ~(b,o) -> (Basic b, o)
Activated ~(a,o) -> (Activated a, o)
Residual x -> (Residual x, o')
{-# INLINE switch_pair_lazy #-}
push_pair_strict :: ((a,x),o) -> ((a,o),x)
push_pair_strict ((a,x),o) = ((a,o),x)
{-# INLINE push_pair_strict #-}
switch_pair_strict :: o -> Switched (b,o) (a,o) x -> (Switched b a x, o)
switch_pair_strict o_strict = \case
Basic (b,o) -> (Basic b, o)
Activated (a,o) -> (Activated a, o)
Residual x -> (Residual x, o_strict)
{-# INLINE switch_pair_strict #-}
push_triple_lazy :: ((a,x),s,w) -> ((a,s,w),x)
push_triple_lazy ~((a,x),s,w) = ((a,s,w),x)
{-# INLINE push_triple_lazy #-}
switch_triple_lazy :: s -> w -> Switched (b,s,w) (a,s,w) x -> (Switched b a x, s, w)
switch_triple_lazy s wempty = \case
Basic ~(b,s',w) -> (Basic b, s', w)
Activated ~(a,s',w) -> (Activated a, s', w)
Residual x -> (Residual x, s, wempty)
{-# INLINE switch_triple_lazy #-}
push_triple_strict :: ((a,x),s,w) -> ((a,s,w),x)
push_triple_strict ((a,x),s,w) = ((a,s,w),x)
{-# INLINE push_triple_strict #-}
switch_triple_strict :: s -> w -> Switched (b,s,w) (a,s,w) x -> (Switched b a x, s, w)
switch_triple_strict s wempty = \case
Basic (b,s',w) -> (Basic b, s', w)
Activated (a,s',w) -> (Activated a, s', w)
Residual x -> (Residual x, s, wempty)
{-# INLINE switch_triple_strict #-}
lift_switching :: MonadActivatable x m
=> (Switched b a x -> r) -> (a' -> (a, x))
-> m b -> m a' -> m r
lift_switching switch push basic activated = switch <$> switching basic (push <$> activated)
{-# INLINE lift_switching #-}
|
antalsz/hs-to-coq
|
src/lib/Control/Monad/Activatable/Class.hs
|
mit
| 7,714 | 0 | 11 | 1,892 | 2,225 | 1,241 | 984 | 110 | 3 |
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiWayIf #-}
-- |
-- Module : Game.Game.Poker
-- Copyright : (c) 2017 Christopher A. Gorski
-- License : MIT
-- Maintainer : Christopher A. Gorski <[email protected]>
--
-- The Game.Game.Poker module provides operations for five card poker.
module Game.Game.Poker
(
-- * Poker Hand Types
PokerHand
, PokerHandType(..)
, AceRank (..)
, cardsOfPokerHand
, typeOfPokerHand
-- * Building Hands
, mkHand
-- * Hand Type Existence Checks
, isHand
-- * Sets of Hand Types
, allPossibleHands
, allRoyalFlush
, allStraightFlush
, allFourOfAKind
, allFullHouse
, allFlush
, allStraight
, allThreeOfAKind
, allTwoPair
, allPair
, allHighCard
-- * Random Hands
, randomHighCard
, randomPair
, randomTwoPair
, randomThreeOfAKind
, randomStraight
, randomFlush
, randomFullHouse
, randomFourOfAKind
, randomStraightFlush
, randomRoyalFlush
-- * Additional Hand Building Functions
, mkHighCard
, mkPair
, mkTwoPair
, mkThreeOfAKind
, mkStraight
, mkFlush
, mkFullHouse
, mkFourOfAKind
, mkStraightFlush
, mkRoyalFlush
-- * Additional Hand Type Existence Checks
, isHighCard
, isPair
, isTwoPair
, isThreeOfAKind
, isStraight
, isFlush
, isFullHouse
, isFourOfAKind
, isStraightFlush
, isRoyalFlush
)
where
import Control.Monad.Loops
import Control.Monad.Random
import Game.Implement.Card
import Game.Implement.Card.Standard
import Game.Implement.Card.Standard.Poker
import Data.List (nub,find)
import Data.Maybe (isJust, fromJust, catMaybes)
-- |
-- Indicates if a poker hand uses the Ace as a high card or a low card. AceLow is only
-- used when an Ace is in a hand. Any hand without an Ace is considered AceHigh.
--
-- >>> [minBound .. maxBound] :: [AceRank]
-- [AceHigh,AceLow]
data AceRank = AceHigh | AceLow deriving (Eq, Show, Ord, Enum, Bounded)
-- |
-- Return the cards in a 'PokerHand'
cardsOfPokerHand :: PokerHand -> [PlayingCard]
cardsOfPokerHand (PokerHand _ h) = h
-- |
-- Return the 'PokerHandType' of a 'PokerHand'
typeOfPokerHand :: PokerHand -> PokerHandType
typeOfPokerHand (PokerHand t _) = t
-- |
-- The type of a 'PokerHand'.
data PokerHandType =
HighCard
| Pair
| TwoPair
| ThreeOfAKind
| Straight AceRank
| Flush
| FullHouse
| FourOfAKind
| StraightFlush AceRank
| RoyalFlush
deriving(Eq,Show)
-- |
-- A poker hand. Constructors are hidden, so any hand encapsulated in this type
-- can be considered a valid hand.
--
-- >>> deck <- evalRandIO $ shuffle $ (fullDeck :: [PlayingCard])
-- >>> hand = draw1_ 5 deck
-- >>> hand
-- [Five of Diamonds,Jack of Spades,Queen of Spades,Queen of Diamonds,Jack of Hearts]
--
-- >>> pokerhand = fromJust $ mkHand hand
-- >>> pokerhand
-- PokerHand TwoPair [Five of Diamonds,Jack of Spades,Queen of Spades,Queen of Diamonds,Jack of Hearts]
--
-- >>> typeOfPokerHand pokerhand
-- TwoPair
--
-- >>> cardsOfPokerHand pokerhand
-- [Five of Diamonds,Jack of Spades,Queen of Spades,Queen of Diamonds,Jack of Hearts]
data PokerHand = PokerHand PokerHandType [PlayingCard] deriving(Eq,Show)
-- |
-- Return a random hand that is not any other hand, also known as "High Card"
randomHighCard :: RandomGen g => Rand g PokerHand
randomHighCard =
let r = replicateM 5 randomCard
in
do
hand <- iterateUntil (\h -> isHighCard h) r
return $ PokerHand HighCard hand
-- |
-- Return a random hand that is a Pair
randomPair :: RandomGen g => Rand g PokerHand
randomPair =
do
numLstR <- uniqueNumList 4 0 12
rank1pair <- return $ replicate 2 $ toEnum $ (fromJust numLstR) !! 0
rank2 <- return $ toEnum $ (fromJust numLstR) !! 1
rank3 <- return $ toEnum $ (fromJust numLstR) !! 2
rank4 <- return $ toEnum $ (fromJust numLstR) !! 3
rankLst <- return $ rank4:rank3:rank2:rank1pair
numLstS1 <- uniqueNumList 2 0 3
suitLst1 <- return $ map (\r -> toEnum r) $ fromJust numLstS1
suit2 <- randomSuit
suit3 <- randomSuit
suit4 <- randomSuit
suitLst <- return $ suit4:suit3:suit2:suitLst1
cardset <- zipWithM (\r s -> return(PlayingCard r s)) rankLst suitLst
shuffleset <- shuffle cardset
return $ PokerHand Pair shuffleset
-- |
-- Return a random hand that is a Two Pair
randomTwoPair :: RandomGen g => Rand g PokerHand
randomTwoPair =
do
numLstR <- uniqueNumList 3 0 12
rank1 <- return $ replicate 2 $ toEnum $ (fromJust numLstR) !! 0
rank2 <- return $ replicate 2 $ toEnum $ (fromJust numLstR) !! 1
rank3 <- return $ toEnum $ (fromJust numLstR) !! 2
rankLst :: [Rank] <- return $ rank3:(rank1 ++ rank2)
numLstS1 <- uniqueNumList 2 0 3
numLstS2 <- uniqueNumList 2 0 3
numS3 <- randomSuit
suitLst1 <- return $ map (\r -> toEnum r) $ fromJust numLstS1
suitLst2 <- return $ map (\r -> toEnum r) $ fromJust numLstS2
suitLst <- return $ numS3:(suitLst1 ++ suitLst2)
cardset <- zipWithM (\r s -> return(PlayingCard r s)) rankLst suitLst
shuffleset <- shuffle cardset
return $ PokerHand TwoPair shuffleset
-- |
-- Return a random hand that is a Three of a Kind
randomThreeOfAKind :: RandomGen g => Rand g PokerHand
randomThreeOfAKind =
do
numLst <- uniqueNumList 3 0 12
rank1 <- return $ replicate 3 $ toEnum $ (fromJust numLst) !! 0
rank2 <- return $ map (\r -> toEnum r) $ drop 1 (fromJust numLst)
rankLst :: [Rank] <- return $ rank1 ++ rank2
numLstS1 <- uniqueNumList 3 0 3
suitLst1 <- return $ map (\r -> toEnum r) $ fromJust numLstS1
suitLst2 <- replicateM 2 randomSuit
suitLst <- return $ suitLst1 ++ suitLst2
cardset <- zipWithM (\r s -> return(PlayingCard r s)) rankLst suitLst
shuffleset <- shuffle cardset
return $ PokerHand ThreeOfAKind shuffleset
-- |
-- Return a random hand that is a Straight
randomStraight :: RandomGen g => Rand g PokerHand
randomStraight =
let
mkRanklst :: Int -> [Rank]
mkRanklst n = map (\m -> toEnum ((m+n) `mod` 13) ) [0..4]
mergelst r s = return(PlayingCard r s)
l = do
startRank :: Int <- getRandomR(0,9)
ranklst <- return (mkRanklst startRank)
suitlst :: [Suit] <- replicateM 5 randomSuit
cardset <- zipWithM mergelst ranklst suitlst
return cardset
in
do
hand <- iterateUntil (\h -> (not $ isStraightFlush h) && (not $ isRoyalFlush h)) l
aceRank <- return (if (toRank $ hand !! 0) == Ace then AceLow else AceHigh)
shuffledHand <- shuffle hand
return $ PokerHand (Straight aceRank) shuffledHand
-- |
-- Return a random hand that is a Flush
randomFlush :: RandomGen g => Rand g PokerHand
randomFlush =
let
l = do
numLst <- uniqueNumList 5 0 12
rankLst :: [Rank] <- return $ map (\n -> toEnum n) $ fromJust $ numLst
randSuit <- randomSuit
suitLst :: [Suit] <- return $ replicate 5 randSuit
cardset <- zipWithM (\r s -> return(PlayingCard r s)) rankLst suitLst
return cardset
in
do
hand <- iterateUntil (\h -> (not $ isRoyalFlush h) && (not $ isStraightFlush h)) l
return $ PokerHand Flush hand
-- |
-- Return a random hand that is a Full House
randomFullHouse :: RandomGen g => Rand g PokerHand
randomFullHouse =
do
numLstR <- uniqueNumList 2 0 12
rank1 <- return $ toEnum $ (fromJust numLstR) !! 0
rank2 <- return $ toEnum $ (fromJust numLstR) !! 1
rankLst :: [Rank] <- return [rank1, rank1, rank1, rank2, rank2]
numLstS1 <- uniqueNumList 3 0 3
numLstS2 <- uniqueNumList 2 0 3
suitLst1 <- return $ map (\r -> toEnum r) $ fromJust numLstS1
suitLst2 <- return $ map (\r -> toEnum r) $ fromJust numLstS2
suitLst <- return $ suitLst1 ++ suitLst2
cardset <- zipWithM (\r s -> return(PlayingCard r s)) rankLst suitLst
shuffleset <- shuffle cardset
return $ PokerHand FullHouse shuffleset
-- |
-- Return a random hand that is a Four of a Kind
randomFourOfAKind :: RandomGen g => Rand g PokerHand
randomFourOfAKind =
do
randRank4 <- randomRank
randRank <- iterateUntil (\r -> r /= randRank4) randomRank
randRanks <- return $ randRank:(replicate 4 randRank4)
randSuit <- randomSuit
randSuits <- return [randSuit, Clubs, Diamonds, Hearts, Spades]
mergedLst <- zipWithM (\r s -> return(PlayingCard r s)) randRanks randSuits
shuffleSet <- shuffle mergedLst
return $ PokerHand FourOfAKind $ shuffleSet
-- |
-- Return a random hand that is a Straight Flush
randomStraightFlush :: RandomGen g => Rand g PokerHand
randomStraightFlush =
let
mkRanklst :: Int -> [Rank]
mkRanklst n = map (\m -> toEnum ((m+n) `mod` 13) ) [0..4]
mergelst r s = return(PlayingCard r s)
l = do
startRank :: Int <- getRandomR(0,9)
ranklst <- return (mkRanklst startRank)
randSuit <- randomSuit
suitlst :: [Suit] <- return (replicate 5 randSuit)
cardset <- zipWithM mergelst ranklst suitlst
return cardset
in
do
hand <- iterateUntil (\h -> (not $ isRoyalFlush h)) l
aceRank <- return (if (toRank $ hand !! 0) == Ace then AceLow else AceHigh)
shuffledHand <- shuffle hand
return $ PokerHand (StraightFlush aceRank) shuffledHand
-- |
-- Return a random hand that is a Royal Flush
randomRoyalFlush :: RandomGen g => Rand g PokerHand
randomRoyalFlush =
let
mkRanklst :: [Rank]
mkRanklst = Ace : (map (\m -> toEnum m) [9..12])
mergelst r s = return(PlayingCard r s) in
do
randSuit <- randomSuit
suitlst :: [Suit] <- return (replicate 5 randSuit)
cardset <- zipWithM mergelst mkRanklst suitlst
shuffledHand <- shuffle cardset
return $ PokerHand RoyalFlush shuffledHand
-- |
-- Given a list of cards, find the best hand in the set. If the number
-- of cards is not equal to five, or there are duplicate cards, mkHand returns
-- Nothing.
mkHand :: [PlayingCard] -> Maybe PokerHand
mkHand hand =
let checks =
[mkHighCard
,mkPair
,mkTwoPair
,mkThreeOfAKind
,mkStraight
,mkFlush
,mkFullHouse
,mkFourOfAKind
,mkStraightFlush
,mkRoyalFlush]
cat = catMaybes $ map (\f -> f hand) checks
in
if length cat == 0
then Nothing
else Just $ cat !! 0
isSameSuit :: [PlayingCard] -> Bool
isSameSuit hand =
let
ff (Just c0) (Just c1) =
if (toSuit c0) == (toSuit c1)
then Just c1
else Nothing
ff _ _ = Nothing
in
case foldl1 ff $ map (\x -> Just x) hand of
Nothing -> False
Just _ -> True
hasConsecutiveRanks :: Order -> [PlayingCard] -> Bool
hasConsecutiveRanks order hand =
let handlst = map (\x -> Just x) $ sortCardsBy order hand
ff (Just c0) (Just c1) =
case (toOrderedValue order RankValueType c1)-(toOrderedValue order RankValueType c0) of
1 -> Just c1
_ -> Nothing
ff _ _ = Nothing
in
case foldl1 ff handlst of
Nothing -> False
_ -> True
nOfRank :: [PlayingCard] -> [(Rank, Int)]
nOfRank hand =
let
rlst = toRankLst hand
uniquelst = nub hand
countel :: PlayingCard -> (Rank, Int)
countel card = ((toRank card), length [x | x <- rlst, (toRank card)==x])
in
nub $ map countel uniquelst
hasNOfRank :: Int -> [PlayingCard] -> Bool
hasNOfRank i hand =
case (find (\(_,n) -> i == n) (nOfRank hand)) of
Just _ -> True
Nothing -> False
hasNumNOfRank :: Int -> Int -> [PlayingCard] -> Bool
hasNumNOfRank i num hand =
if (length (filter (\(_,n) -> i == n) (nOfRank hand))) == num
then True
else False
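-- Worked example for the helpers above (illustrative): for a hand holding
-- two Queens, two Jacks and a Five, 'nOfRank' yields counts such as
-- [(Queen,2),(Jack,2),(Five,1)] (in input order), so 'hasNOfRank' 2 and
-- 'hasNumNOfRank' 2 2 are both True, while 'hasNumNOfRank' 3 1 is False.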
-- |
-- Verify that the best hand of a set of cards is a high card hand,
-- and if so, return a 'PokerHand'. Otherwise, return Nothing.
mkHighCard :: [PlayingCard] -> Maybe PokerHand
mkHighCard hand
| isValidPokerHand hand =
if (not $ isPair hand)
&& (not $ isTwoPair hand)
&& (not $ isThreeOfAKind hand)
&& (not $ isStraight hand)
&& (not $ isFlush hand)
&& (not $ isFullHouse hand)
&& (not $ isFourOfAKind hand)
&& (not $ isStraightFlush hand)
&& (not $ isRoyalFlush hand)
then Just (PokerHand HighCard hand)
else Nothing
| otherwise = Nothing
-- |
-- Return True if a hand matches a specific PokerHandType. False otherwise.
isHand :: PokerHandType -> [PlayingCard] -> Bool
isHand HighCard cards = if isHighCard cards then True else False
isHand Pair cards = if isPair cards then True else False
isHand TwoPair cards = if isTwoPair cards then True else False
isHand ThreeOfAKind cards = if isThreeOfAKind cards then True else False
isHand (Straight AceHigh) cards =
let f (Just (PokerHand (Straight AceHigh) _)) = True
f _ = False in f $ mkStraight cards
isHand (Straight AceLow) cards =
let f (Just (PokerHand (Straight AceLow) _)) = True
f _ = False in f $ mkStraight cards
isHand Flush cards = if isFlush cards then True else False
isHand FullHouse cards = if isFullHouse cards then True else False
isHand FourOfAKind cards = if isFourOfAKind cards then True else False
isHand (StraightFlush AceHigh) cards =
let f (Just (PokerHand (StraightFlush AceHigh) _)) = True
f _ = False in f $ mkStraightFlush cards
isHand (StraightFlush AceLow) cards =
let f (Just (PokerHand (StraightFlush AceLow) _)) = True
f _ = False in f $ mkStraightFlush cards
isHand RoyalFlush cards = if isRoyalFlush cards then True else False
-- |
-- Verify that the best hand of a set of cards is a high card hand,
-- and if so, return True. Otherwise, return False.
isHighCard :: [PlayingCard] -> Bool
isHighCard hand
| isJust $ mkHighCard hand = True
| otherwise = False
-- |
-- Verify that the best hand of a set of cards is a pair hand,
-- and if so, return a 'PokerHand'. Otherwise, return Nothing.
mkPair :: [PlayingCard] -> Maybe PokerHand
mkPair hand
| isValidPokerHand hand =
if (hasNumNOfRank 2 1 hand)
&& (not $ isFullHouse hand)
then Just (PokerHand Pair hand)
else Nothing
| otherwise = Nothing
-- |
-- Verify that the best hand of a set of cards is a pair hand,
-- and if so, return True. Otherwise, return False.
isPair :: [PlayingCard] -> Bool
isPair hand
| isJust $ mkPair hand = True
| otherwise = False
-- |
-- Verify that the best hand of a set of cards is a two pair,
-- and if so, return a 'PokerHand'. Otherwise, return Nothing.
mkTwoPair :: [PlayingCard] -> Maybe PokerHand
mkTwoPair hand
| isValidPokerHand hand =
if (hasNumNOfRank 2 2 hand)
&& (not $ isFullHouse hand)
then Just (PokerHand TwoPair hand)
else Nothing
| otherwise = Nothing
-- |
-- Verify that the best hand of a set of cards is a two pair hand,
-- and if so, return True. Otherwise, return False.
isTwoPair :: [PlayingCard] -> Bool
isTwoPair hand
| isJust $ mkTwoPair hand = True
| otherwise = False
-- |
-- Verify that the best hand of a set of cards is a three-of-a-kind hand,
-- and if so, return a 'PokerHand'. Otherwise, return Nothing.
mkThreeOfAKind :: [PlayingCard] -> Maybe PokerHand
mkThreeOfAKind hand
| isValidPokerHand hand =
if (hasNOfRank 3 hand)
&& (not $ isFullHouse hand)
then Just (PokerHand ThreeOfAKind hand)
else Nothing
| otherwise = Nothing
-- |
-- Verify that the best hand of a set of cards is a three-of-a-kind hand,
-- and if so, return True. Otherwise, return False.
isThreeOfAKind :: [PlayingCard] -> Bool
isThreeOfAKind hand
| isJust $ mkThreeOfAKind hand = True
| otherwise = False
mkConsecutiveRanks :: [PlayingCard] -> Maybe ([PlayingCard], AceRank)
mkConsecutiveRanks hand =
let consecHigh h = (hasConsecutiveRanks AceHighRankOrder h)
consecLow h = (hasConsecutiveRanks AceLowRankOrder h)
f h2
| consecHigh h2 = Just (sortCardsBy AceHighRankOrder h2, AceHigh)
| consecLow h2 = Just (sortCardsBy AceLowRankOrder h2, AceLow)
| otherwise = Nothing
in f hand
-- |
-- Verify that the best hand of a set of cards is a straight hand,
-- and if so, return a 'PokerHand'. Otherwise, return Nothing.
mkStraight :: [PlayingCard] -> Maybe PokerHand
mkStraight hand
| isValidPokerHand hand =
let consecRanks = mkConsecutiveRanks hand
isConsecRanks = isJust consecRanks in
if isConsecRanks
&& (not $ isRoyalFlush hand)
&& (not $ isStraightFlush hand)
then Just (PokerHand (Straight $ snd $ fromJust consecRanks) hand)
else Nothing
| otherwise = Nothing
-- |
-- Verify that the best hand of a set of cards is a straight hand,
-- and if so, return True. Otherwise, return False.
isStraight :: [PlayingCard] -> Bool
isStraight hand
| isJust $ mkStraight hand = True
| otherwise = False
-- |
-- Verify that the best hand of a set of cards is a flush hand,
-- and if so, return a 'PokerHand'. Otherwise, return Nothing.
mkFlush :: [PlayingCard] -> Maybe PokerHand
mkFlush hand
| isValidPokerHand hand =
if (isSameSuit hand)
&& (not $ isRoyalFlush hand)
&& (not $ isStraightFlush hand)
then Just (PokerHand Flush hand)
else Nothing
| otherwise = Nothing
-- |
-- Verify that the best hand of a set of cards is a flush hand,
-- and if so, return True. Otherwise, return False.
isFlush :: [PlayingCard] -> Bool
isFlush hand
| isJust $ mkFlush hand = True
| otherwise = False
-- |
-- Verify that the best hand of a set of cards is a full house hand,
-- and if so, return a 'PokerHand'. Otherwise, return Nothing.
mkFullHouse :: [PlayingCard] -> Maybe PokerHand
mkFullHouse hand
| isValidPokerHand hand =
if (hasNOfRank 3 hand)
&& (hasNOfRank 2 hand)
then Just (PokerHand FullHouse hand)
else Nothing
| otherwise = Nothing
-- |
-- Verify that the best hand of a set of cards is a full house hand,
-- and if so, return True. Otherwise, return False.
isFullHouse :: [PlayingCard] -> Bool
isFullHouse hand
| isJust $ mkFullHouse hand = True
| otherwise = False
-- |
-- Verify that the best hand of a set of cards is a four-of-a-kind hand,
-- and if so, return a 'PokerHand'. Otherwise, return Nothing.
mkFourOfAKind :: [PlayingCard] -> Maybe PokerHand
mkFourOfAKind hand
| isValidPokerHand hand =
if (hasNOfRank 4 hand)
then Just (PokerHand FourOfAKind hand)
else Nothing
| otherwise = Nothing
-- |
-- Verify that the best hand of a set of cards is a four-of-a-kind hand,
-- and if so, return True. Otherwise, return False.
isFourOfAKind :: [PlayingCard] -> Bool
isFourOfAKind hand
| isJust $ mkFourOfAKind hand = True
| otherwise = False
-- |
-- Verify that the best hand of a set of cards is a straight flush hand,
-- and if so, return a 'PokerHand'. Otherwise, return Nothing.
mkStraightFlush :: [PlayingCard] -> Maybe PokerHand
mkStraightFlush hand
| isValidPokerHand hand =
let consecRanks = mkConsecutiveRanks hand
isConsecRanks = isJust consecRanks in
if isConsecRanks
&& (isSameSuit hand)
&& (not $ isRoyalFlush hand)
then Just (PokerHand (StraightFlush $ snd $ fromJust consecRanks) hand)
else Nothing
| otherwise = Nothing
-- |
-- Verify that the best hand of a set of cards is a straight flush hand,
-- and if so, return True. Otherwise, return False.
isStraightFlush :: [PlayingCard] -> Bool
isStraightFlush hand
| isJust $ mkStraightFlush hand = True
| otherwise = False
-- |
-- Verify that the best hand of a set of cards is a royal flush hand,
-- and if so, return a 'PokerHand'. Otherwise, return Nothing.
mkRoyalFlush :: [PlayingCard] -> Maybe PokerHand
mkRoyalFlush hand
| isValidPokerHand hand =
if (isSameSuit hand)
then
let
slst :: [PlayingCard] = sortCardsBy AceHighRankOrder hand
rlst = toValueLst slst
in
if (rlst == [Ten, Jack, Queen, King, Ace])
then Just (PokerHand RoyalFlush hand)
else Nothing
else Nothing
| otherwise = Nothing
-- |
-- Verify that the best hand of a set of cards is a royal flush hand,
-- and if so, return True. Otherwise, return False.
isRoyalFlush :: [PlayingCard] -> Bool
isRoyalFlush hand
| isJust $ mkRoyalFlush hand = True
| otherwise = False
isValidPokerHand :: [PlayingCard] -> Bool
isValidPokerHand hand
| ((length hand) == 5) && ((dedupe hand) == hand) = True
| otherwise = False
-- |
-- All possible hands of a full deck of playing cards
allPossibleHands :: [[PlayingCard]]
allPossibleHands = choose 5 fullDeck
-- |
-- All royal flushes in a full deck of playing cards.
-- The current implementation traverses the entire list of allPossibleHands,
-- and is not efficient.
allRoyalFlush :: [[PlayingCard]]
allRoyalFlush = [x | x <- allPossibleHands, isRoyalFlush x]
-- |
-- All straight flushes in a full deck of playing cards.
-- The current implementation traverses the entire list of allPossibleHands,
-- and is not efficient.
allStraightFlush :: [[PlayingCard]]
allStraightFlush = [x | x <- allPossibleHands, isStraightFlush x]
-- |
-- All four-of-a-kinds in a full deck of playing cards.
-- The current implementation traverses the entire list of allPossibleHands,
-- and is not efficient.
allFourOfAKind :: [[PlayingCard]]
allFourOfAKind = [x | x <- allPossibleHands, isFourOfAKind x]
-- |
-- All full houses in a full deck of playing cards.
-- The current implementation traverses the entire list of allPossibleHands,
-- and is not efficient.
allFullHouse :: [[PlayingCard]]
allFullHouse = [x | x <- allPossibleHands, isFullHouse x]
-- |
-- All flushes in a full deck of playing cards.
-- The current implementation traverses the entire list of allPossibleHands,
-- and is not efficient.
allFlush :: [[PlayingCard]]
allFlush = [x | x <- allPossibleHands, isFlush x]
-- |
-- All straights in a full deck of playing cards.
-- The current implementation traverses the entire list of allPossibleHands,
-- and is not efficient.
allStraight :: [[PlayingCard]]
allStraight = [x | x <- allPossibleHands, isStraight x]
-- |
-- All three-of-a-kind in a full deck of playing cards.
-- The current implementation traverses the entire list of allPossibleHands,
-- and is not efficient.
allThreeOfAKind :: [[PlayingCard]]
allThreeOfAKind = [x | x <- allPossibleHands, isThreeOfAKind x]
-- |
-- All two pairs in a full deck of playing cards.
-- The current implementation traverses the entire list of allPossibleHands,
-- and is not efficient.
allTwoPair :: [[PlayingCard]]
allTwoPair = [x | x <- allPossibleHands, isTwoPair x]
-- |
-- All pairs in a full deck of playing cards.
-- The current implementation traverses the entire list of allPossibleHands,
-- and is not efficient.
allPair :: [[PlayingCard]]
allPair = [x | x <- allPossibleHands, isPair x]
-- |
-- All high card hands in a full deck of playing cards.
-- The current implementation traverses the entire list of allPossibleHands,
-- and is not efficient.
allHighCard :: [[PlayingCard]]
allHighCard = [x | x <- allPossibleHands, isHighCard x]
|
cgorski/general-games
|
src/Game/Game/Poker.hs
|
mit
| 22,974 | 0 | 18 | 5,294 | 6,108 | 3,147 | 2,961 | 471 | 13 |
{-# LANGUAGE OverloadedStrings, RecordWildCards #-}
module Main where
import Network.IRC.Client
import Network.IRC.Client.Types
import System.Environment
import Options.Applicative
import Control.Applicative
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.IO as T
import Control.Monad.IO.Class
import System.IO
import Data.Time.Clock (NominalDiffTime, getCurrentTime)
import Data.Time.Format (formatTime)
import System.Locale (defaultTimeLocale)
import Text.Printf
data Options = Options {
nickname :: Text
, chatroom :: Text
} deriving Show
parseOpts :: Parser Options
parseOpts = Options
<$> (T.pack <$> strArgument (metavar "NICK" <> help "Nickname"))
<*> (prependHash . T.pack <$> strArgument (metavar "CHANNEL" <> help "Channel, # is automatically prepended if missing"))
opts = info (helper <*> parseOpts)
(fullDesc <> header "irclog")
main = do
opt@Options{..} <- execParser opts
print opt
hSetBuffering stdout NoBuffering
let host = "chat.freenode.net"
let port = 6667
-- noopLogger, stdoutLogger
conn <- connect' noopLogger host port 1
let cfg = defaultIRCConf nickname
let hs = [joinChatHandler chatroom, messageHandler]
-- let hs = [joinChatHandler chatroom, logEventsHandler]
let cfg' = cfg { _eventHandlers = hs ++ _eventHandlers cfg }
start conn cfg'
prependHash :: Text -> Text
prependHash s | T.isPrefixOf "#" s = s
| otherwise = T.cons '#' s
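-- Illustrative behaviour:
--
-- > prependHash "haskell"  == "#haskell"
-- > prependHash "#haskell" == "#haskell"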
logEventsHandler :: EventHandler
logEventsHandler = EventHandler "Trace chat events" EEverything logEvent
logEvent :: UnicodeEvent -> IRC ()
logEvent ev = do
liftIO . print $ ev
liftIO . print . eventType $ ev
messageHandler :: EventHandler
messageHandler = EventHandler "Log messages" EPrivmsg logMessage
-- default event handlers
-- https://hackage.haskell.org/package/irc-client-0.2.4.0/docs/src/Network-IRC-Client-Handlers.html#defaultEventHandlers
joinChatHandler ch = EventHandler "Join chatroom" ENumeric (joinChat ch)
joinChat :: Text -> (UnicodeEvent -> IRC ())
joinChat ch ev =
case _message ev of
Numeric 001 _ -> send (Join ch)
_ -> return ()
logMessage :: UnicodeEvent -> IRC ()
logMessage ev =
case _message ev of
Privmsg target m -> liftIO $ do
now <- getCurrentTime
let ts = formatTime defaultTimeLocale "%c" now
let source = T.unpack $ formatSource (_source ev)
printf "%s %20s : " ts source
-- print target
T.putStrLn (formatMsg m)
_ -> return ()
formatSource :: Source Text -> Text
formatSource (Channel name nick) = nick
formatSource (User nick) = nick
formatSource (Server n) = n
formatMsg :: Either b Text -> Text
formatMsg (Right m) = m
formatMsg (Left e) = "CTCPByteString"
{-
Channel "#haskell" "Welkin"
"#haskell"
Right "I know"
---
Channel "#haskell" "quchen"
"#haskell"
Right "But your deps lack that."
---
-}
{-
send :: UnicodeMessage -> IRC ()
type EventHandler Text EventType (UnicodeEvent -> IRC ())
handler :: Text -> EventHandler
handler chatroom = undefined -- EventHandler "Join chatroom at beginning"
run :: ByteString -> Int -> Text -> IO ()
run host port nick = do
conn <- connect host port 1
let cfg = defaultIRCConf nick
let cfg' = cfg { _handlers = yourCustomEventHandlers : _handlers cfg }
start conn cfg'
Messages
http://hackage.haskell.org/package/irc-conduit-0.1.2.0/docs/src/Network-IRC-Conduit-Internal-Messages.html#Message
-}
|
danchoi/irclog
|
Main.hs
|
mit
| 3,511 | 0 | 17 | 703 | 821 | 419 | 402 | 71 | 2 |
module Interpreter.RuntimeError
( RuntimeError(..)
) where
import Util.Error (ErrorT(kind))
data RuntimeError
= Unsupported
| UnknownVariable String
| MatchFailure
deriving (Show)
instance ErrorT RuntimeError where
kind _ = "RuntimeError"
|
tadeuzagallo/verve-lang
|
src/Interpreter/RuntimeError.hs
|
mit
| 256 | 0 | 6 | 45 | 67 | 40 | 27 | 10 | 0 |
{-# LANGUAGE FlexibleInstances, TypeSynonymInstances #-}
module StringBuffer where
import Data.Monoid
import Buffer
instance Buffer String where
toString = id
fromString = id
line n b = safeIndex n (lines b)
replaceLine n l = unlines . uncurry replaceLine' . splitAt n . lines
where replaceLine' pre [] = pre
replaceLine' pre (_:ls) = pre ++ l:ls
numLines = length . lines
value = length . words
safeIndex :: Int -> [a] -> Maybe a
safeIndex n _ | n < 0 = Nothing
safeIndex _ [] = Nothing
safeIndex 0 (x:_) = Just x
safeIndex n (_:xs) = safeIndex (n-1) xs
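-- Illustrative behaviour of the definitions above:
--
-- > safeIndex 1 "abc" == Just 'b'
-- > safeIndex 5 "abc" == Nothing
-- > replaceLine 1 "new" "a\nb\nc\n" == "a\nnew\nc\n"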
|
mauriciofierrom/cis194-homework
|
homework07/src/StringBuffer.hs
|
mit
| 625 | 0 | 10 | 171 | 237 | 122 | 115 | 18 | 1 |
module Discussion.Data where
--------------------------------------------------------------------------------
data Term = VarT Var
| App [Term]
| Lambda [Var] Term
| Joint Term -- sometimes used as a marker while a Term is being processed
deriving (Eq, Show)
newtype Var = Var Identifier
deriving (Eq, Ord, Show)
type Identifier = String
--------------------------------------------------------------------------------
data Expr = Assign Var Args Term
| Reduct (Maybe Int) Term
deriving (Eq, Show)
type Args = [Var]
type Count = Int
--------------------------------------------------------------------------------
data Token = Word Identifier
| Number Int
| Symbol Identifier
| Backquote -- "`"
| LBrace -- "{"
| RBrace -- "}"
| LParen -- "("
| RParen -- ")"
| EOS -- End Of Statement
deriving (Eq, Show)
|
todays-mitsui/discussion
|
src/Discussion/Data.hs
|
mit
| 1,055 | 0 | 8 | 343 | 188 | 115 | 73 | 24 | 0 |
{-# LANGUAGE OverloadedStrings #-}
module Y2021.M01.D20.Solution where
import Control.Arrow ((&&&))
import Data.Aeson
import Data.Aeson.WikiDatum
import Graph.Query
import Graph.JSON.Cypher
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe (fromMaybe)
import qualified Data.ByteString.Lazy.Char8 as BL
{--
We have a JSON file from wikidata with wineries and their geo-locations, from
this SPARQL query:
SELECT ?item ?itemLabel ?location
WHERE
{
?item wdt:P31 wd:Q156362.
?item wdt:P625 ?location.
SERVICE wikibase:label { bd:serviceParam wikibase:language "[AUTO_LANGUAGE],en". }
}
Parse the JSON.
--}
data Winery = Winery { winery :: WikiDatum, location :: LongLat }
deriving (Eq, Ord, Show)
instance FromJSON Winery where
parseJSON = withObject "Winery" $ \v ->
Winery <$> v *: "item" <*> v @: "location"
type Wineries = Map Name Winery
wineriesJSON, wineriesDir :: FilePath
wineriesJSON = "winery-locations.json"
wineriesDir = "Y2021/M01/D20/"
readWineries :: FilePath -> IO Wineries
readWineries json =
Map.fromList . map (name . winery &&& id) . fromMaybe [] . decode
<$> BL.readFile json
-- How many wineries are there?
{--
>>> readWineries (wineriesDir ++ wineriesJSON)
fromList [("21 Cellars",
Winery {winery = WD {qid = "http://www.wikidata.org/entity/Q4630984",
name = "21 Cellars"},
location = point({ latitude: 47.2675, longitude: -122.471 })}),
("Aaron Wines",
Winery {winery = WD {qid = "http://www.wikidata.org/entity/Q63964494",
name = "Aaron Wines"},
location = point({ latitude: 35.5762243, longitude: -120.690493 })}),...]
>>> let wineries = it
>>> Map.size wineries
610
--}
|
geophf/1HaskellADay
|
exercises/HAD/Y2021/M01/D20/Solution.hs
|
mit
| 1,781 | 0 | 12 | 381 | 251 | 147 | 104 | -1 | -1 |
import System.Environment (getArgs)
import Data.List
import Data.List.Split
import Juicer.Freeze
import Juicer.Puree
import Juicer.Blend
main :: IO ()
main = do
args <- getArgs
case args of
(diffname:archivename:[]) -> do
maybeDiff <- diffthaw diffname
maybeArchive <- thaw archivename
case (maybeDiff, maybeArchive) of
(Nothing, _) -> return ()
(_, Nothing) -> return ()
(Just diff, Just archive) -> blend diff archive archivename
_ -> putStrLn "blend [diff] [archive]"
blend :: Diff -> Feed -> String -> IO()
blend diff archive archivename = do
putStrLn $ show diff
putStrLn $ show archive
let archive' = merge archive diff
freeze archive' archivename
putStrLn $ show archive'
|
blanu/juicer
|
blend.hs
|
gpl-2.0
| 791 | 0 | 16 | 206 | 272 | 133 | 139 | 25 | 4 |
-- | Streaming Fasta handling via the @streaming@ library.
--
-- The functions in here should be streaming in constant memory.
--
-- A typical, slightly complicated example is this:
-- @
-- forEach :: forall r . Stream (ByteString m) m r -> m (Stream (Of ()) m r)
-- forEach dna = do
-- -- extract the header, but at most 123 characters, dropping the rest
-- hdr SP.:> dta ← extractHeader (Just 123) dna
-- -- create windows @ws@ of a particular type. Include the prefix, the suffix, and make each window 10 characters long
-- let ws = (streamedWindows True True (Just 10) (SequenceIdentifier hdr) PlusStrand dta :: SP.Stream (SP.Of (BioSequenceWindow "DNA" DNA 0)) m r)
-- -- count the number of characters in @dna@, get the return value, print each window
-- count SP.:> r ← SP.mapM_ (liftIO . print) . bswSeqLength $ SP.copy ws
-- liftIO $ print count
-- liftIO $ putStrLn ""
-- -- yield one vacuous @()@ result, return the remainder @r@ from dna.
-- return $ SP.yield () *> return r
-- @
--
-- TODO Check if this is actually true with some unit tests.
module Biobase.Fasta.Streaming
( module Biobase.Fasta.Streaming
) where
import Control.Lens hiding (Index,Empty, mapped)
import Control.Monad
import Control.Monad.Trans.Resource (runResourceT, ResourceT(..), MonadResource)
import Data.Semigroup as SG
import Debug.Trace
import GHC.Generics (Generic)
import GHC.TypeLits
import Prelude as P
import qualified Data.ByteString.Char8 as BS
import qualified Streaming.Internal as SI
import Streaming as S
import Streaming.ByteString as BSS
import Streaming.ByteString.Char8 as S8
import Streaming.ByteString.Internal as SBI
import Streaming.Prelude as SP
import Data.ByteString.Streaming.Split
import Biobase.Types.BioSequence
import Biobase.Types.Index.Type
import Biobase.Types.Location
import Biobase.Types.Position
import Biobase.Types.Strand
-- |
streamedFasta :: (Monad m) => ByteStream m r -> Stream (Stream (ByteStream m) m) m r
{-# Inlinable streamedFasta #-}
streamedFasta = S.maps collapseData . streamOfStreamedFasta
-- | Here each individual fasta file will be a stream.
--
-- TODO Once this works, @streamingFasta@ should be @S.concats . streamOfStreamedFasta@ ...
streamOfStreamedFasta
:: forall m r
. ( Monad m )
=> ByteStream m r
-> Stream (Stream (ByteStream m) m) m r
-- ^
{-# Inlinable streamOfStreamedFasta #-}
streamOfStreamedFasta = go . S8.lines where
go = \case
SI.Return r -> SI.Return r
SI.Effect m -> SI.Effect (fmap go m)
SI.Step fs -> SI.Step (SI.Step (fmap (fmap go . splitFasta) fs))
-- | Given a 'Stream (ByteString m) m r' which is a 'Stream' of @lines@, split
-- off the first @Fasta@ entry.
splitFasta :: (Monad m) => Stream (ByteStream m) m r -> Stream (ByteStream m) m (Stream (ByteStream m) m r)
{-# Inlinable splitFasta #-}
splitFasta = loop False where
loop hdr = \case
SI.Return r -> SI.Return (SI.Return r)
SI.Effect m -> SI.Effect (fmap (loop hdr) m)
SI.Step bs -> case bs of
Empty r -> loop hdr r
Chunk cs xs
| BS.null cs -> loop hdr $ SI.Step xs
| h=='>' || h==';' -> if hdr then SI.Return (SI.Step bs) else SI.Step $ fmap (loop True) bs
| otherwise -> SI.Step $ fmap (loop True) bs
where h = BS.head cs
Go m -> SI.Effect $ fmap ((loop hdr) . SI.Step) m
-- | Given a stream, roughly like @[BS "Header", BS "Data1", BS "Data2", ...]@
-- create a stream like @[BS "Header", BS "Data"]@. This means that the
-- resulting stream holds exactly two @ByteString@'s.
collapseData :: (Monad m) => Stream (ByteStream m) m r -> Stream (ByteStream m) m r
{-# Inlinable collapseData #-}
collapseData = loop where
loop = \case
SI.Return r -> SI.Return r
SI.Effect m -> SI.Effect (fmap loop m)
SI.Step bs -> case bs of
Empty r -> loop r
Chunk cs xs
| BS.null cs -> loop $ SI.Step xs
| h=='>' || h==';' -> SI.Step $ fmap (S.yields . S8.concat) bs
| otherwise -> SI.Step $ fmap loop bs
where h = BS.head cs
Go m -> SI.Effect $ fmap (loop . SI.Step) m
-- | "Rechunk" a stream of bytestrings.
reChunkBS :: (Monad m) => Int -> Stream (ByteStream m) m r -> Stream (ByteStream m) m r
{-# Inlinable reChunkBS #-}
reChunkBS n = splitsByteStringAt n . S8.concat
-- | Assuming a "rechunked" stream of bytestrings, create sequence windows.
chunksToWindows :: Monad m => SequenceIdentifier w -> Strand -> Stream (ByteStream m) m r -> Stream (Of (Location w FwdPosition (BioSequence ty))) m r
{-# Inlinable chunksToWindows #-}
chunksToWindows seqId s = SP.map go . SP.drop 1 . SP.scan indexed (BS.empty, 0, 0) (\(bs,i,_) -> (bs,i)) . S.mapsM S8.toStrict where
indexed (_,cur,next) bs = (bs,next,next + BS.length bs)
go (bs,i)
= Location
{ _locIdentifier = seqId
, _locPosition = FwdPosition s (Index i)
, _locSequence = BioSequence bs
}
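-- To make the indexing above concrete (an illustration, not part of the
-- original source): if the rechunked stream yields strict chunks of lengths
-- 5, 5 and 3, the scan threads the running offset through, so the emitted
-- 'Location's start at indices 0, 5 and 10 respectively.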
-- | Make it possible to take a fasta stream and produce a stream of
-- 'BioSequenceWindow's. This is a convenience function around
-- 'withSuffix . withPrefix . chunksToWindows . reChunks'.
--
-- In case of a @Nothing@ window size, a single huge @Fasta@ entry is produced
-- (and materialized!).
--
-- TODO In case of @Nothing@ window size, we use the 'collapseData' function
-- which has one check too many, and will be slightly slower. However, the
-- check should be once per @ByteString@.
streamedWindows
:: (Monad m)
=> Maybe Int
-> Maybe Int
-> Maybe Int
-- ^ desired size or a single huge @Fasta@ entry.
-> SequenceIdentifier w
-> Strand
-> (Stream (ByteStream m) m) r
-- -> Stream (Of (BioSequenceWindow w ty FwdLocation)) m r
-> Stream (Of (PIS w FwdPosition (BioSequence ty))) m r
{-# Inlinable streamedWindows #-}
streamedWindows withPrefix withSuffix winSz seqId strnd
= (maybe id attachSuffixes withSuffix)
. (maybe id attachPrefixes withPrefix)
. SP.map pis
. chunksToWindows seqId strnd
. (case winSz of { Nothing -> collapseData; Just sz -> reChunkBS sz })
-- | Get the full length of a stream of 'BioSequenceWindow's, counted in
-- characters in each 'bswSequence'.
--
-- To use, start with @bswSeqLength $ SP.copy xs@. Then consume this stream
-- normally. It still provides a 'Stream' of 'BioSequenceWindow's. However,
-- the return type is now not just @r@, but it provides @Int SP.:> r@, where
-- the @Int@ provides the total length of characters within this @Fasta@ entry.
--
-- This value may then be used to fully update negative strand information.
streamLocationLength :: (Monad m, ModifyLocation posTy seqTy) => Stream (Of (Location i posTy seqTy)) m r -> m (Of Int r)
{-# Inlinable streamLocationLength #-}
streamLocationLength = SP.fold (\x w -> x + locLength w) 0 id
-- | As a first function, the header should be extracted from a @Fasta@ stream. Since headers may be
-- malformed / malicious, we make it possible to
extractHeader
:: (Monad m)
=> Maybe Int
-> Stream (ByteStream m) m r
-> m (Of BS.ByteString (Stream (ByteStream m) m r))
{-# Inlinable extractHeader #-}
extractHeader hdrSz =
let go = case hdrSz of { Nothing -> id; Just sz -> S8.drained . S8.splitAt (fromIntegral sz) }
in S8.toStrict . go . S8.concat . S.splitsAt 1
{-
t0 = P.unlines
[ ">Aaaa"
, "123"
, ">Bbbb"
, "4567"
, ">Cccc"
, "890"
]
r4 = toList . streamingFasta (HeaderSize 2) (OverlapSize 1) (CurrentSize 2) . S8.fromStrict $ BS.pack t0
-}
{-
--eachFasta (Header h) (Overlap o) (Current c p) = SP.yield (h,o,c)
eachFasta (Header h) (Overlap o) (Current c p) = SP.yield (BS.length h, BS.length o, BS.length c)
--readFastaFile :: FilePath -> IO [(BS.ByteString,BS.ByteString,BS.ByteString)]
readFastaFile f = do
let s = 1000000000000
r ← runResourceT
$ SP.mapM_ (liftIO . P.print)
$ streamingFasta (HeaderSize s) (OverlapSize 0) (CurrentSize s) eachFasta
$ S8.readFile f
return r
-}
{-
readFastaFile f = do
let s = 1000000000000
r ← runResourceT
$ SP.mapM_ (liftIO . P.print)
$ SP.mapped S8.toStrict
$ S8.split '>'
$ S8.readFile f
return r
-}
|
choener/BiobaseFasta
|
Biobase/Fasta/Streaming.hs
|
gpl-3.0
| 8,118 | 0 | 18 | 1,704 | 1,775 | 944 | 831 | -1 | -1 |
module NumberTheory.ModularExponentiation
where
import NumberTheory.PositionalBases
modular_pow a k m = m_P 1 0
where m_P c e
| e == k = c
| otherwise = m_P (mod (c*a) m) (e+1)
modular_pow_bin_method a k m = (product $ zipWith (expv2) (binary_Powers_a) (binary_Digits_k)) `mod` m
where
binary_Digits_k = reverse $ findDigits 2 k
binary_Powers_a = take (length binary_Digits_k) $ iterate (\x -> mp2 x) a
expv2 a b
| a == 0 && b == 0 = 1
| a == 0 && b == 1 = 0
| otherwise = a^b
mp2 x = (mod (x*x) m)
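-- Worked example (not part of the original module; the numbers were checked
-- by hand against the definition of modular_pow above):
--
-- >>> modular_pow 4 13 497
-- 445
--
-- i.e. 4^13 mod 497 = 67108864 mod 497 = 445, obtained by 13 successive
-- multiplications each reduced mod 497 instead of building the full power.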
|
mathlover2/number-theory
|
NumberTheory/ModularExponentiation.hs
|
gpl-3.0
| 559 | 5 | 12 | 157 | 264 | 134 | 130 | 14 | 1 |
-- file: ch02/add.hs
add a b = a + b
|
craigem/RealWorldHaskell
|
ch02/add.hs
|
gpl-3.0
| 37 | 0 | 5 | 10 | 16 | 8 | 8 | 1 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveAnyClass #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{- |
Module : Types.hs
Description : Central data type and Yesod typeclass instances.
Copyright : (c) 2011 Cedric Staub
License : GPL-3
Maintainer : Simon Meier <[email protected]>
Stability : experimental
Portability : non-portable
-}
module Web.Types
( WebUI(..)
, Route (..)
, resourcesWebUI
, TheoryInfo(..)
, DiffTheoryInfo(..)
, EitherTheoryInfo(..)
, isTheoryInfo
, isDiffTheoryInfo
, getEitherTheoryName
, getEitherTheoryTime
, getEitherTheoryPrimary
, getEitherTheoryOrigin
, getEitherTheoryIndex
, TheoryPath(..)
, DiffTheoryPath(..)
, TheoryOrigin(..)
, JsonResponse(..)
, TheoryIdx
, TheoryMap
, ThreadMap
-- , GenericHandler
, Handler
-- , URL rendering function
, RenderUrl
-- , GenericWidget
, Widget
-- Image rendering
, ImageFormat(..)
, imageFormatMIME
)
where
-- import Control.Applicative
import Control.Concurrent
import Data.Label
import qualified Data.Map as M
import Data.Maybe (listToMaybe)
-- import Data.Monoid (mconcat)
import Data.Ord (comparing)
import qualified Data.Text as T
import Data.Time.LocalTime
import qualified Data.Binary as Bin
import Data.Binary.Orphans()
import Control.DeepSeq
import GHC.Generics (Generic)
import Text.Hamlet
import Yesod.Core
import Yesod.Static
import Theory
------------------------------------------------------------------------------
-- Types
------------------------------------------------------------------------------
-- | Type synonym for a generic handler inside our site.
-- type GenericHandler m = Handler WebUI WebUI m
-- type Handler a = Handler WebUI WebUI a
-- | Type synonym for a generic widget inside our site.
-- type GenericWidget m = Widget WebUI (GenericHandler m)
-- type Widget a = Widget WebUI WebUI a
-- | Type synonym representing a numeric index for a theory.
type TheoryIdx = Int
-- | Type synonym representing a map of theories.
type TheoryMap = M.Map TheoryIdx (EitherTheoryInfo)
-- | Type synonym representing a map of threads.
type ThreadMap = M.Map T.Text ThreadId
-- | The image format used for rendering graphs.
data ImageFormat = PNG | SVG
instance Show ImageFormat where
show PNG = "png"
show SVG = "svg"
-- | convert image format to MIME type.
imageFormatMIME :: ImageFormat -> String
imageFormatMIME PNG = "image/png"
imageFormatMIME SVG = "image/svg+xml"
-- | The so-called site argument for our application, which can hold various
-- pieces of information that need to be available to the
-- handler functions.
data WebUI = WebUI
{ getStatic :: Static
-- ^ Settings for static file serving.
, cacheDir :: FilePath
-- ^ The caching directory (for storing rendered graphs).
, workDir :: FilePath
-- ^ The working directory (for storing/loading theories).
-- , parseThy :: MonadIO m => String -> GenericHandler m ClosedTheory
, parseThy :: String -> IO (Either String (ClosedTheory))
-- ^ Close an open theory according to command-line arguments.
, diffParseThy :: String -> IO (Either String (ClosedDiffTheory))
-- ^ Close an open theory according to command-line arguments.
, thyWf :: String -> IO String
-- ^ Report on the wellformedness of a theory according to command-line arguments.
, theoryVar :: MVar (TheoryMap)
-- ^ MVar that holds the theory map
, threadVar :: MVar ThreadMap
-- ^ MVar that holds the thread map
, autosaveProofstate :: Bool
-- ^ Automatically store theory map
, graphCmd :: (String, FilePath)
-- ^ The dot or json command with additional flag to indicate choice dot, json, ...
, imageFormat :: ImageFormat
-- ^ The image-format used for rendering graphs
, defaultAutoProver :: AutoProver
-- ^ The default prover to use for automatic proving.
, debug :: Bool
-- ^ Output debug messages
, isDiffTheory :: Bool
-- ^ Whether the loaded theory is a diff theory
}
-- | Simple data type for generating JSON responses.
data JsonResponse
= JsonHtml T.Text Content -- ^ Title and HTML content
| JsonAlert T.Text -- ^ Alert/dialog box with message
| JsonRedirect T.Text -- ^ Redirect to given URL
-- | Data type representing origin of theory.
-- Command line with file path, upload with filename (not path),
-- or created by interactive mode (e.g. through editing).
data TheoryOrigin = Local FilePath | Upload String | Interactive
deriving (Show, Eq, Ord, Generic, Bin.Binary, NFData)
-- | Data type containing both the theory and its index, making it easier to
-- pass the two around (since they are always tied to each other). We also
-- keep some extra bookkeeping information.
data TheoryInfo = TheoryInfo
{ tiIndex :: TheoryIdx -- ^ Index of theory.
, tiTheory :: ClosedTheory -- ^ The closed theory.
, tiTime :: ZonedTime -- ^ Time theory was loaded.
, tiParent :: Maybe TheoryIdx -- ^ Prev theory in history
, tiPrimary :: Bool -- ^ This is the originally loaded theory.
, tiOrigin :: TheoryOrigin -- ^ Origin of theory.
, tiAutoProver :: AutoProver -- ^ The automatic prover to use.
} deriving (Generic, Bin.Binary)
-- | Data type containing both the theory and its index, making it easier to
-- pass the two around (since they are always tied to each other). We also
-- keep some extra bookkeeping information.
data DiffTheoryInfo = DiffTheoryInfo
{ dtiIndex :: TheoryIdx -- ^ Index of theory.
, dtiTheory :: ClosedDiffTheory -- ^ The closed theory.
, dtiTime :: ZonedTime -- ^ Time theory was loaded.
, dtiParent :: Maybe TheoryIdx -- ^ Prev theory in history
, dtiPrimary :: Bool -- ^ This is the originally loaded theory.
, dtiOrigin :: TheoryOrigin -- ^ Origin of theory.
, dtiAutoProver :: AutoProver -- ^ The automatic prover to use.
} deriving (Generic, Bin.Binary)
-- | We use the ordering in order to display loaded theories to the user.
-- We first compare by name, then by time loaded, and then by source: Theories
-- that were loaded from the command-line are displayed earlier than
-- interactively loaded ones.
compareTI :: TheoryInfo -> TheoryInfo -> Ordering
compareTI (TheoryInfo _ i1 t1 p1 a1 o1 _) (TheoryInfo _ i2 t2 p2 a2 o2 _) =
mconcat
[ comparing (get thyName) i1 i2
, comparing zonedTimeToUTC t1 t2
, compare a1 a2
, compare p1 p2
, compare o1 o2
]
-- | We use the ordering in order to display loaded theories to the user.
-- We first compare by name, then by time loaded, and then by source: Theories
-- that were loaded from the command-line are displayed earlier than
-- interactively loaded ones.
compareDTI :: DiffTheoryInfo -> DiffTheoryInfo -> Ordering
compareDTI (DiffTheoryInfo _ i1 t1 p1 a1 o1 _) (DiffTheoryInfo _ i2 t2 p2 a2 o2 _) =
mconcat
[ comparing (get diffThyName) i1 i2
, comparing zonedTimeToUTC t1 t2
, compare a1 a2
, compare p1 p2
, compare o1 o2
]
data EitherTheoryInfo = Trace TheoryInfo | Diff DiffTheoryInfo deriving (Generic, Bin.Binary)
-- instance Bin.Binary TheoryInfo
-- instance Bin.Binary DiffTheoryInfo
-- instance Bin.Binary EitherTheoryInfo
{- instance Bin.Binary EitherTheoryInfo where
put (Trace i) = do Bin.put (0 :: Bin.Word8)
Bin.put i
put (Diff i) = do Bin.put (1 :: Bin.Word8)
Bin.put i -}
{- get = do t <- get :: Bin.Get Bin.Word8
case t of
0 -> do i <- Bin.get
return (Trace i)
1 -> do i <- Bin.get
return (Diff i) -}
{- get = do tag <- Bin.getWord8
case tag of
0 -> liftM Trace get
1 -> liftM Diff get -}
-- Direct access functions for the EitherTheoryInfo type
getEitherTheoryName :: EitherTheoryInfo -> String
getEitherTheoryName (Trace i) = get thyName (tiTheory i)
getEitherTheoryName (Diff i) = get diffThyName (dtiTheory i)
isTheoryInfo :: EitherTheoryInfo -> Bool
isTheoryInfo (Trace _) = True
isTheoryInfo (Diff _) = False
isDiffTheoryInfo :: EitherTheoryInfo -> Bool
isDiffTheoryInfo (Trace _) = False
isDiffTheoryInfo (Diff _) = True
getEitherTheoryTime :: EitherTheoryInfo -> ZonedTime
getEitherTheoryTime (Trace i) = (tiTime i)
getEitherTheoryTime (Diff i) = (dtiTime i)
getEitherTheoryPrimary :: EitherTheoryInfo -> Bool
getEitherTheoryPrimary (Trace i) = (tiPrimary i)
getEitherTheoryPrimary (Diff i) = (dtiPrimary i)
getEitherTheoryOrigin :: EitherTheoryInfo -> TheoryOrigin
getEitherTheoryOrigin (Trace i) = (tiOrigin i)
getEitherTheoryOrigin (Diff i) = (dtiOrigin i)
getEitherTheoryIndex :: EitherTheoryInfo -> TheoryIdx
getEitherTheoryIndex (Trace i) = (tiIndex i)
getEitherTheoryIndex (Diff i) = (dtiIndex i)
-- | We use the ordering in order to display loaded theories to the user.
-- We first compare by name, then by time loaded, and then by source: Theories
-- that were loaded from the command-line are displayed earlier than
-- interactively loaded ones.
compareEDTI :: EitherTheoryInfo -> EitherTheoryInfo -> Ordering
compareEDTI (Trace (TheoryInfo _ i1 t1 p1 a1 o1 _)) (Trace (TheoryInfo _ i2 t2 p2 a2 o2 _)) =
mconcat
[ comparing (get thyName) i1 i2
, comparing zonedTimeToUTC t1 t2
, compare a1 a2
, compare p1 p2
, compare o1 o2
]
compareEDTI (Diff (DiffTheoryInfo _ i1 t1 p1 a1 o1 _)) (Diff (DiffTheoryInfo _ i2 t2 p2 a2 o2 _)) =
mconcat
[ comparing (get diffThyName) i1 i2
, comparing zonedTimeToUTC t1 t2
, compare a1 a2
, compare p1 p2
, compare o1 o2
]
compareEDTI (Diff (DiffTheoryInfo _ i1 t1 p1 a1 o1 _)) (Trace (TheoryInfo _ i2 t2 p2 a2 o2 _)) =
mconcat
[ compare ((get diffThyName) i1) ((get thyName) i2)
, comparing zonedTimeToUTC t1 t2
, compare a1 a2
, compare p1 p2
, compare o1 o2
]
compareEDTI (Trace (TheoryInfo _ i1 t1 p1 a1 o1 _)) (Diff (DiffTheoryInfo _ i2 t2 p2 a2 o2 _)) =
mconcat
[ compare ((get thyName) i1) ((get diffThyName) i2)
, comparing zonedTimeToUTC t1 t2
, compare a1 a2
, compare p1 p2
, compare o1 o2
]
instance Eq (TheoryInfo) where
(==) t1 t2 = compareTI t1 t2 == EQ
instance Ord (TheoryInfo) where
compare = compareTI
instance Eq (DiffTheoryInfo) where
(==) t1 t2 = compareDTI t1 t2 == EQ
instance Ord (DiffTheoryInfo) where
compare = compareDTI
instance Eq (EitherTheoryInfo) where
(==) t1 t2 = compareEDTI t1 t2 == EQ
instance Ord (EitherTheoryInfo) where
compare = compareEDTI
-- | Simple data type for specifying a path to a specific
-- item within a theory.
data TheoryPath
= TheoryHelp -- ^ The help view (help and info about theory)
| TheoryLemma String -- ^ Theory lemma with given name
| TheorySource SourceKind Int Int -- ^ Required cases (i'th source, j'th case)
| TheoryProof String ProofPath -- ^ Proof path within proof for given lemma
| TheoryMethod String ProofPath Int -- ^ Apply the proof method to proof path
| TheoryRules -- ^ Theory rules
| TheoryMessage -- ^ Theory message deduction
deriving (Eq, Show, Read)
-- | Simple data type for specifying a path to a specific
-- item within a theory.
data DiffTheoryPath
= DiffTheoryHelp -- ^ The help view (help and info about theory)
| DiffTheoryLemma Side String -- ^ Theory lemma with given name and side
| DiffTheoryDiffLemma String -- ^ Theory DiffLemma with given name
| DiffTheorySource Side SourceKind Bool Int Int -- ^ Required cases (i'th source, j'th case)
| DiffTheoryProof Side String ProofPath -- ^ Proof path within proof for given lemma
| DiffTheoryDiffProof String ProofPath -- ^ Proof path within proof for given lemma
| DiffTheoryMethod Side String ProofPath Int -- ^ Apply the proof method to proof path
| DiffTheoryDiffMethod String ProofPath Int -- ^ Apply the proof method to proof path
| DiffTheoryRules Side Bool -- ^ Theory rules per side
| DiffTheoryDiffRules -- ^ Theory rules unprocessed
| DiffTheoryMessage Side Bool -- ^ Theory message deduction per side
deriving (Eq, Show, Read)
-- | Render a theory path to a list of strings. Note that we prefix an
-- underscore to the empty string and strings starting with an underscore.
-- This avoids empty path segments, which seem to trip up certain versions of
-- Yesod.
renderTheoryPath :: TheoryPath -> [String]
renderTheoryPath =
map prefixWithUnderscore . go
where
go TheoryHelp = ["help"]
go TheoryRules = ["rules"]
go TheoryMessage = ["message"]
go (TheoryLemma name) = ["lemma", name]
go (TheorySource k i j) = ["cases", show k, show i, show j]
go (TheoryProof lemma path) = "proof" : lemma : path
go (TheoryMethod lemma path idx) = "method" : lemma : show idx : path
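-- As an illustration (the lemma name is made up, not from the original
-- source): renderTheoryPath (TheoryProof "secrecy" ["0","1"]) yields
-- ["proof","secrecy","0","1"], with prefixWithUnderscore applied to each segment.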
-- | Render a theory path to a list of strings. Note that we prefix an
-- underscore to the empty string and strings starting with an underscore.
-- This avoids empty path segments, which seem to trip up certain versions of
-- Yesod.
renderDiffTheoryPath :: DiffTheoryPath -> [String]
renderDiffTheoryPath =
map prefixWithUnderscore . go
where
go DiffTheoryHelp = ["help"]
go (DiffTheoryLemma s name) = ["lemma", show s, name]
go (DiffTheoryDiffLemma name) = ["difflemma", name]
go (DiffTheorySource s k i j d) = ["cases", show s, show k, show i, show j, show d]
go (DiffTheoryProof s lemma path) = "proof" : show s : lemma : path
go (DiffTheoryDiffProof lemma path) = "diffProof" : lemma : path
go (DiffTheoryMethod s lemma path idx) = "method" : show s : lemma : show idx : path
go (DiffTheoryDiffMethod lemma path idx) = "diffMethod" : lemma : show idx : path
go (DiffTheoryRules s d) = ["rules", show s, show d]
go (DiffTheoryDiffRules) = ["diffrules"]
go (DiffTheoryMessage s d) = ["message", show s, show d]
-- | Prefix an underscore to the empty string and strings starting with an
-- underscore.
prefixWithUnderscore :: String -> String
prefixWithUnderscore "" = "_"
prefixWithUnderscore cs@('_':_) = '_' : cs
prefixWithUnderscore cs = cs
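-- For illustration (these equations are not in the original source, but
-- follow directly from the three clauses above):
--
-- > prefixWithUnderscore "" == "_"
-- > prefixWithUnderscore "_x" == "__x"
-- > prefixWithUnderscore "lemma" == "lemma"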
-- | Remove an underscore prefix. It holds that
--
-- > unprefixUnderscore . prefixWithUnderscore = id
--
-- The inverted composition holds for all strings except the empty string and
-- strings starting with an underscore.
unprefixUnderscore :: String -> String
unprefixUnderscore "_" = ""
unprefixUnderscore ('_':cs@('_':_)) = cs
unprefixUnderscore cs = cs
-- | Parse a list of strings into a theory path.
parseTheoryPath :: [String] -> Maybe TheoryPath
parseTheoryPath =
parse . map unprefixUnderscore
where
parse [] = Nothing
parse (x:xs) = case x of
"help" -> Just TheoryHelp
"rules" -> Just TheoryRules
"message" -> Just TheoryMessage
"lemma" -> parseLemma xs
"cases" -> parseCases xs
"proof" -> parseProof xs
"method" -> parseMethod xs
_ -> Nothing
safeRead = listToMaybe . map fst . reads
parseLemma ys = TheoryLemma <$> listToMaybe ys
parseProof (y:ys) = Just (TheoryProof y ys)
parseProof _ = Nothing
parseMethod (y:z:zs) = safeRead z >>= Just . TheoryMethod y zs
parseMethod _ = Nothing
parseCases (kind:y:z:_) = do
k <- case kind of "refined" -> return RefinedSource
"raw" -> return RawSource
_ -> Nothing
m <- safeRead y
n <- safeRead z
return (TheorySource k m n)
parseCases _ = Nothing
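-- A round-trip illustration (the lemma name is made up, not from the original
-- source): parseTheoryPath ["proof","secrecy","0"] yields
-- Just (TheoryProof "secrecy" ["0"]), undoing the rendering shown above.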
-- | Parse a list of strings into a theory path.
parseDiffTheoryPath :: [String] -> Maybe DiffTheoryPath
parseDiffTheoryPath =
parse . map unprefixUnderscore
where
parse [] = Nothing
parse (x:xs) = case x of
"help" -> Just DiffTheoryHelp
"diffrules" -> Just DiffTheoryDiffRules
"rules" -> parseRules xs
"message" -> parseMessage xs
"lemma" -> parseLemma xs
"difflemma" -> parseDiffLemma xs
"cases" -> parseCases xs
"proof" -> parseProof xs
"diffProof" -> parseDiffProof xs
"method" -> parseMethod xs
"diffMethod"-> parseDiffMethod xs
_ -> Nothing
safeRead :: Read a => String -> Maybe a
safeRead = listToMaybe . map fst . reads
parseRules :: [String] -> Maybe DiffTheoryPath
parseRules (y:z:_) = do
s <- case y of "LHS" -> return LHS
"RHS" -> return RHS
_ -> Nothing
d <- case z of "True" -> return True
"False" -> return False
_ -> Nothing
return (DiffTheoryRules s d)
parseRules _ = Nothing
parseMessage :: [String] -> Maybe DiffTheoryPath
parseMessage (y:z:_) = do
s <- case y of "LHS" -> return LHS
"RHS" -> return RHS
_ -> Nothing
d <- case z of "True" -> return True
"False" -> return False
_ -> Nothing
return (DiffTheoryMessage s d)
parseMessage _ = Nothing
parseLemma :: [String] -> Maybe DiffTheoryPath
parseLemma (y:ys) = do
s <- case y of "LHS" -> return LHS
"RHS" -> return RHS
_ -> Nothing
return (DiffTheoryLemma s (head ys))
parseLemma _ = Nothing
parseDiffLemma :: [String] -> Maybe DiffTheoryPath
parseDiffLemma ys = DiffTheoryDiffLemma <$> listToMaybe ys
parseProof :: [String] -> Maybe DiffTheoryPath
parseProof (y:z:zs) = do
s <- case y of "LHS" -> return LHS
"RHS" -> return RHS
_ -> Nothing
return (DiffTheoryProof s z zs)
parseProof _ = Nothing
parseDiffProof :: [String] -> Maybe DiffTheoryPath
parseDiffProof (z:zs) = do
return (DiffTheoryDiffProof z zs)
parseDiffProof _ = Nothing
parseMethod :: [String] -> Maybe DiffTheoryPath
parseMethod (x:y:z:zs) = do
s <- case x of "LHS" -> return LHS
"RHS" -> return RHS
_ -> Nothing
i <- safeRead z
return (DiffTheoryMethod s y zs i)
parseMethod _ = Nothing
parseDiffMethod :: [String] -> Maybe DiffTheoryPath
parseDiffMethod (y:z:zs) = do
i <- safeRead z
return (DiffTheoryDiffMethod y zs i)
parseDiffMethod _ = Nothing
parseCases :: [String] -> Maybe DiffTheoryPath
parseCases (x:kind:pd:y:z:_) = do
s <- case x of "LHS" -> return LHS
"RHS" -> return RHS
_ -> Nothing
k <- case kind of "refined" -> return RefinedSource
"raw" -> return RawSource
_ -> Nothing
d <- case pd of "True" -> return True
"False" -> return False
_ -> Nothing
m <- safeRead y
n <- safeRead z
return (DiffTheorySource s k d m n)
parseCases _ = Nothing
type RenderUrl = Route (WebUI) -> T.Text
------------------------------------------------------------------------------
-- Routing
------------------------------------------------------------------------------
-- | Static routing for our application.
-- Note that handlers ending in R are general handlers,
-- whereas handlers ending in MR are for the main view
-- and the ones ending in DR are for the debug view.
mkYesodData "WebUI" [parseRoutes|
/ RootR GET POST
/thy/trace/#Int/overview/*TheoryPath OverviewR GET
/thy/trace/#Int/source TheorySourceR GET
/thy/trace/#Int/message TheoryMessageDeductionR GET
/thy/trace/#Int/main/*TheoryPath TheoryPathMR GET
-- /thy/trace/#Int/debug/*TheoryPath TheoryPathDR GET
/thy/trace/#Int/graph/*TheoryPath TheoryGraphR GET
/thy/trace/#Int/autoprove/#SolutionExtractor/#Int/*TheoryPath AutoProverR GET
/thy/trace/#Int/next/#String/*TheoryPath NextTheoryPathR GET
/thy/trace/#Int/prev/#String/*TheoryPath PrevTheoryPathR GET
-- /thy/trace/#Int/save SaveTheoryR GET
/thy/trace/#Int/download/#String DownloadTheoryR GET
-- /thy/trace/#Int/edit/source EditTheoryR GET POST
-- /thy/trace/#Int/edit/path/*TheoryPath EditPathR GET POST
/thy/trace/#Int/del/path/*TheoryPath DeleteStepR GET
/thy/trace/#Int/unload UnloadTheoryR GET
/thy/equiv/#Int/overview/*DiffTheoryPath OverviewDiffR GET
/thy/equiv/#Int/source TheorySourceDiffR GET
/thy/equiv/#Int/message TheoryMessageDeductionDiffR GET
/thy/equiv/#Int/main/*DiffTheoryPath TheoryPathDiffMR GET
-- /thy/equiv/#Int/debug/*DiffTheoryPath TheoryPathDiffDR GET
/thy/equiv/#Int/graph/*DiffTheoryPath TheoryGraphDiffR GET
/thy/equiv/#Int/mirror/*DiffTheoryPath TheoryMirrorDiffR GET
/thy/equiv/#Int/autoprove/#SolutionExtractor/#Int/#Side/*DiffTheoryPath AutoProverDiffR GET
/thy/equiv/#Int/autoproveDiff/#SolutionExtractor/#Int/*DiffTheoryPath AutoDiffProverR GET
/thy/equiv/#Int/next/#String/*DiffTheoryPath NextTheoryPathDiffR GET
/thy/equiv/#Int/prev/#String/*DiffTheoryPath PrevTheoryPathDiffR GET
-- /thy/equiv/#Int/save SaveTheoryR GET
/thy/equiv/#Int/download/#String DownloadTheoryDiffR GET
-- /thy/equiv/#Int/edit/source EditTheoryR GET POST
-- /thy/equiv/#Int/edit/path/*DiffTheoryPath EditPathDiffR GET POST
/thy/equiv/#Int/del/path/*DiffTheoryPath DeleteStepDiffR GET
/thy/equiv/#Int/unload UnloadTheoryDiffR GET
/kill KillThreadR GET
-- /threads ThreadsR GET
/robots.txt RobotsR GET
/favicon.ico FaviconR GET
/static StaticR Static getStatic
|]
instance PathPiece SolutionExtractor where
toPathPiece CutNothing = "characterize"
toPathPiece CutDFS = "idfs"
toPathPiece CutBFS = "bfs"
toPathPiece CutSingleThreadDFS = "seqdfs"
fromPathPiece "characterize" = Just CutNothing
fromPathPiece "idfs" = Just CutDFS
fromPathPiece "bfs" = Just CutBFS
fromPathPiece "seqdfs" = Just CutSingleThreadDFS
fromPathPiece _ = Nothing
instance PathPiece Side where
toPathPiece LHS = "LHS"
toPathPiece RHS = "RHS"
fromPathPiece "LHS" = Just LHS
fromPathPiece "RHS" = Just RHS
fromPathPiece _ = Nothing
-- | MultiPiece instance for TheoryPath.
instance PathMultiPiece TheoryPath where
toPathMultiPiece = map T.pack . renderTheoryPath
fromPathMultiPiece = parseTheoryPath . map T.unpack
-- | MultiPiece instance for DiffTheoryPath.
instance PathMultiPiece DiffTheoryPath where
toPathMultiPiece = map T.pack . renderDiffTheoryPath
fromPathMultiPiece = parseDiffTheoryPath . map T.unpack
-- Instance of the Yesod typeclass.
instance Yesod WebUI where
-- | The approot. We can leave this empty because the
-- application is always served from the root of the server.
approot = ApprootStatic T.empty
-- | The default layout for rendering.
defaultLayout = defaultLayout'
-- | The path cleaning function. We make sure empty strings
-- are not scrubbed from the end of the list. The default
-- cleanPath function forces canonical URLs.
cleanPath _ = Right
------------------------------------------------------------------------------
-- Default layout
------------------------------------------------------------------------------
-- | Our application's default layout template.
-- Note: We define the default layout here even though it doesn't really
-- belong in the "types" module in order to avoid mutually recursive modules.
-- defaultLayout' :: (Yesod master, Route master ~ WebUIRoute)
-- => Widget master () -- ^ Widget to embed in layout
-- -> Handler master Html
defaultLayout' :: Widget -> Handler Html
defaultLayout' w = do
page <- widgetToPageContent w
message <- getMessage
withUrlRenderer [hamlet|
$newline never
!!!
<html>
<head>
<title>#{pageTitle page}
<link rel=stylesheet href=/static/css/tamarin-prover-ui.css>
<link rel=stylesheet href=/static/css/jquery-contextmenu.css>
<link rel=stylesheet href=/static/css/smoothness/jquery-ui.css>
<script src=/static/js/jquery.js></script>
<script src=/static/js/jquery-ui.js></script>
<script src=/static/js/jquery-layout.js></script>
<script src=/static/js/jquery-cookie.js></script>
<script src=/static/js/jquery-superfish.js></script>
<script src=/static/js/jquery-contextmenu.js></script>
<script src=/static/js/tamarin-prover-ui.js></script>
^{pageHead page}
<body>
$maybe msg <- message
<p.message>#{msg}
<p.loading>
Loading, please wait...
\ <a id=cancel href='#'>Cancel</a>
^{pageBody page}
<div#dialog>
<ul#contextMenu>
<li.autoprove>
<a href="#autoprove">Autoprove</a>
|]
-- <li.delstep>
-- <a href="#del/path">Remove step</a>
|
kmilner/tamarin-prover
|
src/Web/Types.hs
|
gpl-3.0
| 26,863 | 0 | 13 | 7,643 | 4,588 | 2,447 | 2,141 | -1 | -1 |
{-# language OverloadedStrings #-}
module Main where
import Spieler
import Game
import Bank
import Registrar
import Logger
import State
import Chart ( chart_location )
import Rating ( taxman, taxman2, chartman )
import Network.Wai.Handler.Warp
import Network.HTTP.Types (statusOK)
import Network.Wai
import Control.Concurrent
import Control.Concurrent.STM
import System.IO
import Control.Monad ( forever )
import qualified Data.Map as M
import System.Environment
import qualified Data.ByteString.Lazy as LBS
import Control.Monad.IO.Class ( liftIO )
main = do
hSetBuffering stderr LineBuffering
hSetBuffering stdout LineBuffering
[ port, passwd_file ] <- getArgs
passwd <- readFile passwd_file
let passwd_map = M.fromList $ do
l <- lines passwd ; [ n, p ] <- return $ words l
return ( Name n, Password p )
server <- State.make passwd_map
forkIO $ forever $ game server
forkIO $ forever $ do
chartman server
threadDelay ( 60 * 10 ^ 6 )
taxman2 server
Network.Wai.Handler.Warp.runSettings
( defaultSettings { settingsTimeout = 1
, settingsPort = read port }
) $ \ req -> case pathInfo req of
[ "rpc" ] -> registrar server req
[ "log" ] -> logger server req
[ "chart" ] -> do
s <- liftIO $ LBS.readFile chart_location
return
$ responseLBS statusOK [("Content-Type", "image/png")] s
_ -> return
$ responseLBS statusOK [("Content-Type", "text/plain")]
$ "the server is here, but the service url is wrong"
|
jwaldmann/mex
|
src/Server.hs
|
gpl-3.0
| 1,654 | 0 | 17 | 467 | 453 | 242 | 211 | 48 | 4 |
import Data.List (partition)
quicksort [] = []
quicksort (p:xs) = (quicksort lesser) ++ [p] ++ (quicksort greater)
where (lesser, greater) = partition (< p) xs
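-- Usage sketch (illustrative only, not part of the original file):
--
-- >>> quicksort [3,1,2]
-- [1,2,3]
--
-- Each call takes the head as pivot and partitions the remaining elements
-- into those below the pivot and those at or above it before sorting both
-- halves recursively.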
|
BaReinhard/Hacktoberfest-Data-Structure-and-Algorithms
|
algorithms/quick_sort/haskell/quicksort.hs
|
gpl-3.0
| 134 | 0 | 8 | 25 | 74 | 39 | 35 | 3 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.SSM.DescribeAssociation
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Describes the associations for the specified configuration document or
-- instance.
--
-- <http://docs.aws.amazon.com/ssm/latest/APIReference/API_DescribeAssociation.html>
module Network.AWS.SSM.DescribeAssociation
(
-- * Request
DescribeAssociation
-- ** Request constructor
, describeAssociation
-- ** Request lenses
, daInstanceId
, daName
-- * Response
, DescribeAssociationResponse
-- ** Response constructor
, describeAssociationResponse
-- ** Response lenses
, darAssociationDescription
) where
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.SSM.Types
import qualified GHC.Exts
data DescribeAssociation = DescribeAssociation
{ _daInstanceId :: Text
, _daName :: Text
} deriving (Eq, Ord, Read, Show)
-- | 'DescribeAssociation' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'daInstanceId' @::@ 'Text'
--
-- * 'daName' @::@ 'Text'
--
describeAssociation :: Text -- ^ 'daName'
-> Text -- ^ 'daInstanceId'
-> DescribeAssociation
describeAssociation p1 p2 = DescribeAssociation
{ _daName = p1
, _daInstanceId = p2
}
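-- A minimal usage sketch (the document name and instance id below are
-- placeholder values, not taken from the original source):
--
-- > describeAssociation "my-config-document" "i-0123456789abcdef0"
--
-- builds a request with 'daName' and 'daInstanceId' set; both fields can be
-- adjusted afterwards through the 'daName' and 'daInstanceId' lenses.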
-- | The ID of the instance.
daInstanceId :: Lens' DescribeAssociation Text
daInstanceId = lens _daInstanceId (\s a -> s { _daInstanceId = a })
-- | The name of the configuration document.
daName :: Lens' DescribeAssociation Text
daName = lens _daName (\s a -> s { _daName = a })
newtype DescribeAssociationResponse = DescribeAssociationResponse
{ _darAssociationDescription :: Maybe AssociationDescription
} deriving (Eq, Read, Show)
-- | 'DescribeAssociationResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'darAssociationDescription' @::@ 'Maybe' 'AssociationDescription'
--
describeAssociationResponse :: DescribeAssociationResponse
describeAssociationResponse = DescribeAssociationResponse
{ _darAssociationDescription = Nothing
}
-- | Information about the association.
darAssociationDescription :: Lens' DescribeAssociationResponse (Maybe AssociationDescription)
darAssociationDescription =
lens _darAssociationDescription
(\s a -> s { _darAssociationDescription = a })
instance ToPath DescribeAssociation where
toPath = const "/"
instance ToQuery DescribeAssociation where
toQuery = const mempty
instance ToHeaders DescribeAssociation
instance ToJSON DescribeAssociation where
toJSON DescribeAssociation{..} = object
[ "Name" .= _daName
, "InstanceId" .= _daInstanceId
]
instance AWSRequest DescribeAssociation where
type Sv DescribeAssociation = SSM
type Rs DescribeAssociation = DescribeAssociationResponse
request = post "DescribeAssociation"
response = jsonResponse
instance FromJSON DescribeAssociationResponse where
parseJSON = withObject "DescribeAssociationResponse" $ \o -> DescribeAssociationResponse
<$> o .:? "AssociationDescription"
|
dysinger/amazonka
|
amazonka-ssm/gen/Network/AWS/SSM/DescribeAssociation.hs
|
mpl-2.0
| 4,078 | 0 | 9 | 870 | 503 | 305 | 198 | 64 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Admin.Customers.Chrome.Printers.BatchDeletePrinters
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Deletes printers in batch.
--
-- /See:/ <https://developers.google.com/admin-sdk/ Admin SDK API Reference> for @admin.customers.chrome.printers.batchDeletePrinters@.
module Network.Google.Resource.Admin.Customers.Chrome.Printers.BatchDeletePrinters
(
-- * REST Resource
CustomersChromePrintersBatchDeletePrintersResource
-- * Creating a Request
, customersChromePrintersBatchDeletePrinters
, CustomersChromePrintersBatchDeletePrinters
-- * Request Lenses
, ccpbdpParent
, ccpbdpXgafv
, ccpbdpUploadProtocol
, ccpbdpAccessToken
, ccpbdpUploadType
, ccpbdpPayload
, ccpbdpCallback
) where
import Network.Google.Directory.Types
import Network.Google.Prelude
-- | A resource alias for @admin.customers.chrome.printers.batchDeletePrinters@ method which the
-- 'CustomersChromePrintersBatchDeletePrinters' request conforms to.
type CustomersChromePrintersBatchDeletePrintersResource
=
"admin" :>
"directory" :>
"v1" :>
Capture "parent" Text :>
"chrome" :>
"printers:batchDeletePrinters" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] BatchDeletePrintersRequest :>
Post '[JSON] BatchDeletePrintersResponse
-- | Deletes printers in batch.
--
-- /See:/ 'customersChromePrintersBatchDeletePrinters' smart constructor.
data CustomersChromePrintersBatchDeletePrinters =
CustomersChromePrintersBatchDeletePrinters'
{ _ccpbdpParent :: !Text
, _ccpbdpXgafv :: !(Maybe Xgafv)
, _ccpbdpUploadProtocol :: !(Maybe Text)
, _ccpbdpAccessToken :: !(Maybe Text)
, _ccpbdpUploadType :: !(Maybe Text)
, _ccpbdpPayload :: !BatchDeletePrintersRequest
, _ccpbdpCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'CustomersChromePrintersBatchDeletePrinters' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ccpbdpParent'
--
-- * 'ccpbdpXgafv'
--
-- * 'ccpbdpUploadProtocol'
--
-- * 'ccpbdpAccessToken'
--
-- * 'ccpbdpUploadType'
--
-- * 'ccpbdpPayload'
--
-- * 'ccpbdpCallback'
customersChromePrintersBatchDeletePrinters
:: Text -- ^ 'ccpbdpParent'
-> BatchDeletePrintersRequest -- ^ 'ccpbdpPayload'
-> CustomersChromePrintersBatchDeletePrinters
customersChromePrintersBatchDeletePrinters pCcpbdpParent_ pCcpbdpPayload_ =
CustomersChromePrintersBatchDeletePrinters'
{ _ccpbdpParent = pCcpbdpParent_
, _ccpbdpXgafv = Nothing
, _ccpbdpUploadProtocol = Nothing
, _ccpbdpAccessToken = Nothing
, _ccpbdpUploadType = Nothing
, _ccpbdpPayload = pCcpbdpPayload_
, _ccpbdpCallback = Nothing
}
-- | Required. The name of the customer. Format: customers\/{customer_id}
ccpbdpParent :: Lens' CustomersChromePrintersBatchDeletePrinters Text
ccpbdpParent
= lens _ccpbdpParent (\ s a -> s{_ccpbdpParent = a})
-- | V1 error format.
ccpbdpXgafv :: Lens' CustomersChromePrintersBatchDeletePrinters (Maybe Xgafv)
ccpbdpXgafv
= lens _ccpbdpXgafv (\ s a -> s{_ccpbdpXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
ccpbdpUploadProtocol :: Lens' CustomersChromePrintersBatchDeletePrinters (Maybe Text)
ccpbdpUploadProtocol
= lens _ccpbdpUploadProtocol
(\ s a -> s{_ccpbdpUploadProtocol = a})
-- | OAuth access token.
ccpbdpAccessToken :: Lens' CustomersChromePrintersBatchDeletePrinters (Maybe Text)
ccpbdpAccessToken
= lens _ccpbdpAccessToken
(\ s a -> s{_ccpbdpAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
ccpbdpUploadType :: Lens' CustomersChromePrintersBatchDeletePrinters (Maybe Text)
ccpbdpUploadType
= lens _ccpbdpUploadType
(\ s a -> s{_ccpbdpUploadType = a})
-- | Multipart request metadata.
ccpbdpPayload :: Lens' CustomersChromePrintersBatchDeletePrinters BatchDeletePrintersRequest
ccpbdpPayload
= lens _ccpbdpPayload
(\ s a -> s{_ccpbdpPayload = a})
-- | JSONP
ccpbdpCallback :: Lens' CustomersChromePrintersBatchDeletePrinters (Maybe Text)
ccpbdpCallback
= lens _ccpbdpCallback
(\ s a -> s{_ccpbdpCallback = a})
instance GoogleRequest
CustomersChromePrintersBatchDeletePrinters
where
type Rs CustomersChromePrintersBatchDeletePrinters =
BatchDeletePrintersResponse
type Scopes
CustomersChromePrintersBatchDeletePrinters
=
'["https://www.googleapis.com/auth/admin.chrome.printers"]
requestClient
CustomersChromePrintersBatchDeletePrinters'{..}
= go _ccpbdpParent _ccpbdpXgafv _ccpbdpUploadProtocol
_ccpbdpAccessToken
_ccpbdpUploadType
_ccpbdpCallback
(Just AltJSON)
_ccpbdpPayload
directoryService
where go
= buildClient
(Proxy ::
Proxy
CustomersChromePrintersBatchDeletePrintersResource)
mempty
|
brendanhay/gogol
|
gogol-admin-directory/gen/Network/Google/Resource/Admin/Customers/Chrome/Printers/BatchDeletePrinters.hs
|
mpl-2.0
| 6,208 | 0 | 20 | 1,409 | 792 | 461 | 331 | 126 | 1 |
import Hastistics
import Hastistics.Distributions
import Hastistics.Types hiding ((-))
import Hastistics.Data.CSV
simpleReport :: HSTable t => t -> HSReport
simpleReport t = select $
avgOf "Punkte" $ avgOf "Note" $
from t
genderReport :: HSTable t => t -> HSReport
genderReport t = select $
valueOf "Geschlecht" $ avgOf "Note" $ avgOf "Punkte" $
groupBy "Geschlecht" $
from t
genderGroupe :: HSTable t => t -> HSReport
genderGroupe t = select $
valueOf "Geschlecht" $ valueOf "Note" $
groupBy "Geschlecht" $
byrow $
from t
genderCount :: HSTable t => t -> HSReport
genderCount t = select $
valueOf "Geschlecht" $ count $
groupBy "Geschlecht" $
from t
sampleReport :: HSTable t => t -> HSReport
sampleReport t = select $
valueOf "Name" $ valueOf "Vorname" $ valueOf "Stichprobe" $
cust "Wst. fuer Testat" probabilityForTestat $
byrow $
from t
probabilityForTestat :: HSValue -> HSRow -> HSValue
probabilityForTestat _ r = HSDouble (1 - (hygecdf (2 - s) 10 2 7))
where s = fromHSIntegerToInteger(fieldValueOf "Stichprobe" r)
klassenDaten s = csvTable [toHSString, toHSString, toHSString, toHSInteger, toHSDouble, toHSDouble] s
main :: IO ()
main = do dat <- readFile "chrg.csv"
--print (simpleReport (klassenDaten dat))
--print (genderCount (genderGroupe (klassenDaten dat)))
--print (sampleReport (klassenDaten dat))
print (genderReport (klassenDaten dat))
|
fluescher/hastistics
|
tests/report.hs
|
lgpl-3.0
| 1,758 | 0 | 11 | 607 | 449 | 222 | 227 | 37 | 1 |
{-#LANGUAGE DeriveDataTypeable#-}
-----------------------------------------------------------------------------
-- |
-- Module : Bindings.Verba.ErrorCodes
-- Copyright : (c) Macil 2014
-- License : PublicDomain
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : unportable
--
-- FFI interface
--
-----------------------------------------------------------------------------
module Bindings.Verba.ErrorCodes where
import Data.Typeable (Typeable)
data VerbaError = VerbaError Int String String deriving (Eq, Show, Typeable)
verr :: Int -> VerbaError
verr 0 = VerbaError 0 "NO_ERROR" "Нет ошибки. Функция завершилась успешно"
verr 1 = VerbaError 1 "E_NO_MEM" "Не хватает динамической памяти"
verr 2 = VerbaError 2 "E_CONTROL" "Сбой криптографической функции или искажение тела библиотеки"
verr 3 = VerbaError 3 "E_DRIVER" "Ошибка датчика случайных чисел"
verr 4 = VerbaError 4 "E_IMMITO" "Не совпадает имитовставка - файл (блок памяти) искажен"
verr 6 = VerbaError 6 "E_KEY_NOT_FOUND" "Ключ не найден (или искажен)"
verr 7 = VerbaError 7 "E_PARAM" "Ошибка параметра обращения к функции"
verr 8 = VerbaError 8 "E_INIT" "Ошибка инициализации"
verr 10 = VerbaError 10 "E_MEM_LENGTH" "Неверная длина блока памяти"
verr 11 = VerbaError 11 "E_MEM_NOT_ENCRYPTED" "Попытка расшифровать незашифрованный блок памяти"
verr 12 = VerbaError 12 "E_MEM_NOT_SIGNED" "Попытка проверить подпись неподписанного блока памяти"
verr 21 = VerbaError 21 "E_OPEN_IN_FILE" "Ошибка открытия входного файла"
verr 22 = VerbaError 22 "E_OPEN_OUT_FILE" "Ошибка открытия выходного файла"
verr 23 = VerbaError 23 "E_WRITE_FILE" "Ошибка записи файла"
verr 24 = VerbaError 24 "E_READ_FILE" "Ошибка чтения файла"
verr 25 = VerbaError 25 "E_RENAME_FILE" "Ошибка переименования файла"
verr 26 = VerbaError 26 "E_FILE_LENGTH" "Неверная (например, нулевая) длина файла"
verr 27 = VerbaError 27 "E_SRC" "Несовпадение контрольной суммы зашифрованного файла"
verr 29 = VerbaError 29 "E_FILE_NOT_ENCRYPTED" "Попытка расшифрования незашифрованного файла"
verr 30 = VerbaError 30 "E_FILE_NOT_SIGNED" "Попытка проверки подписи неподписанного файла"
verr 31 = VerbaError 31 "E_SEEK" "Ошибка смещения файлового указателя"
verr 32 = VerbaError 32 "E_CLOSE" "Ошибка закрытия файла"
verr 33 = VerbaError 33 "E_DELETE_FILE" "Ошибка удаления файла"
verr 34 = VerbaError 34 "E_GK" "Ошибка при чтении GK"
verr 35 = VerbaError 35 "E_KC" "Ошибка при чтении KC"
verr 36 = VerbaError 36 "E_DEVICE" "Ошибка при обращении к сменному ключевому устройству"
verr 37 = VerbaError 37 "E_REDEFINE" "Попытка перезаписи ключа в драйвер ASYNCR"
verr 38 = VerbaError 38 "E_NO_FREE_SLOTS" "В драйвере ASYNCR нет свободных \"слотов\""
verr 39 = VerbaError 39 "E_KEY_NOT_SET" "Ошибка при загрузке ключа в драйвер ASYNCR"
verr 101 = VerbaError 101 "ERR_NUMP" "Номер ключа NUM или NUMP не соответствует считанному из драйвера Asyncr"
verr 102 = VerbaError 102 "ERR_HASH" "Значение хэш-функции не совпало"
verr 103 = VerbaError 103 "ERR_OPEN_SPR" "Ошибка при открытии файла со справочником открытых ключей"
verr 104 = VerbaError 104 "ERR_OPEN_IMM" "Ошибка при открытии файла с имитовставками"
verr 105 = VerbaError 105 "ERR_UZ" "Ошибка чтения UZ"
verr 106 = VerbaError 106 "ERR_CKD" "Ошибка чтения CKD или CKDI"
verr 107 = VerbaError 107 "ERR_IMM_SPR" "Длина файла со справочником не соответствует длине файла с имитовставками"
verr 108 = VerbaError 108 "ERR_READ_SPR" "Ошибка чтения файла со справочником открытых ключей"
verr 109 = VerbaError 109 "ERR_WRITE_SPR" "Ошибка записи в файл со справочником открытых ключей"
verr 110 = VerbaError 110 "ERR_READ_IMM" "Ошибка чтения файла с имитовставками"
verr 111 = VerbaError 111 "ERR_IMM" "Имитовставка неверна"
verr 112 = VerbaError 112 "ERR_COMPROM" "Открытый ключ скомпрометирован"
verr 113 = VerbaError 113 "ERR_CRE_DIR" "Ошибка при создании каталога"
verr 114 = VerbaError 114 "ERR_CRE_FILE" "Ошибка при создании файла *.imm, *.imp, или *.spr"
verr 115 = VerbaError 115 "ERR_EXIST_SPR" "В заданном каталоге уже существует файл *.spr"
verr 116 = VerbaError 116 "ERR_WRITE_IMM" "Ошибка записи в файл имитовставок"
verr 117 = VerbaError 117 "ERR_NO_KEY" "Указанный открытый ключ отсутствует в справочнике"
verr 118 = VerbaError 118 "ERR_LENGTH" "Неверная длина файла *.imm, *.imp, или *.spr"
verr 119 = VerbaError 119 "ERR_OPEN_TMP" "Ошибка открытия временного файла"
verr 120 = VerbaError 120 "ERR_SPR_EMPTY" "Справочник открытых ключей пуст"
verr 121 = VerbaError 121 "ERR_KEY_HEAD" "Заголовок открытого ключа искажен"
verr 122 = VerbaError 122 "ERR_FIND_SPR" "Справочник открытых ключей не найден"
verr 123 = VerbaError 123 "ERR_NO_RES" "Открытый ключ не является резервным"
verr 124 = VerbaError 124 "ERR_IMM_HEAD" "Заголовок файла с имитовставками искажен"
verr 125 = VerbaError 125 "ERR_NO_SIGN" "Нет имитовставки на открытый ключ"
verr 126 = VerbaError 126 "ERR_NO_IMM" "Нет имитовставки на открытый ключ"
verr 127 = VerbaError 127 "ERR_FLOP" "Ошибка при обращении к гибкому диску"
verr unk = VerbaError unk ("_E_UNK" ++ (show unk)) ("Неизвестная ошибка (код " ++ show unk ++ ")")
|
Macil-dev/verhface-ll
|
src/Bindings/Verba/ErrorCodes.hs
|
unlicense
| 7,482 | 0 | 9 | 1,315 | 950 | 458 | 492 | 62 | 1 |
-- the Out-of-the-Tarpit example in Haskell and Project:M36
{-# LANGUAGE DeriveAnyClass, DeriveGeneric, OverloadedStrings, DerivingVia #-}
import ProjectM36.Client
import ProjectM36.DataTypes.Primitive
import ProjectM36.Tupleable
import ProjectM36.Relation
import ProjectM36.Error
import Data.Either
import GHC.Generics
import Control.DeepSeq
import qualified Data.Text as T
import Data.Time.Calendar
import Data.Proxy
import Codec.Winery
--create various database value (atom) types
type Price = Double
type Name = T.Text
type Address = T.Text
data RoomType = Kitchen | Bathroom | LivingRoom
deriving (Generic, Atomable, Eq, Show, NFData)
deriving Serialise via WineryVariant RoomType
data PriceBand = Low | Medium | High | Premium
deriving (Generic, Atomable, Eq, Show, NFData)
deriving Serialise via WineryVariant PriceBand
data AreaCode = City | Suburban | Rural
deriving (Generic, Atomable, Eq, Show, NFData)
deriving Serialise via WineryVariant AreaCode
data SpeedBand = VeryFastBand | FastBand | MediumBand | SlowBand
deriving (Generic, Atomable, Eq, Show, NFData)
deriving Serialise via WineryVariant SpeedBand
main :: IO ()
main = do
--connect to the database
let connInfo = InProcessConnectionInfo NoPersistence emptyNotificationCallback []
check x = case x of
Left err -> error (show err)
Right x' -> x'
eConn <- connectProjectM36 connInfo
let conn = check eConn
--create a database session at the default branch of the fresh database
eSessionId <- createSessionAtHead conn "master"
let sessionId = check eSessionId
createSchema sessionId conn
insertSampleData sessionId conn
data Property = Property {
address :: T.Text,
price :: Price,
photo :: T.Text,
dateRegistered :: Day
}
deriving (Generic, Eq, Show)
instance Tupleable Property
data Offer = Offer {
offerAddress :: Address,
offerPrice :: Price,
offerDate :: Day,
bidderName :: Name,
bidderAddress :: Address,
decisionDate :: Day,
accepted :: Bool
}
deriving (Generic, Eq)
instance Tupleable Offer
data Decision = Decision {
decAddress :: Address,
decOfferDate :: Day, --the dec prefix is needed until OverloadedRecordFields is available
decBidderName :: Name,
decBidderAddress :: Address,
decDecisionDate :: Day,
decAccepted :: Bool
}
deriving (Generic, Eq)
instance Tupleable Decision
data Room = Room {
roomAddress :: Address,
roomName :: Name,
width :: Double,
breadth :: Double,
roomType :: RoomType
}
deriving (Generic, Eq)
instance Tupleable Room
data Floor = Floor {
floorAddress :: Address,
floorRoomName :: Name,
floorNum :: Integer
}
deriving (Generic, Eq)
instance Tupleable Floor
data Commission = Commission {
priceBand :: PriceBand,
areaCode :: AreaCode,
saleSpeed :: SpeedBand,
commission :: Price
} deriving (Generic, Eq)
instance Tupleable Commission
createSchema :: SessionId -> Connection -> IO ()
createSchema sessionId conn = do
--create attributes for relvars
let
--create uniqueness constraints
incDepKeys = map (uncurry databaseContextExprForUniqueKey)
[("property", ["address"]),
("offer", ["offerAddress", "offerDate", "bidderName", "bidderAddress"]),
("decision", ["decAddress", "decOfferDate", "decBidderName", "decBidderAddress"]),
("room", ["roomAddress", "roomName"]),
("floor", ["floorAddress", "floorRoomName"]),
--"commision" misspelled in OotT
("commission", ["priceBand", "areaCode", "saleSpeed"])
]
--create foreign key constraints
foreignKeys = [("offer_property_fk",
("offer", ["offerAddress"]),
("property", ["address"])),
("decision_offer_fk",
("decision", ["decAddress", "decOfferDate", "decBidderName", "decBidderAddress"]),
("offer", ["offerAddress", "offerDate", "bidderName", "bidderAddress"])),
("room_property_fk",
("room", ["roomAddress"]),
("property", ["address"])),
("floor_property_fk",
("floor", ["floorAddress"]),
("property", ["address"]))
]
incDepForeignKeys = map (\(n, a, b) -> databaseContextExprForForeignKey n a b) foreignKeys
--define the relvars
rvExprs = [toDefineExpr (Proxy :: Proxy Property) "property",
toDefineExpr (Proxy :: Proxy Offer) "offer",
toDefineExpr (Proxy :: Proxy Decision) "decision",
toDefineExpr (Proxy :: Proxy Room) "room",
toDefineExpr (Proxy :: Proxy Floor) "floor",
toDefineExpr (Proxy :: Proxy Commission) "commission"]
--create the new algebraic data types
new_adts = [toAddTypeExpr (Proxy :: Proxy RoomType),
toAddTypeExpr (Proxy :: Proxy PriceBand),
toAddTypeExpr (Proxy :: Proxy AreaCode),
toAddTypeExpr (Proxy :: Proxy SpeedBand)]
--create the stored atom functions
priceBandScript = "(\\(DoubleAtom price:_) -> do\n let band = if price < 10000.0 then \"Low\" else if price < 20000.0 then \"Medium\" else if price < 30000.0 then \"High\" else \"Premium\"\n let aType = ConstructedAtomType \"PriceBand\" empty\n pure (ConstructedAtom band aType [])) :: [Atom] -> Either AtomFunctionError Atom"
areaCodeScript = "(\\(TextAtom address:_) -> let aType = ConstructedAtomType \"AreaCode\" empty in if address == \"90210\" then pure (ConstructedAtom \"City\" aType []) else pure (ConstructedAtom \"Rural\" aType [])) :: [Atom] -> Either AtomFunctionError Atom"
speedBandScript = "(\\(DayAtom d1:DayAtom d2:_) -> do\n let aType = ConstructedAtomType \"SpeedBand\" empty\n (_, month1, _) = toGregorian d1\n (_, month2, _) = toGregorian d2\n if month1 == 11 && month2 == 11 then pure (ConstructedAtom \"VeryFast\" aType []) else pure (ConstructedAtom \"MediumBand\" aType [])) :: [Atom] -> Either AtomFunctionError Atom"
atomFuncs = [createScriptedAtomFunction "priceBandForPrice" [doubleTypeConstructor] (ADTypeConstructor "PriceBand" []) priceBandScript,
createScriptedAtomFunction "areaCodeForAddress" [textTypeConstructor] (ADTypeConstructor "AreaCode" []) areaCodeScript,
createScriptedAtomFunction "datesToSpeedBand" [dayTypeConstructor, dayTypeConstructor] (ADTypeConstructor "SpeedBand" []) speedBandScript
]
--gather up and execute all database updates
putStrLn "load relvars"
_ <- handleIOErrors $ mapM (executeDatabaseContextExpr sessionId conn) (new_adts ++ rvExprs ++ incDepKeys ++ incDepForeignKeys)
putStrLn "load atom functions"
_ <- handleIOErrors $ mapM (executeDatabaseContextIOExpr sessionId conn) atomFuncs
pure ()
insertSampleData :: SessionId -> Connection -> IO ()
insertSampleData sessionId conn = do
--insert a bunch of records
putStrLn "load data"
let properties = [Property { address = "123 Main St.",
price = 200000,
photo = "123_main.jpg",
dateRegistered = fromGregorian 2016 4 3},
Property { address = "456 Main St.",
price = 150000,
photo = "456_main.jpg",
dateRegistered = fromGregorian 2016 5 6}]
insertPropertiesExpr <- handleError $ toInsertExpr properties "property"
handleIOError $ executeDatabaseContextExpr sessionId conn insertPropertiesExpr
let offers = [Offer { offerAddress = "123 Main St.",
offerPrice = 180000,
offerDate = fromGregorian 2017 1 2,
bidderName = "Steve",
bidderAddress = "789 Main St.",
decisionDate = fromGregorian 2017 2 2,
accepted = False }]
insertOffersExpr <- handleError $ toInsertExpr offers "offer"
handleIOError $ executeDatabaseContextExpr sessionId conn insertOffersExpr
let rooms = [Room { roomAddress = "123 Main St.",
roomName = "Fabulous Kitchen",
width = 10,
breadth = 10,
roomType = Kitchen },
Room { roomAddress = "123 Main St.",
roomName = "Clean Bathroom",
width = 7,
breadth = 5,
roomType = Bathroom }]
insertRoomsExpr <- handleError $ toInsertExpr rooms "room"
handleIOError $ executeDatabaseContextExpr sessionId conn insertRoomsExpr
let decisions = [Decision { decAddress = "123 Main St.",
decOfferDate = fromGregorian 2017 1 2,
decBidderName = "Steve",
decBidderAddress = "789 Main St.",
decDecisionDate = fromGregorian 2017 05 04,
decAccepted = False }]
insertDecisionsExpr <- handleError $ toInsertExpr decisions "decision"
handleIOError $ executeDatabaseContextExpr sessionId conn insertDecisionsExpr
let floors = [Floor { floorAddress = "123 Main St.",
floorRoomName = "Bathroom",
floorNum = 1
}]
insertFloorsExpr <- handleError $ toInsertExpr floors "floor"
handleIOError $ executeDatabaseContextExpr sessionId conn insertFloorsExpr
let commissions = [Commission { priceBand = Medium,
areaCode = City,
saleSpeed = MediumBand,
commission = 10000 }]
insertCommissionsExpr <- handleError $ toInsertExpr commissions "commission"
handleIOError $ executeDatabaseContextExpr sessionId conn insertCommissionsExpr
--query some records, marshal them back to Haskell
properties' <- handleIOError $ executeRelationalExpr sessionId conn (RelationVariable "property" ())
props <- toList properties' >>= mapM (handleError . fromTuple) :: IO [Property]
print props
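  -- A hypothetical follow-up query (illustrative only; it reuses names already
  -- defined in this file and assumes the Offer row inserted above):
  --   offers' <- handleIOError $ executeRelationalExpr sessionId conn (RelationVariable "offer" ())
  --   offs <- toList offers' >>= mapM (handleError . fromTuple) :: IO [Offer]
  --   print offs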
handleError :: Either RelationalError a -> IO a
handleError eErr = case eErr of
Left err -> print err >> error "Died due to errors."
Right v -> pure v
handleIOError :: IO (Either RelationalError a) -> IO a
handleIOError m = do
e <- m
handleError e
handleIOErrors :: IO [Either RelationalError a] -> IO [a]
handleIOErrors m = do
eErrs <- m
case lefts eErrs of
[] -> pure (rights eErrs)
errs -> handleError (Left (someErrors errs))
|
agentm/project-m36
|
examples/out_of_the_tarpit.hs
|
unlicense
| 11,289 | 0 | 15 | 3,516 | 2,290 | 1,257 | 1,033 | -1 | -1 |
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
module P5 where
import Data.Monoid ((<>))
import Control.Lens
import Control.Monad.Except
import Control.Monad.Reader
import Control.Monad.Writer
import qualified Data.Text as T
import qualified Data.Text.IO as T
import P0
import P4
{-# ANN module ("HLint: ignore Reduce duplication" :: String) #-}
-- 35:38
loadFromDbIO
:: (MonadError e m, MonadReader r m,
AsDbError e, HasDbConfig r,
MonadIO m)
=> m MyData
loadFromDbIO = do
dbc <- ask
let c = view dbConn dbc
s = view dbSchema dbc
if c == "BAD" then
throwError $ review _QueryError s
else do
liftIO $ T.putStr ("loadFromDb " <> c <> " " <> s <> "> ")
liftIO T.getLine
loadFromDb
:: (MonadError e m, MonadReader r m,
AsDbError e, HasDbConfig r,
Monad m)
=> m MyData
loadFromDb = do
dbc <- ask
let c = view dbConn dbc
s = view dbSchema dbc
if c == "BAD" then
throwError $ review _QueryError s
else
return ("loadFromDb " <> c <> " " <> s <> " : 'HCData'")
sendOverNetIO
:: (MonadError e m, MonadReader r m,
AsNetworkError e, HasNetworkConfig r,
MonadIO m)
=> MyData
-> m ()
sendOverNetIO x = do
nc <- ask
let p = view port nc
s = view ssl nc
if p == (-1) then
throwError $ review _Timeout s
else
liftIO $ T.putStrLn ("sendOverNet: " <> x <> " " <> T.pack (show p) <> " " <> s)
sendOverNet
:: (MonadError e m, MonadReader r m,
AsNetworkError e, HasNetworkConfig r,
Monad m)
=> MyData
-> m T.Text
sendOverNet x = do
nc <- ask
let p = view port nc
s = view ssl nc
if p == (-1) then
throwError $ review _Timeout s
else
return ("sendOverNet " <> T.pack (show p) <> " " <> s <> " : '" <> x <> "'")
-- this would not compile at the end of P1
loadAndSendIO
:: (MonadError e m, MonadReader r m,
AsNetworkError e, HasNetworkConfig r,
AsDbError e, HasDbConfig r,
MonadIO m)
=> m ()
loadAndSendIO = loadFromDbIO >>= sendOverNetIO
-- this would not compile at the end of P1
loadAndSend
:: (MonadError e m, MonadReader r m,
AsDbError e, HasDbConfig r,
AsNetworkError e, HasNetworkConfig r,
Monad m)
=> m T.Text
loadAndSend = loadFromDb >>= sendOverNet
-- 39:00
newtype AppIO a =
AppIO { unAppIO :: ReaderT AppConfig (ExceptT AppError IO) a }
deriving (Applicative, Functor, Monad, MonadIO,
MonadReader AppConfig,
MonadError AppError)
-- 39:00
newtype App m a =
App { unApp :: ReaderT AppConfig (ExceptT AppError m) a }
deriving (Applicative, Functor, Monad, MonadIO, -- remove MonadIO?
MonadReader AppConfig,
MonadError AppError)
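-- Reading aid (a rough unrolling of the transformer stack): for any base monad m,
--   App m a ~ AppConfig -> m (Either AppError a)
-- so the classy constraints above can be discharged at IO (runApp below) or at
-- Writer (runAppW below) without changing loadFromDb/sendOverNet.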
------------------------------------------------------------------------------
-- HC
appIO :: AppIO ()
appIO = do
loadAndSendIO
loadAndSendIO
app :: App IO ()
app = do
loadAndSendIO
loadAndSendIO
appW :: App (Writer [T.Text]) T.Text
appW = do
r1 <- loadAndSend
r2 <- loadAndSend
return $ r1 <> " |||| " <> r2
runAppIO :: DbConfig -> NetworkConfig -> IO ()
runAppIO dbc nc = do
r <- runExceptT $
runReaderT (unAppIO appIO)
(AppConfig dbc nc)
T.putStrLn (T.pack $ show r)
runApp :: DbConfig -> NetworkConfig -> IO ()
runApp dbc nc = do
r <- runExceptT $
runReaderT (unApp app) -- only difference
(AppConfig dbc nc)
T.putStrLn (T.pack $ show r)
runAppW :: DbConfig -> NetworkConfig -> (Either AppError T.Text, [T.Text])
runAppW dbc nc =
runIdentity $
runWriterT $
runExceptT $
runReaderT (unApp appW)
(AppConfig dbc nc)
m1io,m2io,m3io :: IO ()
m1io = runAppIO dbcGood ncGood
m2io = runAppIO dbcBad ncGood
m3io = runAppIO dbcGood ncBad
m1,m2,m3 :: IO ()
m1 = runApp dbcGood ncGood
m2 = runApp dbcBad ncGood
m3 = runApp dbcGood ncBad
-- m1w,m2w,m3w :: IO ()
m1w = runAppW dbcGood ncGood
m2w = runAppW dbcBad ncGood
m3w = runAppW dbcGood ncBad
dbcGood = DbConfig "conn" "sche"
dbcBad = DbConfig "BAD" "sche for BAD"
ncGood = NetConfig 45 "xssl"
ncBad = NetConfig (-1) "xssl for -1"
------------------------------------------------------------------------------
-- 39:45
-- Abstractions > Concretions
-- Typeclass constraints stack up better than monolithic transformers
-- Lens gives compositional vocabulary for talking about data
-- talked about
-- - http://hackage.haskell.org/package/mtl
-- - http://lens.github.io
-- encourage looking at
-- - http://github.com/benkolera/talk-stacking-your-monads/
-- - http://hackage.haskell.org/package/hoist-error
-- makeClassy / makeClassyPrisms
-- - https://hackage.haskell.org/package/lens-4.13.2/docs/Control-Lens-TH.html
|
haroldcarr/learn-haskell-coq-ml-etc
|
haskell/topic/program-structure/2015-06-george-wilson-classy-optics/src/P5.hs
|
unlicense
| 4,994 | 0 | 18 | 1,340 | 1,431 | 745 | 686 | 137 | 2 |
module Main where
import Control.Monad
import Control.Monad.Error
import Control.Monad.Identity
import Control.Monad.Trans
import Data.Char
import Data.List
import Data.Maybe
import Error
import Expr
import Parser
import qualified Data.Map as DM
import System.Exit
import Text.Printf
type Eval a = ErrorT EvalError Identity a
type Checker = [Expr] -> Pos -> Eval ()
type StrictHandler = Env -> [(Value, Pos)] -> Eval Value
type LazyHandler = Env -> [Expr] -> Eval Value
data Builtin = Strict Checker StrictHandler
| Lazy Checker LazyHandler
builtins = DM.fromList [
("atom", (Strict (arguments 1) atom)),
("car", (Strict (arguments 1) car)),
("cdr", (Strict (arguments 1) cdr)),
("cons", (Strict (arguments 2) cons)),
("cond", (Lazy (manyArgumentsFrom 1) cond)),
("eq", (Strict (arguments 2) eq)),
("label", (Lazy (arguments 2) label)),
("lambda", (Lazy (arguments 2) lambda)),
("quote", (Lazy (arguments 1) quote)),
("+", (Strict (manyArgumentsFrom 2) add)),
("-", (Strict (manyArgumentsFrom 2) sub)),
("*", (Strict (manyArgumentsFrom 2) mul)),
("/", (Strict (manyArgumentsFrom 2) divide))]
atom env [(Number _, _)] = return $ Boolean True
atom env [(Boolean _, _)] = return $ Boolean True
atom env [(Symbol _, _)] = return $ Boolean True
atom env [(List [], _)] = return $ Boolean True
atom env _ = return $ Boolean False
car env [(list, p)] = do
xs <- asList (list, p)
when (xs == []) $ throwError $ ExpectListOfAtLeast 1 list p
return $ head xs
cdr env [(list, p)] = do
xs <- asList (list, p)
when (xs == []) $ throwError $ ExpectListOfAtLeast 1 list p
return $ List (tail xs)
cons env [(x, _), list] = do
xs <- asList list
return $ List (x:xs)
cond env ((L [caseCond, caseBody] _):rest) = do
caseCond' <- (eval env caseCond >>= \v -> (asBoolean (v, pos caseCond)))
if caseCond'
then eval env caseBody
else cond env rest
cond env (head:rest) =
throwError $ ExpectListOf 2 (quoteEval head) (pos head)
eq env [(a, _), (b, _)] | a == b = return $ Boolean True
| otherwise = return $ Boolean False
label env [atom, lambda] = do
name <- asName atom
closure <- eval (DM.delete name env) lambda
(params, body, env) <- asClosure (closure, pos lambda)
let env' = DM.insert name closure env
return $ Closure params body env'
lambda env [args, body] = do
names <- asNames args
return $ Closure names body env
quote env [expr] = return (quoteEval expr)
add env xs = do
nums <- asNumbers xs
return $ Number (sum nums)
sub env xs = do
nums <- asNumbers xs
return $ Number (head nums - (sum (tail nums)))
mul env xs = do
nums <- asNumbers xs
return $ Number (product nums)
divide env xs = do
let zero = filter (\(x, p) -> x == Number 0) (tail xs)
  when (zero /= []) $ throwError $ DivisionByZero (snd $ head zero)
nums <- asNumbers xs
let (a:bs) = nums
let result = foldl (\x y -> x `div` y) a bs
return $ Number result
asBoolean :: (Value, Pos) -> Eval Bool
asBoolean (Boolean x, _) = return x
asBoolean (x, p) = throwError $ ExpectBoolean x p
asString :: (Value, Pos) -> Eval String
asString (Symbol x, _) = return x
asString (x, p) = throwError $ ExpectSymbol x p
asNumber :: (Value, Pos) -> Eval Integer
asNumber (Number x, _) = return x
asNumber (x, p) = throwError $ ExpectNumber x p
asNumbers xs = mapM asNumber xs
asList :: (Value, Pos) -> Eval [Value]
asList (List x, p) = return x
asList (x, p) = throwError $ ExpectList x p
asClosure :: (Value, Pos) -> Eval ([String], Expr, Env)
asClosure (Closure args body env, _) = return (args, body, env)
asClosure (x, p) = throwError $ ExpectClosure x p
asName (A name _) = return name
asName x = throwError $ ExpectSymbol (quoteEval x) (pos x)
asNames (L xs _) = mapM asName xs
asNames x = throwError $ ExpectList (quoteEval x) (pos x)
arguments n args p =
when (n /= length args) $ throwError $ ExpectArguments n (length args) p
manyArgumentsFrom n args p =
when (n > length args) $ throwError $ ExpectArgumentsAtLeast n (length args) p
quoteEval :: Expr -> Value
quoteEval (A x _) = Symbol x
quoteEval (L x _) = List (map quoteEval x)
eval :: Env -> Expr -> Eval Value
eval env (A atom _) | atom == "true" = return $ Boolean True
eval env (A atom _) | atom == "false" = return $ Boolean False
eval env (A atom _) | all isDigit atom = return $ Number (read atom :: Integer)
eval env (A atom p) =
case (DM.lookup atom env) of
Just value -> return value
Nothing -> throwError $ UnboundIdentifier atom p
eval env (L (head@(A name _):tail) p) | name `DM.member` builtins = do
case fromJust $ DM.lookup name builtins of
Strict checker handler -> do
checker tail p
args <- mapM (eval env) tail
handler env (zip args (map pos tail))
Lazy checker handler -> do
checker tail p
handler env tail
eval env (L (head:tail) pos) = do
(params, body, closureEnv) <- (eval env head >>= \v -> (asClosure (v, pos)))
args <- mapM (eval env) tail
manyArgumentsFrom (length params) tail pos
let (named, rest) = splitAt (length params) args
let argsEnv = DM.fromList ((zip params named) ++ [("arguments", List rest)])
let env' = argsEnv `DM.union` closureEnv `DM.union` env
eval env' body
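-- Note (illustrative): a closure of k parameters applied to more than k
-- arguments binds the first k positionally; the spill-over is reachable in the
-- body as the list bound to "arguments" (e.g. via the car/cdr builtins above).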
runEval env expr = do
case runIdentity (runErrorT (eval env expr)) of
Right val -> return val
Left error -> do putStrLn ("While executing " ++ (show (pos expr)) ++ ":")
putStrLn (show error)
exitFailure
isDefun :: Expr -> Bool
isDefun (L ((A "defun" _):_) _) = True
isDefun _ = False
defun :: Env -> Expr -> IO Env
defun env (L [A "defun" p, name@(A sname _), params, body] _) = do
let lambda = L [(A "lambda" p), params, body] p
let label = L [(A "label" p), name, lambda] p
val <- runEval env lambda
putStrLn (sname)
return $ DM.insert sname val env
main = do str <- getContents
case parseHisp str of
Left err -> print err
Right exprs -> do
let (defuns, rest) = partition isDefun exprs
env <- foldM defun DM.empty defuns
mapM_ (\x -> runEval env x >>= (putStrLn . show)) rest
|
ulan/hisp
|
level1/Hisp1.hs
|
unlicense
| 6,484 | 0 | 19 | 1,742 | 3,026 | 1,530 | 1,496 | 160 | 3 |
{-
Copyright 2020 The CodeWorld Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
f(x) = _(x)
program = drawingOf(blank)
|
google/codeworld
|
codeworld-compiler/test/testcases/typedHoleWithRelevantBindings/source.hs
|
apache-2.0
| 650 | 0 | 6 | 122 | 30 | 16 | 14 | 2 | 1 |
-- |
-- Module: Network.Riak
-- Copyright: (c) 2011 MailRank, Inc.
-- License: Apache
-- Maintainer: Bryan O'Sullivan <[email protected]>
-- Stability: experimental
-- Portability: portable
--
-- A client for the Riak decentralized data store.
--
-- The functions in this module use JSON as the storage
-- representation, and automatically perform conflict resolution
-- during storage and retrieval.
--
-- This library is organized to allow a tradeoff between power
-- and ease of use. If you would like a different degree of
-- automation with storage and conflict resolution, you may want to
-- use one of the following modules (ranked from easiest to most
-- tricky to use):
--
-- [Network.Riak.JSON.Resolvable] JSON for storage, automatic conflict
-- resolution. (This module actually re-exports its definitions.)
-- This is the easiest module to work with.
--
-- [Network.Riak.JSON] JSON for storage, manual conflict resolution.
--
-- [Network.Riak.Value.Resolvable] More complex (but still automatic)
-- storage, automatic conflict resolution.
--
-- [Network.Riak.Value] More complex (but still automatic) storage,
-- manual conflict resolution.
--
-- [Network.Riak.Basic] manual storage, manual conflict resolution.
-- This is the most demanding module to work with, as you must encode
-- and decode data yourself, and handle all conflict resolution
-- yourself.
module Network.Riak
(
-- * Client configuration and identification
ClientID
, Client(..)
, defaultClient
, getClientID
-- * Connection management
, Connection(..)
, connect
, disconnect
, ping
, getServerInfo
-- * Data management
, Quorum(..)
, Resolvable(..)
, get
, getMany
, modify
, modify_
, delete
-- ** Low-level modification functions
, put
, putMany
-- * Metadata
, listBuckets
, foldKeys
, getBucket
, setBucket
-- * Map/reduce
, mapReduce
) where
import Network.Riak.Basic hiding (get, put, put_)
import Network.Riak.JSON.Resolvable (get, getMany, modify, modify_, put, putMany)
import Network.Riak.Resolvable (Resolvable(..))
|
bumptech/riak-haskell-client
|
src/Network/Riak.hs
|
apache-2.0
| 2,152 | 0 | 6 | 441 | 195 | 145 | 50 | 28 | 0 |
#!/usr/bin/env runhaskell
-- The classic 8-queens problem made famous by Wirth.
-- This version Colin Runciman, March 2000.
main =
if null solutions then putStrLn "no solution!"
else putStr (board (head solutions))
where
solutions = queens 8
queens :: Int -> [[Int]]
queens n = valid n n
valid :: Int -> Int -> [[Int]]
valid 0 n = [[]]
valid m n = filter safe (extend n (valid (m-1) n))
extend n b = cp (fromTo 1 n) b
cp :: [a] -> [[a]] -> [[a]]
cp [] y = []
cp (a:x) y = map (a:) y ++ cp x y
safe (a:b) = no_threat a b 1
no_threat a [] m = True
no_threat a (b:y) m =
a /= b && a+m /= b && a-m /= b && no_threat a y (m+1)
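-- For example (illustrative): the 4-queens instance has exactly two solutions,
-- mirror images of each other, so
--   queens 4 == [[2,4,1,3],[3,1,4,2]]
-- in the order produced by cp above.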
board :: [Int] -> String
board b =
unlines (concat (zipWith rank (from 1) b))
where
rank r qcol =
map line ["o o o", " \\|/ ", " === "]
where
line crown_slice =
concat (zipWith square (from 1) b)
where
square scol _ =
if scol == qcol then crown_slice
else if (scol `rem` (2::Int)) == (r `rem` (2::Int)) then "....."
else " "
-- in place of ..
from :: Int -> [Int]
from n = n : from (n+1)
fromTo :: Int -> Int -> [Int]
fromTo m n = if m > n then [] else m : fromTo (m+1) n
|
sergev/vak-opensource
|
languages/haskell/queens.hs
|
apache-2.0
| 1,188 | 8 | 16 | 337 | 614 | 326 | 288 | 32 | 3 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="tr-TR">
<title>HTTPS Info Add-on</title>
<maps>
<homeID>httpsinfo</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>İçindekiler</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>İçerik</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Arama</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favoriler</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
secdec/zap-extensions
|
addOns/httpsInfo/src/main/javahelp/org/zaproxy/zap/extension/httpsinfo/resources/help_tr_TR/helpset_tr_TR.hs
|
apache-2.0
| 975 | 77 | 67 | 157 | 419 | 211 | 208 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module GeneratedTests where
import Test.Tasty
import Test.Tasty.HUnit (assertEqual, testCase, (@?=))
import Data.String
import Proto3.Suite.DotProto.Generate
import Turtle hiding (err)
generatedTests :: TestTree
generatedTests = testGroup "Code generator tests"
[ testServerGeneration
, testClientGeneration ]
testServerGeneration :: TestTree
testServerGeneration = testCase "server generation" $ do
mktree hsTmpDir
mktree pyTmpDir
let args = CompileArgs
{ includeDir = ["tests"]
, extraInstanceFiles = []
, inputProto = "simple.proto"
, outputDir = hsTmpDir
}
compileDotProtoFileOrDie args
do exitCode <- proc "tests/simple-server.sh" [hsTmpDir] empty
exitCode @?= ExitSuccess
do exitCode <- proc "tests/protoc.sh" [pyTmpDir] empty
exitCode @?= ExitSuccess
runManaged $ do
serverExitCodeA <- fork (shell (hsTmpDir <> "/simple-server") empty)
clientExitCodeA <- fork
(export "PYTHONPATH" pyTmpDir >> shell "tests/test-client.sh" empty)
liftIO $ do
serverExitCode <- liftIO (wait serverExitCodeA)
clientExitCode <- liftIO (wait clientExitCodeA)
serverExitCode @?= ExitSuccess
clientExitCode @?= ExitSuccess
rmtree hsTmpDir
rmtree pyTmpDir
testClientGeneration :: TestTree
testClientGeneration = testCase "client generation" $ do
mktree hsTmpDir
mktree pyTmpDir
let args = CompileArgs
{ includeDir = ["tests"]
, extraInstanceFiles = []
, inputProto = "simple.proto"
, outputDir = hsTmpDir
}
compileDotProtoFileOrDie args
do exitCode <- proc "tests/simple-client.sh" [hsTmpDir] empty
exitCode @?= ExitSuccess
do exitCode <- proc "tests/protoc.sh" [pyTmpDir] empty
exitCode @?= ExitSuccess
runManaged $ do
serverExitCodeA <- fork
(export "PYTHONPATH" pyTmpDir >> shell "tests/test-server.sh" empty)
clientExitCodeA <- fork (shell (hsTmpDir <> "/simple-client") empty)
liftIO $ do
serverExitCode <- liftIO (wait serverExitCodeA)
clientExitCode <- liftIO (wait clientExitCodeA)
assertEqual "Server exit code" serverExitCode ExitSuccess
assertEqual "Client exit code" clientExitCode ExitSuccess
rmtree hsTmpDir
rmtree pyTmpDir
hsTmpDir, pyTmpDir :: IsString a => a
hsTmpDir = "tests/tmp"
pyTmpDir = "tests/py-tmp"
|
awakenetworks/gRPC-haskell
|
tests/GeneratedTests.hs
|
apache-2.0
| 2,389 | 0 | 17 | 504 | 607 | 296 | 311 | 64 | 1 |
{-# LANGUAGE Haskell2010 #-}
-- | This module tests that if we're trying to link to a /qualified/
-- identifier that's not in scope, we get an anchor as if it was a
-- variable. Previous behaviour was to treat it as a type constructor
-- so issue like #253 arose. Also see @rename@ function comments in
-- source.
module Bug253 where
-- | This link should generate @#v@ anchor: 'DoesNotExist.fakeFakeFake'
foo :: ()
foo = ()
|
haskell/haddock
|
html-test/src/Bug253.hs
|
bsd-2-clause
| 426 | 0 | 5 | 78 | 25 | 18 | 7 | 4 | 1 |
{-# LANGUAGE DeriveGeneric #-}
module Effects where
import Control.Applicative
import Control.Category
import Control.Lens
import Control.Monad.Reader as Reader
import qualified Data.IntMap.Strict as IntMap
import GHC.Generics
import Prelude hiding (Either(..), id, (.))
import Coord
import Entity
import GameM
import GameState
data Effect
= EffMoveTo Coord
| EffDamaged Int
| EffRecoverAP
| EffSpendAP Int
| EffDestroy
| EffPass
deriving (Show,Generic,Eq)
newtype EffectsToEntities = EffectsToEntities
{ getMap :: IntMap.IntMap [Effect]
  } deriving (Show)
instance Monoid EffectsToEntities where
mempty = EffectsToEntities IntMap.empty
mappend x y =
EffectsToEntities $ IntMap.unionWith (++) (getMap x) (getMap y)
-- The Monoid instance for IntMap does not append like I want
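-- Illustration with made-up keys: the default left-biased IntMap.union would
-- drop effects when two sources target the same entity ref,
--   IntMap.fromList [(1,[EffPass])] `IntMap.union` IntMap.fromList [(1,[EffDamaged 2])]
--     == IntMap.fromList [(1,[EffPass])]
-- whereas unionWith (++) keeps both effect lists:
--   IntMap.unionWith (++) (IntMap.fromList [(1,[EffPass])]) (IntMap.fromList [(1,[EffDamaged 2])])
--     == IntMap.fromList [(1,[EffPass, EffDamaged 2])]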
returnEffectsFor
:: Entity -> [Effect] -> EffectsToEntities
returnEffectsFor entity effects =
EffectsToEntities $ IntMap.singleton (entity ^. entityRef) effects
returnEffectsForRef :: EntityRef -> [Effect] -> EffectsToEntities
returnEffectsForRef entityref effects =
EffectsToEntities $ IntMap.singleton entityref effects
-----
applyEffectsToEntities
:: EffectsToEntities -> GameM GameState
applyEffectsToEntities effects = do
gameState <- ask
let gameEntities' =
IntMap.mergeWithKey
applyEffects
(const IntMap.empty)
id
(getMap effects)
(gameState ^. (currLevel . gameEntities))
gameState' = gameState & (currLevel . gameEntities) .~ gameEntities'
& (currLevel . cachedMap) .~ Nothing
return $ gameState'
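-- Note on the mergeWithKey call above: applyEffects handles refs present in
-- both maps (apply the pending effects, possibly deleting the entity),
-- (const IntMap.empty) discards effects addressed to refs with no entity, and
-- id keeps entities that received no effects unchanged.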
applyEffects :: EntityRef -> [Effect] -> Entity -> Maybe Entity
applyEffects _ effects e = foldr applyEffect (Just e) effects
applyEffect :: Effect -> Maybe Entity -> Maybe Entity
applyEffect EffDestroy _ = Nothing
applyEffect EffRecoverAP e = pure recoverAP <*> e
applyEffect (EffSpendAP a) e = spendAP <$> e <*> pure a
applyEffect (EffDamaged dmg) e = do
e' <- applyDamage <$> e <*> pure dmg
if isDead e'
then Nothing
else Just e'
applyEffect (EffMoveTo pos) e = moveTo <$> e <*> pure pos
applyEffect EffPass e = pure spendAllAP <*> e
spendAllAP
:: Entity -> Entity
spendAllAP e = e & actor %~ (liftA spendAllAP')
where
spendAllAP' act = act & actionPoints .~ 0
spendAP :: Entity -> Int -> Entity
spendAP e apC = e & actor %~ (liftA spendAP')
where
spendAP' act = act & actionPoints -~ apC
recoverAP :: Entity -> Entity
recoverAP e = e & actor %~ (liftA recoverAP')
where
recoverAP' act = act & actionPoints +~ (act ^. speed)
moveTo :: Entity -> Coord -> Entity
moveTo e coord = e & position .~ coord
applyDamage :: Entity -> Int -> Entity
applyDamage e dmg = e & health %~ (liftA applyDamage')
where
applyDamage' hp = hp & currHP -~ dmg
|
fros1y/umbral
|
src/Effects.hs
|
bsd-3-clause
| 2,903 | 0 | 15 | 671 | 867 | 458 | 409 | 77 | 2 |
module HighOrdAluOps where
import CLasH.HardwareTypes
type Op n e = (Vector n e -> Vector n e -> Vector n e)
type Opcode = Bit
constant :: NaturalT n => e -> Op n e
constant e a b = vcopy e
invop :: Op n Bit
invop a b = vmap hwnot a
andop f a b = vzipWith f a b
-- Is any bit set?
anyset :: NaturalT n => (e -> e -> e) -> e -> Op n e
anyset f s a b = constant (f a' b') a b
where
a' = vfoldl f s a
b' = vfoldl f s b
|
christiaanb/clash
|
examples/HighOrdAluOps.hs
|
bsd-3-clause
| 433 | 0 | 9 | 126 | 221 | 112 | 109 | 13 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.PackageDescription.Check
-- Copyright : Lennart Kolmodin 2008
-- License : BSD3
--
-- Maintainer : [email protected]
-- Portability : portable
--
-- This has code for checking for various problems in packages. There is one
-- set of checks that just looks at a 'PackageDescription' in isolation and
-- another set of checks that also looks at files in the package. Some of the
-- checks are basic sanity checks, others are portability standards that we'd
-- like to encourage. There is a 'PackageCheck' type that distinguishes the
-- different kinds of check so we can see which ones are appropriate to report
-- in different situations. This code gets used when configuring a package when
-- we consider only basic problems. The higher standard is used when
-- preparing a source tarball and by Hackage when uploading new packages. The
-- reason for this is that we want to hold packages that are expected to be
-- distributed to a higher standard than packages that are only ever expected
-- to be used in the author's own environment.
module Distribution.PackageDescription.Check (
-- * Package Checking
PackageCheck(..),
checkPackage,
checkConfiguredPackage,
-- ** Checking package contents
checkPackageFiles,
checkPackageContent,
CheckPackageContentOps(..),
checkPackageFileNames,
) where
import Data.Maybe
( isNothing, isJust, catMaybes, maybeToList, fromMaybe )
import Data.List (sort, group, isPrefixOf, nub, find)
import Control.Monad
( filterM, liftM )
import qualified System.Directory as System
( doesFileExist, doesDirectoryExist )
import qualified Data.Map as Map
import Distribution.PackageDescription
import Distribution.PackageDescription.Configuration
( flattenPackageDescription, finalizePackageDescription )
import Distribution.Compiler
( CompilerFlavor(..), buildCompilerFlavor, CompilerId(..)
, unknownCompilerInfo, AbiTag(..) )
import Distribution.System
( OS(..), Arch(..), buildPlatform )
import Distribution.License
( License(..), knownLicenses )
import Distribution.Simple.CCompiler
( filenameCDialect )
import Distribution.Simple.Utils
( cabalVersion, intercalate, parseFileGlob, FileGlob(..), lowercase, startsWithBOM, fromUTF8 )
import Distribution.Version
( Version(..)
, VersionRange(..), foldVersionRange'
, anyVersion, noVersion, thisVersion, laterVersion, earlierVersion
, orLaterVersion, orEarlierVersion
, unionVersionRanges, intersectVersionRanges
, asVersionIntervals, UpperBound(..), isNoVersion )
import Distribution.Package
( PackageName(PackageName), packageName, packageVersion
, Dependency(..), pkgName )
import Distribution.Text
( display, disp )
import qualified Text.PrettyPrint as Disp
import Text.PrettyPrint ((<>), (<+>))
import qualified Language.Haskell.Extension as Extension (deprecatedExtensions)
import Language.Haskell.Extension
( Language(UnknownLanguage), knownLanguages
, Extension(..), KnownExtension(..) )
import qualified System.Directory (getDirectoryContents)
import System.IO (openBinaryFile, IOMode(ReadMode), hGetContents)
import System.FilePath
( (</>), takeExtension, isRelative, isAbsolute
, splitDirectories, splitPath, splitExtension )
import System.FilePath.Windows as FilePath.Windows
( isValid )
-- | Results of some kind of failed package check.
--
-- There are a range of severities, from merely dubious to totally insane.
-- All of them come with a human readable explanation. In future we may augment
-- them with more machine readable explanations, for example to help an IDE
-- suggest automatic corrections.
--
data PackageCheck =
-- | This package description is no good. There's no way it's going to
-- build sensibly. This should give an error at configure time.
PackageBuildImpossible { explanation :: String }
-- | A problem that is likely to affect building the package, or an
-- issue that we'd like every package author to be aware of, even if
-- the package is never distributed.
| PackageBuildWarning { explanation :: String }
-- | An issue that might not be a problem for the package author but
-- might be annoying or detrimental when the package is distributed to
-- users. We should encourage distributed packages to be free from these
-- issues, but occasionally there are justifiable reasons so we cannot
-- ban them entirely.
| PackageDistSuspicious { explanation :: String }
-- | Like PackageDistSuspicious but will only display warnings
-- rather than causing abnormal exit.
| PackageDistSuspiciousWarn { explanation :: String }
-- | An issue that is OK in the author's environment but is almost
-- certain to be a portability problem for other environments. We can
-- quite legitimately refuse to publicly distribute packages with these
-- problems.
| PackageDistInexcusable { explanation :: String }
deriving (Eq)
instance Show PackageCheck where
show notice = explanation notice
check :: Bool -> PackageCheck -> Maybe PackageCheck
check False _ = Nothing
check True pc = Just pc
checkSpecVersion :: PackageDescription -> [Int] -> Bool -> PackageCheck
-> Maybe PackageCheck
checkSpecVersion pkg specver cond pc
| specVersion pkg >= Version specver [] = Nothing
| otherwise = check cond pc
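-- For example, check (null (category pkg)) (PackageDistSuspicious "No 'category' field.")
-- is Just the warning only when the condition holds; the per-area checkers
-- below collect such results with catMaybes.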
-- ------------------------------------------------------------
-- * Standard checks
-- ------------------------------------------------------------
-- | Check for common mistakes and problems in package descriptions.
--
-- This is the standard collection of checks covering all aspects except
-- for checks that require looking at files within the package. For those
-- see 'checkPackageFiles'.
--
-- It requires the 'GenericPackageDescription' and optionally a particular
-- configuration of that package. If you pass 'Nothing' then we just check
-- a version of the generic description using 'flattenPackageDescription'.
--
checkPackage :: GenericPackageDescription
-> Maybe PackageDescription
-> [PackageCheck]
checkPackage gpkg mpkg =
checkConfiguredPackage pkg
++ checkConditionals gpkg
++ checkPackageVersions gpkg
++ checkDevelopmentOnlyFlags gpkg
where
pkg = fromMaybe (flattenPackageDescription gpkg) mpkg
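-- A minimal (hypothetical) call site, checking a parsed description without
-- fixing a particular configuration:
--   probs = checkPackage gpd Nothing
--   fatal = [ e | PackageBuildImpossible e <- probs ]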
--TODO: make this variant go away
-- we should always know the GenericPackageDescription
checkConfiguredPackage :: PackageDescription -> [PackageCheck]
checkConfiguredPackage pkg =
checkSanity pkg
++ checkFields pkg
++ checkLicense pkg
++ checkSourceRepos pkg
++ checkGhcOptions pkg
++ checkCCOptions pkg
++ checkCPPOptions pkg
++ checkPaths pkg
++ checkCabalVersion pkg
-- ------------------------------------------------------------
-- * Basic sanity checks
-- ------------------------------------------------------------
-- | Check that this package description is sane.
--
checkSanity :: PackageDescription -> [PackageCheck]
checkSanity pkg =
catMaybes [
check (null . (\(PackageName n) -> n) . packageName $ pkg) $
PackageBuildImpossible "No 'name' field."
, check (null . versionBranch . packageVersion $ pkg) $
PackageBuildImpossible "No 'version' field."
, check (null (executables pkg) && isNothing (library pkg)) $
PackageBuildImpossible
"No executables and no library found. Nothing to do."
, check (not (null duplicateNames)) $
PackageBuildImpossible $ "Duplicate sections: " ++ commaSep duplicateNames
++ ". The name of every executable, test suite, and benchmark section in"
++ " the package must be unique."
]
--TODO: check for name clashes case insensitively: windows file systems cannot
--cope.
++ maybe [] (checkLibrary pkg) (library pkg)
++ concatMap (checkExecutable pkg) (executables pkg)
++ concatMap (checkTestSuite pkg) (testSuites pkg)
++ concatMap (checkBenchmark pkg) (benchmarks pkg)
++ catMaybes [
check (specVersion pkg > cabalVersion) $
PackageBuildImpossible $
"This package description follows version "
++ display (specVersion pkg) ++ " of the Cabal specification. This "
++ "tool only supports up to version " ++ display cabalVersion ++ "."
]
where
exeNames = map exeName $ executables pkg
testNames = map testName $ testSuites pkg
bmNames = map benchmarkName $ benchmarks pkg
duplicateNames = dups $ exeNames ++ testNames ++ bmNames
checkLibrary :: PackageDescription -> Library -> [PackageCheck]
checkLibrary pkg lib =
catMaybes [
check (not (null moduleDuplicates)) $
PackageBuildImpossible $
"Duplicate modules in library: "
++ commaSep (map display moduleDuplicates)
-- check use of required-signatures/exposed-signatures sections
, checkVersion [1,21] (not (null (requiredSignatures lib))) $
PackageDistInexcusable $
"To use the 'required-signatures' field the package needs to specify "
++ "at least 'cabal-version: >= 1.21'."
, checkVersion [1,21] (not (null (exposedSignatures lib))) $
PackageDistInexcusable $
"To use the 'exposed-signatures' field the package needs to specify "
++ "at least 'cabal-version: >= 1.21'."
]
where
checkVersion :: [Int] -> Bool -> PackageCheck -> Maybe PackageCheck
checkVersion ver cond pc
| specVersion pkg >= Version ver [] = Nothing
| otherwise = check cond pc
moduleDuplicates = dups (libModules lib ++
map moduleReexportName (reexportedModules lib))
checkExecutable :: PackageDescription -> Executable -> [PackageCheck]
checkExecutable pkg exe =
catMaybes [
check (null (modulePath exe)) $
PackageBuildImpossible $
"No 'main-is' field found for executable " ++ exeName exe
, check (not (null (modulePath exe))
&& (not $ fileExtensionSupportedLanguage $ modulePath exe)) $
PackageBuildImpossible $
"The 'main-is' field must specify a '.hs' or '.lhs' file "
++ "(even if it is generated by a preprocessor), "
++ "or it may specify a C/C++/obj-C source file."
, checkSpecVersion pkg [1,17]
(fileExtensionSupportedLanguage (modulePath exe)
&& takeExtension (modulePath exe) `notElem` [".hs", ".lhs"]) $
PackageDistInexcusable $
"The package uses a C/C++/obj-C source file for the 'main-is' field. "
++ "To use this feature you must specify 'cabal-version: >= 1.18'."
, check (not (null moduleDuplicates)) $
PackageBuildImpossible $
"Duplicate modules in executable '" ++ exeName exe ++ "': "
++ commaSep (map display moduleDuplicates)
]
where
moduleDuplicates = dups (exeModules exe)
checkTestSuite :: PackageDescription -> TestSuite -> [PackageCheck]
checkTestSuite pkg test =
catMaybes [
case testInterface test of
TestSuiteUnsupported tt@(TestTypeUnknown _ _) -> Just $
PackageBuildWarning $
quote (display tt) ++ " is not a known type of test suite. "
++ "The known test suite types are: "
++ commaSep (map display knownTestTypes)
TestSuiteUnsupported tt -> Just $
PackageBuildWarning $
quote (display tt) ++ " is not a supported test suite version. "
++ "The known test suite types are: "
++ commaSep (map display knownTestTypes)
_ -> Nothing
, check (not $ null moduleDuplicates) $
PackageBuildImpossible $
"Duplicate modules in test suite '" ++ testName test ++ "': "
++ commaSep (map display moduleDuplicates)
, check mainIsWrongExt $
PackageBuildImpossible $
"The 'main-is' field must specify a '.hs' or '.lhs' file "
++ "(even if it is generated by a preprocessor), "
++ "or it may specify a C/C++/obj-C source file."
, checkSpecVersion pkg [1,17] (mainIsNotHsExt && not mainIsWrongExt) $
PackageDistInexcusable $
"The package uses a C/C++/obj-C source file for the 'main-is' field. "
++ "To use this feature you must specify 'cabal-version: >= 1.18'."
-- Test suites might be built as (internal) libraries named after
-- the test suite and thus their names must not clash with the
-- name of the package.
, check libNameClash $
PackageBuildImpossible $
"The test suite " ++ testName test
++ " has the same name as the package."
]
where
moduleDuplicates = dups $ testModules test
mainIsWrongExt = case testInterface test of
TestSuiteExeV10 _ f -> not $ fileExtensionSupportedLanguage f
_ -> False
mainIsNotHsExt = case testInterface test of
TestSuiteExeV10 _ f -> takeExtension f `notElem` [".hs", ".lhs"]
_ -> False
libNameClash = testName test `elem` [ libName
| _lib <- maybeToList (library pkg)
, let PackageName libName =
pkgName (package pkg) ]
checkBenchmark :: PackageDescription -> Benchmark -> [PackageCheck]
checkBenchmark pkg bm =
catMaybes [
case benchmarkInterface bm of
BenchmarkUnsupported tt@(BenchmarkTypeUnknown _ _) -> Just $
PackageBuildWarning $
quote (display tt) ++ " is not a known type of benchmark. "
++ "The known benchmark types are: "
++ commaSep (map display knownBenchmarkTypes)
BenchmarkUnsupported tt -> Just $
PackageBuildWarning $
quote (display tt) ++ " is not a supported benchmark version. "
++ "The known benchmark types are: "
++ commaSep (map display knownBenchmarkTypes)
_ -> Nothing
, check (not $ null moduleDuplicates) $
PackageBuildImpossible $
"Duplicate modules in benchmark '" ++ benchmarkName bm ++ "': "
++ commaSep (map display moduleDuplicates)
, check mainIsWrongExt $
PackageBuildImpossible $
"The 'main-is' field must specify a '.hs' or '.lhs' file "
++ "(even if it is generated by a preprocessor)."
-- See comment for similar check on test suites.
, check libNameClash $
PackageBuildImpossible $
"The benchmark " ++ benchmarkName bm
++ " has the same name as the package."
]
where
moduleDuplicates = dups $ benchmarkModules bm
mainIsWrongExt = case benchmarkInterface bm of
BenchmarkExeV10 _ f -> takeExtension f `notElem` [".hs", ".lhs"]
_ -> False
libNameClash = benchmarkName bm `elem` [ libName
| _lib <- maybeToList (library pkg)
, let PackageName libName =
pkgName (package pkg) ]
-- ------------------------------------------------------------
-- * Additional pure checks
-- ------------------------------------------------------------
checkFields :: PackageDescription -> [PackageCheck]
checkFields pkg =
catMaybes [
check (not . FilePath.Windows.isValid . display . packageName $ pkg) $
PackageDistInexcusable $
"Unfortunately, the package name '" ++ display (packageName pkg)
++ "' is one of the reserved system file names on Windows. Many tools "
++ "need to convert package names to file names so using this name "
++ "would cause problems."
, check (isNothing (buildType pkg)) $
PackageBuildWarning $
"No 'build-type' specified. If you do not need a custom Setup.hs or "
++ "./configure script then use 'build-type: Simple'."
, case buildType pkg of
Just (UnknownBuildType unknown) -> Just $
PackageBuildWarning $
quote unknown ++ " is not a known 'build-type'. "
++ "The known build types are: "
++ commaSep (map display knownBuildTypes)
_ -> Nothing
, check (isJust (setupBuildInfo pkg) && buildType pkg /= Just Custom) $
PackageBuildWarning $
"Ignoring the 'custom-setup' section because the 'build-type' is "
++ "not 'Custom'. Use 'build-type: Custom' if you need to use a "
++ "custom Setup.hs script."
, check (not (null unknownCompilers)) $
PackageBuildWarning $
"Unknown compiler " ++ commaSep (map quote unknownCompilers)
++ " in 'tested-with' field."
, check (not (null unknownLanguages)) $
PackageBuildWarning $
"Unknown languages: " ++ commaSep unknownLanguages
, check (not (null unknownExtensions)) $
PackageBuildWarning $
"Unknown extensions: " ++ commaSep unknownExtensions
, check (not (null languagesUsedAsExtensions)) $
PackageBuildWarning $
"Languages listed as extensions: "
++ commaSep languagesUsedAsExtensions
++ ". Languages must be specified in either the 'default-language' "
++ " or the 'other-languages' field."
, check (not (null deprecatedExtensions)) $
PackageDistSuspicious $
"Deprecated extensions: "
++ commaSep (map (quote . display . fst) deprecatedExtensions)
++ ". " ++ unwords
[ "Instead of '" ++ display ext
++ "' use '" ++ display replacement ++ "'."
| (ext, Just replacement) <- deprecatedExtensions ]
, check (null (category pkg)) $
PackageDistSuspicious "No 'category' field."
, check (null (maintainer pkg)) $
PackageDistSuspicious "No 'maintainer' field."
, check (null (synopsis pkg) && null (description pkg)) $
PackageDistInexcusable "No 'synopsis' or 'description' field."
, check (null (description pkg) && not (null (synopsis pkg))) $
PackageDistSuspicious "No 'description' field."
, check (null (synopsis pkg) && not (null (description pkg))) $
PackageDistSuspicious "No 'synopsis' field."
--TODO: recommend the bug reports URL, author and homepage fields
--TODO: recommend not using the stability field
--TODO: recommend specifying a source repo
, check (length (synopsis pkg) >= 80) $
PackageDistSuspicious
"The 'synopsis' field is rather long (max 80 chars is recommended)."
-- check use of impossible constraints "tested-with: GHC== 6.10 && ==6.12"
, check (not (null testedWithImpossibleRanges)) $
PackageDistInexcusable $
"Invalid 'tested-with' version range: "
++ commaSep (map display testedWithImpossibleRanges)
++ ". To indicate that you have tested a package with multiple "
++ "different versions of the same compiler use multiple entries, "
++ "for example 'tested-with: GHC==6.10.4, GHC==6.12.3' and not "
++ "'tested-with: GHC==6.10.4 && ==6.12.3'."
]
where
unknownCompilers = [ name | (OtherCompiler name, _) <- testedWith pkg ]
unknownLanguages = [ name | bi <- allBuildInfo pkg
, UnknownLanguage name <- allLanguages bi ]
unknownExtensions = [ name | bi <- allBuildInfo pkg
, UnknownExtension name <- allExtensions bi
, name `notElem` map display knownLanguages ]
deprecatedExtensions = nub $ catMaybes
[ find ((==ext) . fst) Extension.deprecatedExtensions
| bi <- allBuildInfo pkg
, ext <- allExtensions bi ]
languagesUsedAsExtensions =
[ name | bi <- allBuildInfo pkg
, UnknownExtension name <- allExtensions bi
, name `elem` map display knownLanguages ]
testedWithImpossibleRanges =
[ Dependency (PackageName (display compiler)) vr
| (compiler, vr) <- testedWith pkg
, isNoVersion vr ]
checkLicense :: PackageDescription -> [PackageCheck]
checkLicense pkg =
catMaybes [
check (license pkg == UnspecifiedLicense) $
PackageDistInexcusable
"The 'license' field is missing."
, check (license pkg == AllRightsReserved) $
PackageDistSuspicious
"The 'license' is AllRightsReserved. Is that really what you want?"
, case license pkg of
UnknownLicense l -> Just $
PackageBuildWarning $
quote ("license: " ++ l) ++ " is not a recognised license. The "
++ "known licenses are: "
++ commaSep (map display knownLicenses)
_ -> Nothing
, check (license pkg == BSD4) $
PackageDistSuspicious $
"Using 'license: BSD4' is almost always a misunderstanding. 'BSD4' "
++ "refers to the old 4-clause BSD license with the advertising "
      ++ "clause. 'BSD3' refers to the new 3-clause BSD license."
, case unknownLicenseVersion (license pkg) of
Just knownVersions -> Just $
PackageDistSuspicious $
"'license: " ++ display (license pkg) ++ "' is not a known "
++ "version of that license. The known versions are "
++ commaSep (map display knownVersions)
++ ". If this is not a mistake and you think it should be a known "
++ "version then please file a ticket."
_ -> Nothing
, check (license pkg `notElem` [ AllRightsReserved
, UnspecifiedLicense, PublicDomain]
-- AllRightsReserved and PublicDomain are not strictly
-- licenses so don't need license files.
&& null (licenseFiles pkg)) $
PackageDistSuspicious "A 'license-file' is not specified."
]
where
unknownLicenseVersion (GPL (Just v))
| v `notElem` knownVersions = Just knownVersions
where knownVersions = [ v' | GPL (Just v') <- knownLicenses ]
unknownLicenseVersion (LGPL (Just v))
| v `notElem` knownVersions = Just knownVersions
where knownVersions = [ v' | LGPL (Just v') <- knownLicenses ]
unknownLicenseVersion (AGPL (Just v))
| v `notElem` knownVersions = Just knownVersions
where knownVersions = [ v' | AGPL (Just v') <- knownLicenses ]
unknownLicenseVersion (Apache (Just v))
| v `notElem` knownVersions = Just knownVersions
where knownVersions = [ v' | Apache (Just v') <- knownLicenses ]
unknownLicenseVersion _ = Nothing
checkSourceRepos :: PackageDescription -> [PackageCheck]
checkSourceRepos pkg =
catMaybes $ concat [[
case repoKind repo of
RepoKindUnknown kind -> Just $ PackageDistInexcusable $
quote kind ++ " is not a recognised kind of source-repository. "
++ "The repo kind is usually 'head' or 'this'"
_ -> Nothing
, check (isNothing (repoType repo)) $
PackageDistInexcusable
"The source-repository 'type' is a required field."
, check (isNothing (repoLocation repo)) $
PackageDistInexcusable
"The source-repository 'location' is a required field."
, check (repoType repo == Just CVS && isNothing (repoModule repo)) $
PackageDistInexcusable
"For a CVS source-repository, the 'module' is a required field."
, check (repoKind repo == RepoThis && isNothing (repoTag repo)) $
PackageDistInexcusable $
"For the 'this' kind of source-repository, the 'tag' is a required "
++ "field. It should specify the tag corresponding to this version "
++ "or release of the package."
, check (maybe False System.FilePath.isAbsolute (repoSubdir repo)) $
PackageDistInexcusable
"The 'subdir' field of a source-repository must be a relative path."
]
| repo <- sourceRepos pkg ]
--TODO: check location looks like a URL for some repo types.
checkGhcOptions :: PackageDescription -> [PackageCheck]
checkGhcOptions pkg =
catMaybes [
checkFlags ["-fasm"] $
PackageDistInexcusable $
"'ghc-options: -fasm' is unnecessary and will not work on CPU "
++ "architectures other than x86, x86-64, ppc or sparc."
, checkFlags ["-fvia-C"] $
PackageDistSuspicious $
"'ghc-options: -fvia-C' is usually unnecessary. If your package "
++ "needs -via-C for correctness rather than performance then it "
++ "is using the FFI incorrectly and will probably not work with GHC "
++ "6.10 or later."
, checkFlags ["-fhpc"] $
      PackageDistInexcusable $
          "'ghc-options: -fhpc' is not necessary. Use the configure flag "
        ++ "--enable-coverage instead."
, checkFlags ["-prof"] $
PackageBuildWarning $
"'ghc-options: -prof' is not necessary and will lead to problems "
++ "when used on a library. Use the configure flag "
++ "--enable-library-profiling and/or --enable-executable-profiling."
, checkFlags ["-o"] $
PackageBuildWarning $
"'ghc-options: -o' is not needed. "
++ "The output files are named automatically."
, checkFlags ["-hide-package"] $
PackageBuildWarning $
"'ghc-options: -hide-package' is never needed. "
++ "Cabal hides all packages."
, checkFlags ["--make"] $
PackageBuildWarning $
"'ghc-options: --make' is never needed. Cabal uses this automatically."
, checkFlags ["-main-is"] $
PackageDistSuspicious $
"'ghc-options: -main-is' is not portable."
, checkFlags ["-O0", "-Onot"] $
PackageDistSuspicious $
"'ghc-options: -O0' is not needed. "
++ "Use the --disable-optimization configure flag."
, checkFlags [ "-O", "-O1"] $
PackageDistInexcusable $
"'ghc-options: -O' is not needed. "
++ "Cabal automatically adds the '-O' flag. "
++ "Setting it yourself interferes with the --disable-optimization flag."
, checkFlags ["-O2"] $
PackageDistSuspiciousWarn $
"'ghc-options: -O2' is rarely needed. "
++ "Check that it is giving a real benefit "
++ "and not just imposing longer compile times on your users."
, checkFlags ["-split-objs"] $
PackageBuildWarning $
"'ghc-options: -split-objs' is not needed. "
++ "Use the --enable-split-objs configure flag."
, checkFlags ["-optl-Wl,-s", "-optl-s"] $
PackageDistInexcusable $
"'ghc-options: -optl-Wl,-s' is not needed and is not portable to all"
++ " operating systems. Cabal 1.4 and later automatically strip"
++ " executables. Cabal also has a flag --disable-executable-stripping"
++ " which is necessary when building packages for some Linux"
++ " distributions and using '-optl-Wl,-s' prevents that from working."
, checkFlags ["-fglasgow-exts"] $
PackageDistSuspicious $
"Instead of 'ghc-options: -fglasgow-exts' it is preferable to use "
++ "the 'extensions' field."
, check ("-threaded" `elem` lib_ghc_options) $
PackageBuildWarning $
"'ghc-options: -threaded' has no effect for libraries. It should "
++ "only be used for executables."
, check ("-rtsopts" `elem` lib_ghc_options) $
PackageBuildWarning $
"'ghc-options: -rtsopts' has no effect for libraries. It should "
++ "only be used for executables."
, check (any (\opt -> "-with-rtsopts" `isPrefixOf` opt) lib_ghc_options) $
PackageBuildWarning $
"'ghc-options: -with-rtsopts' has no effect for libraries. It "
++ "should only be used for executables."
, checkAlternatives "ghc-options" "extensions"
[ (flag, display extension) | flag <- all_ghc_options
, Just extension <- [ghcExtension flag] ]
, checkAlternatives "ghc-options" "extensions"
[ (flag, extension) | flag@('-':'X':extension) <- all_ghc_options ]
, checkAlternatives "ghc-options" "cpp-options" $
[ (flag, flag) | flag@('-':'D':_) <- all_ghc_options ]
++ [ (flag, flag) | flag@('-':'U':_) <- all_ghc_options ]
, checkAlternatives "ghc-options" "include-dirs"
[ (flag, dir) | flag@('-':'I':dir) <- all_ghc_options ]
, checkAlternatives "ghc-options" "extra-libraries"
[ (flag, lib) | flag@('-':'l':lib) <- all_ghc_options ]
, checkAlternatives "ghc-options" "extra-lib-dirs"
[ (flag, dir) | flag@('-':'L':dir) <- all_ghc_options ]
]
where
all_ghc_options = concatMap get_ghc_options (allBuildInfo pkg)
lib_ghc_options = maybe [] (get_ghc_options . libBuildInfo) (library pkg)
get_ghc_options bi = hcOptions GHC bi ++ hcProfOptions GHC bi
checkFlags :: [String] -> PackageCheck -> Maybe PackageCheck
checkFlags flags = check (any (`elem` flags) all_ghc_options)
ghcExtension ('-':'f':name) = case name of
"allow-overlapping-instances" -> enable OverlappingInstances
"no-allow-overlapping-instances" -> disable OverlappingInstances
"th" -> enable TemplateHaskell
"no-th" -> disable TemplateHaskell
"ffi" -> enable ForeignFunctionInterface
"no-ffi" -> disable ForeignFunctionInterface
"fi" -> enable ForeignFunctionInterface
"no-fi" -> disable ForeignFunctionInterface
"monomorphism-restriction" -> enable MonomorphismRestriction
"no-monomorphism-restriction" -> disable MonomorphismRestriction
"mono-pat-binds" -> enable MonoPatBinds
"no-mono-pat-binds" -> disable MonoPatBinds
"allow-undecidable-instances" -> enable UndecidableInstances
"no-allow-undecidable-instances" -> disable UndecidableInstances
"allow-incoherent-instances" -> enable IncoherentInstances
"no-allow-incoherent-instances" -> disable IncoherentInstances
"arrows" -> enable Arrows
"no-arrows" -> disable Arrows
"generics" -> enable Generics
"no-generics" -> disable Generics
"implicit-prelude" -> enable ImplicitPrelude
"no-implicit-prelude" -> disable ImplicitPrelude
"implicit-params" -> enable ImplicitParams
"no-implicit-params" -> disable ImplicitParams
"bang-patterns" -> enable BangPatterns
"no-bang-patterns" -> disable BangPatterns
"scoped-type-variables" -> enable ScopedTypeVariables
"no-scoped-type-variables" -> disable ScopedTypeVariables
"extended-default-rules" -> enable ExtendedDefaultRules
"no-extended-default-rules" -> disable ExtendedDefaultRules
_ -> Nothing
ghcExtension "-cpp" = enable CPP
ghcExtension _ = Nothing
enable e = Just (EnableExtension e)
disable e = Just (DisableExtension e)
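    -- e.g. ghcExtension "-fth" == Just (EnableExtension TemplateHaskell) and
    -- ghcExtension "-cpp" == Just (EnableExtension CPP); flags that map to
    -- Nothing are simply not suggested for the 'extensions' field above.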
checkCCOptions :: PackageDescription -> [PackageCheck]
checkCCOptions pkg =
catMaybes [
checkAlternatives "cc-options" "include-dirs"
[ (flag, dir) | flag@('-':'I':dir) <- all_ccOptions ]
, checkAlternatives "cc-options" "extra-libraries"
[ (flag, lib) | flag@('-':'l':lib) <- all_ccOptions ]
, checkAlternatives "cc-options" "extra-lib-dirs"
[ (flag, dir) | flag@('-':'L':dir) <- all_ccOptions ]
, checkAlternatives "ld-options" "extra-libraries"
[ (flag, lib) | flag@('-':'l':lib) <- all_ldOptions ]
, checkAlternatives "ld-options" "extra-lib-dirs"
[ (flag, dir) | flag@('-':'L':dir) <- all_ldOptions ]
, checkCCFlags [ "-O", "-Os", "-O0", "-O1", "-O2", "-O3" ] $
PackageDistSuspicious $
"'cc-options: -O[n]' is generally not needed. When building with "
++ " optimisations Cabal automatically adds '-O2' for C code. "
      ++ "optimisations Cabal automatically adds '-O2' for C code. "
++ "flag."
]
where all_ccOptions = [ opts | bi <- allBuildInfo pkg
, opts <- ccOptions bi ]
all_ldOptions = [ opts | bi <- allBuildInfo pkg
, opts <- ldOptions bi ]
checkCCFlags :: [String] -> PackageCheck -> Maybe PackageCheck
checkCCFlags flags = check (any (`elem` flags) all_ccOptions)
checkCPPOptions :: PackageDescription -> [PackageCheck]
checkCPPOptions pkg =
catMaybes [
checkAlternatives "cpp-options" "include-dirs"
[ (flag, dir) | flag@('-':'I':dir) <- all_cppOptions]
]
where all_cppOptions = [ opts | bi <- allBuildInfo pkg
, opts <- cppOptions bi ]
checkAlternatives :: String -> String -> [(String, String)] -> Maybe PackageCheck
checkAlternatives badField goodField flags =
check (not (null badFlags)) $
PackageBuildWarning $
"Instead of " ++ quote (badField ++ ": " ++ unwords badFlags)
++ " use " ++ quote (goodField ++ ": " ++ unwords goodFlags)
where (badFlags, goodFlags) = unzip flags
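-- For instance (illustrative): checkAlternatives "ghc-options" "include-dirs"
-- [("-Ifoo", "foo")] yields a PackageBuildWarning reading, roughly,
-- "Instead of 'ghc-options: -Ifoo' use 'include-dirs: foo'".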
checkPaths :: PackageDescription -> [PackageCheck]
checkPaths pkg =
[ PackageBuildWarning $
quote (kind ++ ": " ++ path)
++ " is a relative path outside of the source tree. "
++ "This will not work when generating a tarball with 'sdist'."
| (path, kind) <- relPaths ++ absPaths
, isOutsideTree path ]
++
[ PackageDistInexcusable $
quote (kind ++ ": " ++ path) ++ " is an absolute directory."
| (path, kind) <- relPaths
, isAbsolute path ]
++
[ PackageDistInexcusable $
quote (kind ++ ": " ++ path) ++ " points inside the 'dist' "
++ "directory. This is not reliable because the location of this "
++ "directory is configurable by the user (or package manager). In "
++ "addition the layout of the 'dist' directory is subject to change "
++ "in future versions of Cabal."
| (path, kind) <- relPaths ++ absPaths
, isInsideDist path ]
++
[ PackageDistInexcusable $
"The 'ghc-options' contains the path '" ++ path ++ "' which points "
++ "inside the 'dist' directory. This is not reliable because the "
++ "location of this directory is configurable by the user (or package "
++ "manager). In addition the layout of the 'dist' directory is subject "
++ "to change in future versions of Cabal."
| bi <- allBuildInfo pkg
, (GHC, flags) <- options bi
, path <- flags
, isInsideDist path ]
where
isOutsideTree path = case splitDirectories path of
"..":_ -> True
".":"..":_ -> True
_ -> False
isInsideDist path = case map lowercase (splitDirectories path) of
"dist" :_ -> True
".":"dist":_ -> True
_ -> False
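    -- e.g. isOutsideTree "../somewhere/file.hs" and isInsideDist "dist/build/autogen"
    -- are both True; "./dist/..." paths are caught by the second case patterns.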
-- paths that must be relative
relPaths =
[ (path, "extra-src-files") | path <- extraSrcFiles pkg ]
++ [ (path, "extra-tmp-files") | path <- extraTmpFiles pkg ]
++ [ (path, "extra-doc-files") | path <- extraDocFiles pkg ]
++ [ (path, "data-files") | path <- dataFiles pkg ]
++ [ (path, "data-dir") | path <- [dataDir pkg]]
++ concat
[ [ (path, "c-sources") | path <- cSources bi ]
++ [ (path, "js-sources") | path <- jsSources bi ]
++ [ (path, "install-includes") | path <- installIncludes bi ]
++ [ (path, "hs-source-dirs") | path <- hsSourceDirs bi ]
| bi <- allBuildInfo pkg ]
-- paths that are allowed to be absolute
absPaths = concat
[ [ (path, "includes") | path <- includes bi ]
++ [ (path, "include-dirs") | path <- includeDirs bi ]
++ [ (path, "extra-lib-dirs") | path <- extraLibDirs bi ]
| bi <- allBuildInfo pkg ]
--TODO: check sets of paths that would be interpreted differently between Unix
-- and windows, ie case-sensitive or insensitive. Things that might clash, or
-- conversely be distinguished.
--TODO: use the tar path checks on all the above paths
-- | Check that the package declares the version in the @\"cabal-version\"@
-- field correctly.
--
checkCabalVersion :: PackageDescription -> [PackageCheck]
checkCabalVersion pkg =
catMaybes [
-- check syntax of cabal-version field
check (specVersion pkg >= Version [1,10] []
&& not simpleSpecVersionRangeSyntax) $
PackageBuildWarning $
"Packages relying on Cabal 1.10 or later must only specify a "
++ "version range of the form 'cabal-version: >= x.y'. Use "
++ "'cabal-version: >= " ++ display (specVersion pkg) ++ "'."
-- check syntax of cabal-version field
, check (specVersion pkg < Version [1,9] []
&& not simpleSpecVersionRangeSyntax) $
PackageDistSuspicious $
"It is recommended that the 'cabal-version' field only specify a "
++ "version range of the form '>= x.y'. Use "
++ "'cabal-version: >= " ++ display (specVersion pkg) ++ "'. "
++ "Tools based on Cabal 1.10 and later will ignore upper bounds."
-- check syntax of cabal-version field
, checkVersion [1,12] simpleSpecVersionSyntax $
PackageBuildWarning $
"With Cabal 1.10 or earlier, the 'cabal-version' field must use "
++ "range syntax rather than a simple version number. Use "
++ "'cabal-version: >= " ++ display (specVersion pkg) ++ "'."
-- check use of test suite sections
, checkVersion [1,8] (not (null $ testSuites pkg)) $
PackageDistInexcusable $
"The 'test-suite' section is new in Cabal 1.10. "
++ "Unfortunately it messes up the parser in older Cabal versions "
++ "so you must specify at least 'cabal-version: >= 1.8', but note "
++ "that only Cabal 1.10 and later can actually run such test suites."
-- check use of default-language field
-- note that we do not need to do an equivalent check for the
-- other-language field since that one does not change behaviour
, checkVersion [1,10] (any isJust (buildInfoField defaultLanguage)) $
PackageBuildWarning $
"To use the 'default-language' field the package needs to specify "
++ "at least 'cabal-version: >= 1.10'."
, check (specVersion pkg >= Version [1,10] []
&& (any isNothing (buildInfoField defaultLanguage))) $
PackageBuildWarning $
"Packages using 'cabal-version: >= 1.10' must specify the "
++ "'default-language' field for each component (e.g. Haskell98 or "
++ "Haskell2010). If a component uses different languages in "
++ "different modules then list the other ones in the "
++ "'other-languages' field."
-- check use of reexported-modules sections
, checkVersion [1,21]
(maybe False (not.null.reexportedModules) (library pkg)) $
PackageDistInexcusable $
"To use the 'reexported-module' field the package needs to specify "
++ "at least 'cabal-version: >= 1.21'."
-- check use of thinning and renaming
, checkVersion [1,21] (not (null depsUsingThinningRenamingSyntax)) $
PackageDistInexcusable $
"The package uses "
++ "thinning and renaming in the 'build-depends' field: "
++ commaSep (map display depsUsingThinningRenamingSyntax)
++ ". To use this new syntax, the package needs to specify at least"
++ "'cabal-version: >= 1.21'."
-- check use of default-extensions field
-- don't need to do the equivalent check for other-extensions
, checkVersion [1,10] (any (not . null) (buildInfoField defaultExtensions)) $
PackageBuildWarning $
"To use the 'default-extensions' field the package needs to specify "
++ "at least 'cabal-version: >= 1.10'."
-- check use of extensions field
, check (specVersion pkg >= Version [1,10] []
&& (any (not . null) (buildInfoField oldExtensions))) $
PackageBuildWarning $
"For packages using 'cabal-version: >= 1.10' the 'extensions' "
++ "field is deprecated. The new 'default-extensions' field lists "
++ "extensions that are used in all modules in the component, while "
++ "the 'other-extensions' field lists extensions that are used in "
++ "some modules, e.g. via the {-# LANGUAGE #-} pragma."
-- check use of "foo (>= 1.0 && < 1.4) || >=1.8 " version-range syntax
, checkVersion [1,8] (not (null versionRangeExpressions)) $
PackageDistInexcusable $
"The package uses full version-range expressions "
++ "in a 'build-depends' field: "
++ commaSep (map displayRawDependency versionRangeExpressions)
++ ". To use this new syntax the package needs to specify at least "
++ "'cabal-version: >= 1.8'. Alternatively, if broader compatibility "
++ "is important, then convert to conjunctive normal form, and use "
++ "multiple 'build-depends:' lines, one conjunct per line."
-- check use of "build-depends: foo == 1.*" syntax
, checkVersion [1,6] (not (null depsUsingWildcardSyntax)) $
PackageDistInexcusable $
"The package uses wildcard syntax in the 'build-depends' field: "
++ commaSep (map display depsUsingWildcardSyntax)
++ ". To use this new syntax the package need to specify at least "
++ "'cabal-version: >= 1.6'. Alternatively, if broader compatibility "
++ "is important then use: " ++ commaSep
[ display (Dependency name (eliminateWildcardSyntax versionRange))
| Dependency name versionRange <- depsUsingWildcardSyntax ]
-- check use of "tested-with: GHC (>= 1.0 && < 1.4) || >=1.8 " syntax
, checkVersion [1,8] (not (null testedWithVersionRangeExpressions)) $
PackageDistInexcusable $
"The package uses full version-range expressions "
++ "in a 'tested-with' field: "
++ commaSep (map displayRawDependency testedWithVersionRangeExpressions)
++ ". To use this new syntax the package needs to specify at least "
++ "'cabal-version: >= 1.8'."
-- check use of "tested-with: GHC == 6.12.*" syntax
, checkVersion [1,6] (not (null testedWithUsingWildcardSyntax)) $
PackageDistInexcusable $
"The package uses wildcard syntax in the 'tested-with' field: "
++ commaSep (map display testedWithUsingWildcardSyntax)
++ ". To use this new syntax the package need to specify at least "
++ "'cabal-version: >= 1.6'. Alternatively, if broader compatibility "
++ "is important then use: " ++ commaSep
[ display (Dependency name (eliminateWildcardSyntax versionRange))
| Dependency name versionRange <- testedWithUsingWildcardSyntax ]
-- check use of "data-files: data/*.txt" syntax
, checkVersion [1,6] (not (null dataFilesUsingGlobSyntax)) $
PackageDistInexcusable $
"Using wildcards like "
++ commaSep (map quote $ take 3 dataFilesUsingGlobSyntax)
++ " in the 'data-files' field requires 'cabal-version: >= 1.6'. "
++ "Alternatively if you require compatibility with earlier Cabal "
++ "versions then list all the files explicitly."
-- check use of "extra-source-files: mk/*.in" syntax
, checkVersion [1,6] (not (null extraSrcFilesUsingGlobSyntax)) $
PackageDistInexcusable $
"Using wildcards like "
++ commaSep (map quote $ take 3 extraSrcFilesUsingGlobSyntax)
++ " in the 'extra-source-files' field requires "
++ "'cabal-version: >= 1.6'. Alternatively if you require "
++ "compatibility with earlier Cabal versions then list all the files "
++ "explicitly."
-- check use of "source-repository" section
, checkVersion [1,6] (not (null (sourceRepos pkg))) $
PackageDistInexcusable $
"The 'source-repository' section is new in Cabal 1.6. "
++ "Unfortunately it messes up the parser in earlier Cabal versions "
++ "so you need to specify 'cabal-version: >= 1.6'."
-- check for new licenses
, checkVersion [1,4] (license pkg `notElem` compatLicenses) $
PackageDistInexcusable $
"Unfortunately the license " ++ quote (display (license pkg))
++ " messes up the parser in earlier Cabal versions so you need to "
++ "specify 'cabal-version: >= 1.4'. Alternatively if you require "
++ "compatibility with earlier Cabal versions then use 'OtherLicense'."
-- check for new language extensions
, checkVersion [1,2,3] (not (null mentionedExtensionsThatNeedCabal12)) $
PackageDistInexcusable $
"Unfortunately the language extensions "
++ commaSep (map (quote . display) mentionedExtensionsThatNeedCabal12)
++ " break the parser in earlier Cabal versions so you need to "
++ "specify 'cabal-version: >= 1.2.3'. Alternatively if you require "
++ "compatibility with earlier Cabal versions then you may be able to "
++ "use an equivalent compiler-specific flag."
, checkVersion [1,4] (not (null mentionedExtensionsThatNeedCabal14)) $
PackageDistInexcusable $
"Unfortunately the language extensions "
++ commaSep (map (quote . display) mentionedExtensionsThatNeedCabal14)
++ " break the parser in earlier Cabal versions so you need to "
++ "specify 'cabal-version: >= 1.4'. Alternatively if you require "
++ "compatibility with earlier Cabal versions then you may be able to "
++ "use an equivalent compiler-specific flag."
, check (specVersion pkg >= Version [1,23] []
&& isNothing (setupBuildInfo pkg)
&& buildType pkg == Just Custom) $
PackageBuildWarning $
"Packages using 'cabal-version: >= 1.23' with 'build-type: Custom' "
++ "must use a 'custom-setup' section with a 'setup-depends' field "
++ "that specifies the dependencies of the Setup.hs script itself. "
++ "The 'setup-depends' field uses the same syntax as 'build-depends', "
++ "so a simple example would be 'setup-depends: base, Cabal'."
, check (specVersion pkg < Version [1,23] []
&& isNothing (setupBuildInfo pkg)
&& buildType pkg == Just Custom) $
PackageBuildWarning $
"From version 1.23 cabal supports specifiying explicit dependencies "
++ "for Custom setup scripts. Consider using cabal-version >= 1.23 and "
++ "adding a 'custom-setup' section with a 'setup-depends' field "
++ "that specifies the dependencies of the Setup.hs script itself. "
++ "The 'setup-depends' field uses the same syntax as 'build-depends', "
++ "so a simple example would be 'setup-depends: base, Cabal'."
]
where
-- Perform a check on packages that use a version of the spec less than
-- the version given. This is for cases where a new Cabal version adds
-- a new feature and we want to check that it is not used prior to that
-- version.
checkVersion :: [Int] -> Bool -> PackageCheck -> Maybe PackageCheck
checkVersion ver cond pc
| specVersion pkg >= Version ver [] = Nothing
| otherwise = check cond pc
buildInfoField field = map field (allBuildInfo pkg)
dataFilesUsingGlobSyntax = filter usesGlobSyntax (dataFiles pkg)
extraSrcFilesUsingGlobSyntax = filter usesGlobSyntax (extraSrcFiles pkg)
usesGlobSyntax str = case parseFileGlob str of
Just (FileGlob _ _) -> True
_ -> False
versionRangeExpressions =
[ dep | dep@(Dependency _ vr) <- buildDepends pkg
, usesNewVersionRangeSyntax vr ]
testedWithVersionRangeExpressions =
[ Dependency (PackageName (display compiler)) vr
| (compiler, vr) <- testedWith pkg
, usesNewVersionRangeSyntax vr ]
simpleSpecVersionRangeSyntax =
either (const True)
(foldVersionRange'
True
(\_ -> False)
(\_ -> False) (\_ -> False)
(\_ -> True) -- >=
(\_ -> False)
(\_ _ -> False)
(\_ _ -> False) (\_ _ -> False)
id)
(specVersionRaw pkg)
-- is the cabal-version field a simple version number, rather than a range
simpleSpecVersionSyntax =
either (const True) (const False) (specVersionRaw pkg)
usesNewVersionRangeSyntax :: VersionRange -> Bool
usesNewVersionRangeSyntax =
(> 2) -- uses the new syntax if depth is more than 2
. foldVersionRange'
(1 :: Int)
(const 1)
(const 1) (const 1)
(const 1) (const 1)
(const (const 1))
(+) (+)
(const 3) -- uses new ()'s syntax
depsUsingWildcardSyntax = [ dep | dep@(Dependency _ vr) <- buildDepends pkg
, usesWildcardSyntax vr ]
-- XXX: If the user writes build-depends: foo with (), this is
-- indistinguishable from build-depends: foo, so there won't be an
-- error even though there should be
depsUsingThinningRenamingSyntax =
[ name
| bi <- allBuildInfo pkg
, (name, rns) <- Map.toList (targetBuildRenaming bi)
, rns /= ModuleRenaming True [] ]
testedWithUsingWildcardSyntax =
[ Dependency (PackageName (display compiler)) vr
| (compiler, vr) <- testedWith pkg
, usesWildcardSyntax vr ]
usesWildcardSyntax :: VersionRange -> Bool
usesWildcardSyntax =
foldVersionRange'
False (const False)
(const False) (const False)
(const False) (const False)
(\_ _ -> True) -- the wildcard case
(||) (||) id
eliminateWildcardSyntax =
foldVersionRange'
anyVersion thisVersion
laterVersion earlierVersion
orLaterVersion orEarlierVersion
(\v v' -> intersectVersionRanges (orLaterVersion v) (earlierVersion v'))
intersectVersionRanges unionVersionRanges id
compatLicenses = [ GPL Nothing, LGPL Nothing, AGPL Nothing, BSD3, BSD4
, PublicDomain, AllRightsReserved
, UnspecifiedLicense, OtherLicense ]
mentionedExtensions = [ ext | bi <- allBuildInfo pkg
, ext <- allExtensions bi ]
mentionedExtensionsThatNeedCabal12 =
nub (filter (`elem` compatExtensionsExtra) mentionedExtensions)
-- As of Cabal-1.4 we can add new extensions without worrying about
-- breaking old versions of cabal.
mentionedExtensionsThatNeedCabal14 =
nub (filter (`notElem` compatExtensions) mentionedExtensions)
-- The known extensions in Cabal-1.2.3
compatExtensions =
map EnableExtension
[ OverlappingInstances, UndecidableInstances, IncoherentInstances
, RecursiveDo, ParallelListComp, MultiParamTypeClasses
, FunctionalDependencies, Rank2Types
, RankNTypes, PolymorphicComponents, ExistentialQuantification
, ScopedTypeVariables, ImplicitParams, FlexibleContexts
, FlexibleInstances, EmptyDataDecls, CPP, BangPatterns
, TypeSynonymInstances, TemplateHaskell, ForeignFunctionInterface
, Arrows, Generics, NamedFieldPuns, PatternGuards
, GeneralizedNewtypeDeriving, ExtensibleRecords, RestrictedTypeSynonyms
, HereDocuments] ++
map DisableExtension
[MonomorphismRestriction, ImplicitPrelude] ++
compatExtensionsExtra
-- The extra known extensions in Cabal-1.2.3 vs Cabal-1.1.6
-- (Cabal-1.1.6 came with ghc-6.6. Cabal-1.2 came with ghc-6.8)
compatExtensionsExtra =
map EnableExtension
[ KindSignatures, MagicHash, TypeFamilies, StandaloneDeriving
, UnicodeSyntax, PatternSignatures, UnliftedFFITypes, LiberalTypeSynonyms
, TypeOperators, RecordWildCards, RecordPuns, DisambiguateRecordFields
, OverloadedStrings, GADTs, RelaxedPolyRec
, ExtendedDefaultRules, UnboxedTuples, DeriveDataTypeable
, ConstrainedClassMethods
] ++
map DisableExtension
[MonoPatBinds]
-- | A variation on the normal 'Text' instance, shows any ()'s in the original
-- textual syntax. We need to show these otherwise it's confusing to users when
-- we complain of their presence but do not pretty print them!
--
displayRawVersionRange :: VersionRange -> String
displayRawVersionRange =
Disp.render
. fst
. foldVersionRange' -- precedence:
-- All the same as the usual pretty printer, except for the parens
( Disp.text "-any" , 0 :: Int)
(\v -> (Disp.text "==" <> disp v , 0))
(\v -> (Disp.char '>' <> disp v , 0))
(\v -> (Disp.char '<' <> disp v , 0))
(\v -> (Disp.text ">=" <> disp v , 0))
(\v -> (Disp.text "<=" <> disp v , 0))
(\v _ -> (Disp.text "==" <> dispWild v , 0))
(\(r1, p1) (r2, p2) ->
(punct 2 p1 r1 <+> Disp.text "||" <+> punct 2 p2 r2 , 2))
(\(r1, p1) (r2, p2) ->
(punct 1 p1 r1 <+> Disp.text "&&" <+> punct 1 p2 r2 , 1))
(\(r, _ ) -> (Disp.parens r, 0)) -- parens
where
dispWild (Version b _) =
Disp.hcat (Disp.punctuate (Disp.char '.') (map Disp.int b))
<> Disp.text ".*"
punct p p' | p < p' = Disp.parens
| otherwise = id
displayRawDependency :: Dependency -> String
displayRawDependency (Dependency pkg vr) =
display pkg ++ " " ++ displayRawVersionRange vr
-- ------------------------------------------------------------
-- * Checks on the GenericPackageDescription
-- ------------------------------------------------------------
-- | Check the build-depends fields for any weirdness or bad practice.
--
checkPackageVersions :: GenericPackageDescription -> [PackageCheck]
checkPackageVersions pkg =
catMaybes [
-- Check that the version of base is bounded above.
-- For example this bans "build-depends: base >= 3".
-- It should probably be "build-depends: base >= 3 && < 4"
-- which is the same as "build-depends: base == 3.*"
check (not (boundedAbove baseDependency)) $
PackageDistInexcusable $
"The dependency 'build-depends: base' does not specify an upper "
++ "bound on the version number. Each major release of the 'base' "
++ "package changes the API in various ways and most packages will "
++ "need some changes to compile with it. The recommended practise "
++ "is to specify an upper bound on the version of the 'base' "
++ "package. This ensures your package will continue to build when a "
++ "new major version of the 'base' package is released. If you are "
++ "not sure what upper bound to use then use the next major "
++ "version. For example if you have tested your package with 'base' "
++ "version 4.5 and 4.6 then use 'build-depends: base >= 4.5 && < 4.7'."
]
where
-- TODO: What we really want to do is test if there exists any
-- configuration in which the base version is unbounded above.
-- However that's a bit tricky because there are many possible
-- configurations. As a cheap easy and safe approximation we will
-- pick a single "typical" configuration and check if that has an
-- open upper bound. To get a typical configuration we finalise
-- using no package index and the current platform.
finalised = finalizePackageDescription
[] (const True) buildPlatform
(unknownCompilerInfo
(CompilerId buildCompilerFlavor (Version [] [])) NoAbiTag)
[] pkg
baseDependency = case finalised of
Right (pkg', _) | not (null baseDeps) ->
foldr intersectVersionRanges anyVersion baseDeps
where
baseDeps =
[ vr | Dependency (PackageName "base") vr <- buildDepends pkg' ]
-- Just in case finalizePackageDescription fails for any reason,
-- or if the package doesn't depend on the base package at all,
-- then we will just skip the check, since boundedAbove noVersion = True
_ -> noVersion
boundedAbove :: VersionRange -> Bool
boundedAbove vr = case asVersionIntervals vr of
[] -> True -- this is the inconsistent version range.
intervals -> case last intervals of
(_, UpperBound _ _) -> True
(_, NoUpperBound ) -> False
checkConditionals :: GenericPackageDescription -> [PackageCheck]
checkConditionals pkg =
catMaybes [
check (not $ null unknownOSs) $
PackageDistInexcusable $
"Unknown operating system name "
++ commaSep (map quote unknownOSs)
, check (not $ null unknownArches) $
PackageDistInexcusable $
"Unknown architecture name "
++ commaSep (map quote unknownArches)
, check (not $ null unknownImpls) $
PackageDistInexcusable $
"Unknown compiler name "
++ commaSep (map quote unknownImpls)
]
where
unknownOSs = [ os | OS (OtherOS os) <- conditions ]
unknownArches = [ arch | Arch (OtherArch arch) <- conditions ]
unknownImpls = [ impl | Impl (OtherCompiler impl) _ <- conditions ]
conditions = maybe [] freeVars (condLibrary pkg)
++ concatMap (freeVars . snd) (condExecutables pkg)
freeVars (CondNode _ _ ifs) = concatMap compfv ifs
compfv (c, ct, mct) = condfv c ++ freeVars ct ++ maybe [] freeVars mct
condfv c = case c of
Var v -> [v]
Lit _ -> []
CNot c1 -> condfv c1
COr c1 c2 -> condfv c1 ++ condfv c2
CAnd c1 c2 -> condfv c1 ++ condfv c2
checkDevelopmentOnlyFlagsBuildInfo :: BuildInfo -> [PackageCheck]
checkDevelopmentOnlyFlagsBuildInfo bi =
catMaybes [
check has_WerrorWall $
PackageDistInexcusable $
"'ghc-options: -Wall -Werror' makes the package very easy to "
++ "break with future GHC versions because new GHC versions often "
++ "add new warnings. Use just 'ghc-options: -Wall' instead."
++ extraExplanation
, check (not has_WerrorWall && has_Werror) $
PackageDistInexcusable $
"'ghc-options: -Werror' makes the package easy to "
++ "break with future GHC versions because new GHC versions often "
++ "add new warnings. "
++ extraExplanation
, checkFlags ["-fdefer-type-errors"] $
PackageDistInexcusable $
"'ghc-options: -fdefer-type-errors' is fine during development but "
++ "is not appropriate for a distributed package. "
++ extraExplanation
-- -dynamic is not a debug flag
, check (any (\opt -> "-d" `isPrefixOf` opt && opt /= "-dynamic")
ghc_options) $
PackageDistInexcusable $
"'ghc-options: -d*' debug flags are not appropriate "
++ "for a distributed package. "
++ extraExplanation
, checkFlags ["-fprof-auto", "-fprof-auto-top", "-fprof-auto-calls",
"-fprof-cafs", "-fno-prof-count-entries",
"-auto-all", "-auto", "-caf-all"] $
PackageDistSuspicious $
"'ghc-options: -fprof*' profiling flags are typically not "
++ "appropriate for a distributed library package. These flags are "
++ "useful to profile this package, but when profiling other packages "
++ "that use this one these flags clutter the profile output with "
++ "excessive detail. If you think other packages really want to see "
++ "cost centres from this package then use '-fprof-auto-exported' "
++ "which puts cost centres only on exported functions. "
++ extraExplanation
]
where
extraExplanation =
" Alternatively, if you want to use this, make it conditional based "
++ "on a Cabal configuration flag (with 'manual: True' and 'default: "
++ "False') and enable that flag during development."
has_WerrorWall = has_Werror && ( has_Wall || has_W )
has_Werror = "-Werror" `elem` ghc_options
has_Wall = "-Wall" `elem` ghc_options
has_W = "-W" `elem` ghc_options
ghc_options = hcOptions GHC bi ++ hcProfOptions GHC bi
checkFlags :: [String] -> PackageCheck -> Maybe PackageCheck
checkFlags flags = check (any (`elem` flags) ghc_options)
checkDevelopmentOnlyFlags :: GenericPackageDescription -> [PackageCheck]
checkDevelopmentOnlyFlags pkg =
concatMap checkDevelopmentOnlyFlagsBuildInfo
[ bi
| (conditions, bi) <- allConditionalBuildInfo
, not (any guardedByManualFlag conditions) ]
where
guardedByManualFlag = definitelyFalse
-- We've basically got three-values logic here: True, False or unknown
-- hence this pattern to propagate the unknown cases properly.
definitelyFalse (Var (Flag n)) = maybe False not (Map.lookup n manualFlags)
definitelyFalse (Var _) = False
definitelyFalse (Lit b) = not b
definitelyFalse (CNot c) = definitelyTrue c
definitelyFalse (COr c1 c2) = definitelyFalse c1 && definitelyFalse c2
definitelyFalse (CAnd c1 c2) = definitelyFalse c1 || definitelyFalse c2
definitelyTrue (Var (Flag n)) = fromMaybe False (Map.lookup n manualFlags)
definitelyTrue (Var _) = False
definitelyTrue (Lit b) = b
definitelyTrue (CNot c) = definitelyFalse c
definitelyTrue (COr c1 c2) = definitelyTrue c1 || definitelyTrue c2
definitelyTrue (CAnd c1 c2) = definitelyTrue c1 && definitelyTrue c2
manualFlags = Map.fromList
[ (flagName flag, flagDefault flag)
| flag <- genPackageFlags pkg
, flagManual flag ]
allConditionalBuildInfo :: [([Condition ConfVar], BuildInfo)]
allConditionalBuildInfo =
concatMap (collectCondTreePaths libBuildInfo)
(maybeToList (condLibrary pkg))
++ concatMap (collectCondTreePaths buildInfo . snd)
(condExecutables pkg)
++ concatMap (collectCondTreePaths testBuildInfo . snd)
(condTestSuites pkg)
++ concatMap (collectCondTreePaths benchmarkBuildInfo . snd)
(condBenchmarks pkg)
-- get all the leaf BuildInfo, paired up with the path (in the tree sense)
-- of if-conditions that guard it
collectCondTreePaths :: (a -> b)
-> CondTree v c a
-> [([Condition v], b)]
collectCondTreePaths mapData = go []
where
go conditions condNode =
-- the data at this level in the tree:
(reverse conditions, mapData (condTreeData condNode))
: concat
[ go (condition:conditions) ifThen
| (condition, ifThen, _) <- condTreeComponents condNode ]
++ concat
[ go (condition:conditions) elseThen
| (condition, _, Just elseThen) <- condTreeComponents condNode ]
-- ------------------------------------------------------------
-- * Checks involving files in the package
-- ------------------------------------------------------------
-- | Sanity check things that requires IO. It looks at the files in the
-- package and expects to find the package unpacked in at the given file path.
--
checkPackageFiles :: PackageDescription -> FilePath -> IO [PackageCheck]
checkPackageFiles pkg root = checkPackageContent checkFilesIO pkg
where
checkFilesIO = CheckPackageContentOps {
doesFileExist = System.doesFileExist . relative,
doesDirectoryExist = System.doesDirectoryExist . relative,
getDirectoryContents = System.Directory.getDirectoryContents . relative,
getFileContents = \f -> openBinaryFile (relative f) ReadMode >>= hGetContents
}
relative path = root </> path
-- | A record of operations needed to check the contents of packages.
-- Used by 'checkPackageContent'.
--
data CheckPackageContentOps m = CheckPackageContentOps {
doesFileExist :: FilePath -> m Bool,
doesDirectoryExist :: FilePath -> m Bool,
getDirectoryContents :: FilePath -> m [FilePath],
getFileContents :: FilePath -> m String
}
-- | Sanity check things that requires looking at files in the package.
-- This is a generalised version of 'checkPackageFiles' that can work in any
-- monad for which you can provide 'CheckPackageContentOps' operations.
--
-- The point of this extra generality is to allow doing checks in some virtual
-- file system, for example a tarball in memory.
--
checkPackageContent :: Monad m => CheckPackageContentOps m
-> PackageDescription
-> m [PackageCheck]
checkPackageContent ops pkg = do
cabalBomError <- checkCabalFileBOM ops
licenseErrors <- checkLicensesExist ops pkg
setupError <- checkSetupExists ops pkg
configureError <- checkConfigureExists ops pkg
localPathErrors <- checkLocalPathsExist ops pkg
vcsLocation <- checkMissingVcsInfo ops pkg
return $ licenseErrors
++ catMaybes [cabalBomError, setupError, configureError]
++ localPathErrors
++ vcsLocation
checkCabalFileBOM :: Monad m => CheckPackageContentOps m
-> m (Maybe PackageCheck)
checkCabalFileBOM ops = do
epdfile <- findPackageDesc ops
case epdfile of
Left pc -> return $ Just pc
Right pdfile -> (flip check pc . startsWithBOM . fromUTF8) `liftM` (getFileContents ops pdfile)
where pc = PackageDistInexcusable $
pdfile ++ " starts with an Unicode byte order mark (BOM). This may cause problems with older cabal versions."
-- |Find a package description file in the given directory. Looks for
-- @.cabal@ files.
findPackageDesc :: Monad m => CheckPackageContentOps m
-> m (Either PackageCheck FilePath) -- ^<pkgname>.cabal
findPackageDesc ops
= do let dir = "."
files <- getDirectoryContents ops dir
-- to make sure we do not mistake a ~/.cabal/ dir for a <pkgname>.cabal
-- file we filter to exclude dirs and null base file names:
cabalFiles <- filterM (doesFileExist ops)
[ dir </> file
| file <- files
, let (name, ext) = splitExtension file
, not (null name) && ext == ".cabal" ]
case cabalFiles of
[] -> return (Left $ PackageBuildImpossible noDesc)
[cabalFile] -> return (Right cabalFile)
multiple -> return (Left $ PackageBuildImpossible $ multiDesc multiple)
where
noDesc :: String
noDesc = "No cabal file found.\n"
++ "Please create a package description file <pkgname>.cabal"
multiDesc :: [String] -> String
multiDesc l = "Multiple cabal files found.\n"
++ "Please use only one of: "
++ intercalate ", " l
checkLicensesExist :: Monad m => CheckPackageContentOps m
-> PackageDescription
-> m [PackageCheck]
checkLicensesExist ops pkg = do
exists <- mapM (doesFileExist ops) (licenseFiles pkg)
return
[ PackageBuildWarning $
"The '" ++ fieldname ++ "' field refers to the file "
++ quote file ++ " which does not exist."
| (file, False) <- zip (licenseFiles pkg) exists ]
where
fieldname | length (licenseFiles pkg) == 1 = "license-file"
| otherwise = "license-files"
checkSetupExists :: Monad m => CheckPackageContentOps m
-> PackageDescription
-> m (Maybe PackageCheck)
checkSetupExists ops _ = do
hsexists <- doesFileExist ops "Setup.hs"
lhsexists <- doesFileExist ops "Setup.lhs"
return $ check (not hsexists && not lhsexists) $
PackageDistInexcusable $
"The package is missing a Setup.hs or Setup.lhs script."
checkConfigureExists :: Monad m => CheckPackageContentOps m
-> PackageDescription
-> m (Maybe PackageCheck)
checkConfigureExists ops PackageDescription { buildType = Just Configure } = do
exists <- doesFileExist ops "configure"
return $ check (not exists) $
PackageBuildWarning $
"The 'build-type' is 'Configure' but there is no 'configure' script. "
++ "You probably need to run 'autoreconf -i' to generate it."
checkConfigureExists _ _ = return Nothing
checkLocalPathsExist :: Monad m => CheckPackageContentOps m
-> PackageDescription
-> m [PackageCheck]
checkLocalPathsExist ops pkg = do
let dirs = [ (dir, kind)
| bi <- allBuildInfo pkg
, (dir, kind) <-
[ (dir, "extra-lib-dirs") | dir <- extraLibDirs bi ]
++ [ (dir, "include-dirs") | dir <- includeDirs bi ]
++ [ (dir, "hs-source-dirs") | dir <- hsSourceDirs bi ]
, isRelative dir ]
missing <- filterM (liftM not . doesDirectoryExist ops . fst) dirs
return [ PackageBuildWarning {
explanation = quote (kind ++ ": " ++ dir)
++ " directory does not exist."
}
| (dir, kind) <- missing ]
checkMissingVcsInfo :: Monad m => CheckPackageContentOps m
-> PackageDescription
-> m [PackageCheck]
checkMissingVcsInfo ops pkg | null (sourceRepos pkg) = do
vcsInUse <- liftM or $ mapM (doesDirectoryExist ops) repoDirnames
if vcsInUse
then return [ PackageDistSuspicious message ]
else return []
where
repoDirnames = [ dirname | repo <- knownRepoTypes
, dirname <- repoTypeDirname repo ]
message = "When distributing packages it is encouraged to specify source "
++ "control information in the .cabal file using one or more "
++ "'source-repository' sections. See the Cabal user guide for "
++ "details."
checkMissingVcsInfo _ _ = return []
repoTypeDirname :: RepoType -> [FilePath]
repoTypeDirname Darcs = ["_darcs"]
repoTypeDirname Git = [".git"]
repoTypeDirname SVN = [".svn"]
repoTypeDirname CVS = ["CVS"]
repoTypeDirname Mercurial = [".hg"]
repoTypeDirname GnuArch = [".arch-params"]
repoTypeDirname Bazaar = [".bzr"]
repoTypeDirname Monotone = ["_MTN"]
repoTypeDirname _ = []
-- ------------------------------------------------------------
-- * Checks involving files in the package
-- ------------------------------------------------------------
-- | Check the names of all files in a package for portability problems. This
-- should be done for example when creating or validating a package tarball.
--
checkPackageFileNames :: [FilePath] -> [PackageCheck]
checkPackageFileNames files =
(take 1 . catMaybes . map checkWindowsPath $ files)
++ (take 1 . catMaybes . map checkTarPath $ files)
-- If we get any of these checks triggering then we're likely to get
-- many, and that's probably not helpful, so return at most one.
checkWindowsPath :: FilePath -> Maybe PackageCheck
checkWindowsPath path =
check (not $ FilePath.Windows.isValid path') $
PackageDistInexcusable $
"Unfortunately, the file " ++ quote path ++ " is not a valid file "
++ "name on Windows which would cause portability problems for this "
++ "package. Windows file names cannot contain any of the characters "
++ "\":*?<>|\" and there are a few reserved names including \"aux\", "
++ "\"nul\", \"con\", \"prn\", \"com1-9\", \"lpt1-9\" and \"clock$\"."
where
path' = ".\\" ++ path
-- force a relative name to catch invalid file names like "f:oo" which
-- otherwise parse as file "oo" in the current directory on the 'f' drive.
-- | Check a file name is valid for the portable POSIX tar format.
--
-- The POSIX tar format has a restriction on the length of file names. It is
-- unfortunately not a simple restriction like a maximum length. The exact
-- restriction is that either the whole path be 100 characters or less, or it
-- be possible to split the path on a directory separator such that the first
-- part is 155 characters or less and the second part 100 characters or less.
--
checkTarPath :: FilePath -> Maybe PackageCheck
checkTarPath path
| length path > 255 = Just longPath
| otherwise = case pack nameMax (reverse (splitPath path)) of
Left err -> Just err
Right [] -> Nothing
Right (first:rest) -> case pack prefixMax remainder of
Left err -> Just err
Right [] -> Nothing
Right (_:_) -> Just noSplit
where
-- drop the '/' between the name and prefix:
remainder = init first : rest
where
nameMax, prefixMax :: Int
nameMax = 100
prefixMax = 155
pack _ [] = Left emptyName
pack maxLen (c:cs)
| n > maxLen = Left longName
| otherwise = Right (pack' maxLen n cs)
where n = length c
pack' maxLen n (c:cs)
| n' <= maxLen = pack' maxLen n' cs
where n' = n + length c
pack' _ _ cs = cs
longPath = PackageDistInexcusable $
"The following file name is too long to store in a portable POSIX "
++ "format tar archive. The maximum length is 255 ASCII characters.\n"
++ "The file in question is:\n " ++ path
longName = PackageDistInexcusable $
"The following file name is too long to store in a portable POSIX "
++ "format tar archive. The maximum length for the name part (including "
++ "extension) is 100 ASCII characters. The maximum length for any "
++ "individual directory component is 155.\n"
++ "The file in question is:\n " ++ path
noSplit = PackageDistInexcusable $
"The following file name is too long to store in a portable POSIX "
++ "format tar archive. While the total length is less than 255 ASCII "
++ "characters, there are unfortunately further restrictions. It has to "
++ "be possible to split the file path on a directory separator into "
++ "two parts such that the first part fits in 155 characters or less "
++ "and the second part fits in 100 characters or less. Basically you "
++ "have to make the file name or directory names shorter, or you could "
++ "split a long directory name into nested subdirectories with shorter "
++ "names.\nThe file in question is:\n " ++ path
emptyName = PackageDistInexcusable $
"Encountered a file with an empty name, something is very wrong! "
++ "Files with an empty name cannot be stored in a tar archive or in "
++ "standard file systems."
-- ------------------------------------------------------------
-- * Utils
-- ------------------------------------------------------------
quote :: String -> String
quote s = "'" ++ s ++ "'"
commaSep :: [String] -> String
commaSep = intercalate ", "
dups :: Ord a => [a] -> [a]
dups xs = [ x | (x:_:_) <- group (sort xs) ]
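-- A quick example of the behaviour (an added sketch, not part of the original
-- module): values occurring more than once are reported, each of them once.
_dupsExample :: Bool
_dupsExample = dups [1, 2, 2, 3, 3, 3 :: Int] == [2, 3]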
fileExtensionSupportedLanguage :: FilePath -> Bool
fileExtensionSupportedLanguage path =
isHaskell || isC
where
extension = takeExtension path
isHaskell = extension `elem` [".hs", ".lhs"]
isC = isJust (filenameCDialect extension)
|
corngood/cabal
|
Cabal/Distribution/PackageDescription/Check.hs
|
bsd-3-clause
| 75,627 | 0 | 22 | 20,287 | 14,211 | 7,392 | 6,819 | 1,274 | 33 |
module Compile.SimpleLambda where
import Bytecode.Instruction
import Compile.Variable
-- A simple lambda is one that doesn't have a nested lambda expression
data SimpleLambda
= SimpleLambda
{ simpleLambdaSymbol :: Symbol
, simpleLambdaArity :: Int
, simpleLambdaBody :: SimpleLambdaBody
}
deriving (Show)
data SimpleLambdaBody
= SimpleApp SimpleLambdaBody SimpleLambdaBody
| SimpleVar Variable
| SimpleLit Int
deriving (Show)
|
exFalso/lambda
|
src/lib/Compile/SimpleLambda.hs
|
bsd-3-clause
| 479 | 0 | 8 | 107 | 79 | 48 | 31 | 14 | 0 |
module Tokenizer where
import Data.Char
import FPPrac
import Debug.Trace
data Token =
TokProgram
-- supporting
| TokComma
| TokSemi
| TokLcurly
| TokRcurly
| TokLparen
| TokRparen
-- assign
| TokAssign
-- int operands
| TokPlus -- can be unary
| TokMinus -- can be unary
| TokMult
| TokDivide
| TokModulo
-- bin operands
| TokOr
| TokAnd
| TokEq
| TokNeq
| TokGreat
| TokGreatEq
| TokLess
| TokLessEq
| TokNeg
-- keywords
| TokVar
| TokWhile
| TokIf
| TokElse
| TokWrite
-- identifier and numbers
| TokId String
| TokNum Number
deriving (Show,Eq)
tokenize :: String -> [Token]
tokenize xs = r
where
r = reverse $ tokenize' xs []
tokenize' :: String -> [Token] -> [Token]
tokenize' (x:xs) ts | isDigit x = tokenize' r0 (t0:ts)
| isAlphaNum x = tokenize' r1 (t1:ts)
| x `elem` ",;{}()=+-*/%!<>&|" = tokenize' r2 (t2:ts)
| isSpace x = tokenize' xs ts -- ignore spaces
| otherwise = error "failed to tokenize"
where
(t0,r0) = tokenizeNumber (x:xs)
(t1,r1) = tokenizeIdentifier (x:xs)
(t2,r2) = tokenizeHelper (x:xs)
tokenize' [] ts = ts
tokenizeHelper :: String -> (Token,String)
tokenizeHelper ([x]) | x == ',' = (TokComma,"")
| x == ';' = (TokSemi,"")
| x == '{' = (TokLcurly,"")
| x == '}' = (TokRcurly,"")
| x == '(' = (TokLparen,"")
| x == ')' = (TokRparen,"")
| x == '+' = (TokPlus,"")
| x == '-' = (TokMinus,"")
| x == '*' = (TokMult,"")
| x == '/' = (TokDivide,"")
| x == '%' = (TokModulo,"")
| otherwise = error "failed to tokenize last helper"
tokenizeHelper (x:y:xs) | x == ',' = (TokComma,(y:xs))
| x == ';' = (TokSemi,(y:xs))
| x == '{' = (TokLcurly,(y:xs))
| x == '}' = (TokRcurly,(y:xs))
| x == '(' = (TokLparen,(y:xs))
| x == ')' = (TokRparen,(y:xs))
| x == '+' = (TokPlus,(y:xs))
| x == '-' = (TokMinus,(y:xs))
| x == '*' = (TokMult,(y:xs))
| x == '/' = (TokDivide,(y:xs))
| x == '%' = (TokModulo,(y:xs))
| x == '|' && y == '|' = (TokOr,xs)
| x == '&' && y == '&' = (TokOr,xs)
| x == '=' && y == '=' = (TokEq,xs)
| x == '=' = (TokAssign,(y:xs))
| x == '<' && y == '=' = (TokLessEq,xs)
| x == '<' = (TokLess,(y:xs))
| x == '>' && y == '=' = (TokGreatEq,xs)
| x == '>' = (TokGreat,(y:xs))
| x == '!' && y == '=' = (TokNeq,xs)
| x == '!' = (TokNeg,(y:xs))
| otherwise = error "failed to tokenize helper"
tokenizeNumber :: String -> (Token,String)
tokenizeNumber xs = ((TokNum (read ts)),r)
where
(ts,r) = tokenizeNumber' BfrDot xs
data TokenizeNumberState = BfrDot | DotConsmd | AftrDot
tokenizeNumber' :: TokenizeNumberState -> String -> (String,String)
tokenizeNumber' BfrDot ('.':xs) = ('.':z,rest)
where
(z,rest) = tokenizeNumber' DotConsmd xs
tokenizeNumber' BfrDot (x:xs) | isDigit x = ((x:y), r0)
| otherwise = ("",(x:xs))
where
(y,r0) = tokenizeNumber' BfrDot xs
tokenizeNumber' DotConsmd (x:xs) | isDigit x = ((x:y), r0)
| otherwise = ("",(x:xs))
where
(y,r0) = tokenizeNumber' AftrDot xs
tokenizeNumber' AftrDot (x:xs) | isDigit x = ((x:y), r0)
| otherwise = ("",(x:xs))
where
(y,r0) = tokenizeNumber' AftrDot xs
tokenizeIdentifier :: String -> (Token,String)
tokenizeIdentifier xs | ts == "var" = (TokVar,r)
| ts == "while" = (TokWhile,r)
| ts == "if" = (TokIf,r)
| ts == "else" = (TokElse,r)
| ts == "write" = (TokWrite,r)
| otherwise = ((TokId ts),r)
where
(ts,r) = tokenizeIdentifier' AccptAlpha xs
data TokenizeIdentifierState = AccptAlpha | AccAlphaNum
tokenizeIdentifier' :: TokenizeIdentifierState -> String -> (String,String)
tokenizeIdentifier' AccptAlpha (x:xs) | isAlpha x = (x:y, r0)
| otherwise = ("",(x:xs))
where
(y,r0) = tokenizeIdentifier' AccAlphaNum xs
tokenizeIdentifier' AccAlphaNum (x:xs) | isAlphaNum x = (x:y , r0)
| otherwise = ("",(x:xs))
where
(y,r0) = tokenizeIdentifier' AccAlphaNum xs
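-- A small usage sketch (an added example, not part of the original file). It
-- relies on 'read' producing an FPPrac 'Number' from "3", exactly as
-- 'tokenizeNumber' already assumes. The expected result is
--   [TokVar, TokId "x", TokAssign, TokNum 3, TokSemi]
exampleTokens :: [Token]
exampleTokens = tokenize "var x = 3;"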
|
tscheepers/Simple-Compiler
|
Tokenizer.hs
|
bsd-3-clause
| 4,195 | 20 | 10 | 1,182 | 1,950 | 1,046 | 904 | 115 | 1 |
{-# LANGUAGE DeriveGeneric #-}
{-# OPTIONS_GHC -fwarn-incomplete-patterns #-}
-- A place to explore some early ideas about nested data parallelism on
-- hierarchical architectures.
--------------------------------------------------------------------------------
module NDP_Hierarchical where
import Data.Map as M
import Prelude as P
import Text.PrettyPrint.GenericPretty (Out(doc,docPrec), Generic)
import System.Random as R
----------------------------------------------------
-- Example machine hierarchies:
----------------------------------------------------
quadcpu :: Level
quadcpu = Level "socket" static
[Level "core" [Sync PtToPt, Bounded 4] []]
-- OR:
-- [ Level "core1" static []
-- , Level "core2" static []
-- , Level "core3" static []
-- , Level "core4" static [] ]
-- | A GPU model. This description mixes static entities (HW) with dynamic entities
-- (with HW scheduling support) such as kernels, grids, and blocks.
gpu :: Level
gpu = Level "gpu" static
[Level "kernel" [Sync NoSync] -- Unlimited DAG of kernels.
-- No sync between code inside different kernels.
[Level "block" [Sync NoSync, Bounded$ 65535 * 65535 * 65535]
[Level "thread" [Sync Barrier, Bounded 1024] []]]]
-- Barriers allowed between threads within the same block.
-- | A complete machine could have a CPU and GPU.
machine :: Level
machine =
Level "machine" static [ quadcpu, gpu ]
static :: [LevelProp]
static = [Bounded 1]
-- Note that these examples do not currently model the separate sequential steps
-- within a thread or a core. Some tilings or static schedulings would need to refer
-- to those explicitly.
-- Example mappings:
--------------------
ex1 = OpTree FOLD () [leaf MAP, leaf FOLD]
mp1 = domap gpu ex1
mp2 = domap machine ex1
domap mach ex = do
g <- getStdGen
let (mp,g') = randomMapping mach ex g
setStdGen g'
return mp
--------------------------------------------------------------------------------
-- Machine description Schemas
--------------------------------------------------------------------------------
-- | A level of a hierarchical machine:
data Level =
Level
{ name :: String
, props :: [LevelProp]
, children :: [Level]
}
deriving (Eq, Show, Read, Ord, Generic)
-- | The properties of one level of a machine hierarchy.
data LevelProp =
Bounded Int -- | Max number of entities at this level. For now they have no
              -- particular topology; only a cardinality.
| Sync SyncCapability
| MemModel -- FinishMe
| CostModel -- FinishMe
-- TODO: topology: DAG, sequence, etc
deriving (Eq, Show, Read, Ord, Generic)
-- TODO: probably need to distinguish which levels can do work independently, if any
-- non-leaf levels can....
-- | A qualitative description of the synchronization capabalities of a piece of
-- hardware. Obviously, a barrier could be implemented with PtToPt, but that would
-- be expensive.
data SyncCapability = NoSync -- ^ Separate tasks in the parallel region may not sync.
| PtToPt -- ^ Separate tasks may sync on a pairwise point-to-point basis.
| Barrier -- ^ Separate threads/tasks may partake in a global barrier across the parallel region.
deriving (Eq, Show, Read, Ord, Generic)
-- TODO: add costs. For example, on the GPU, within a warp barriers are free,
-- whereas __syncthreads within a block is not.
--------------------------------------------------------------------------------
-- Fine grained dataflow-graph topology constraints
--------------------------------------------------------------------------------
-- This part is very speculative.
-- | Graphs of aggregate operators are coarse-grained, but induce fine-grained task
-- graphs at runtime. These constraints describe the topology of those fine-grained
-- graphs.
--
-- A far-out goal would be to be able to infer costs from the combination of these
-- constraints and the cost models associated with machines (and their memory
-- hierarchies).
data Constraint = Exists (Var -> Constraint)
| ForAll (Var -> Constraint)
| And Constraint Constraint
| Or Constraint Constraint
| Not Constraint
| Eql Operand Operand
| Leq Operand Operand
data Operand = Task Var
| Var Var
| ArrElem Var Var
-- | Simply names for now:
type Var = String
-- | The properties of an aggregate operator:
data OpProp = Ordering Constraint
| NeedsSync SyncCapability
data Op = MAP | FOLD | SCAN
deriving (Eq, Show, Read, Ord, Generic)
-- permute, backpermute, replicate, generate etc etc
opTable :: Map Op [OpProp]
opTable = M.fromList $
[ (FOLD, [ NeedsSync PtToPt,
Ordering (fc "arr")]) -- Need the name of the input array.
-- TODO: SCAN, MAP, etc
]
-- | The fine grained ordering constrainst for a Fold dataflow graph. Note that fold
-- is very flexible; it is not constrained to a single topology.
--
-- What this says is that for all tasks created by the fold there exist two upstream
-- dependencies for that task (two inputs), each of which may either be an array
-- element, or the intermediate result produced by another task.
--
-- NOTE: This ASSUMES that the tasks are indexed by a disjoint set of keys (numbers) than
-- the array subscripts. Otherwise the Not.Eql bit below is wrong.
fc :: Var -> Constraint
fc arr = -- Exists $ \ arr ->
ForAll $ \ i -> -- This is really limited to the scope of the fold's tasks.
Exists $ \ j ->
Exists $ \ k ->
Not (Var j `Eql` Var k) `And`
((ArrElem arr j `Leq` Task i) `Or`
(Task j `Leq` Task i)) `And`
((ArrElem arr k `Leq` Task i) `Or`
(Task k `Leq` Task i))
--------------------------------------------------------------------------------
-- Random mapping of operators onto machine levels
--------------------------------------------------------------------------------
-- A mapping of a linearly [n] nested task onto a linear [d] hierarchy is
-- straightforward (requires only computing (n+d) choose n). It's
-- much more complicated to count the possibilities when both are trees.
-- | This datatype is not a complete program, but an (easily extracted) simple
-- abstraction of the nested structure of an NDP program: namely, which operators
-- contain nested sub-operators.
--
-- The effect of traditional (NESL-style) NDP flattening transformations is to lift
-- out and flatten all nested operations which would result in none of these OpTree's
-- having any children.
data OpTree a = OpTree Op a [OpTree a]
deriving (Eq, Show, Read, Ord, Generic)
-- Note: this doesn't really account for operations with multiple lambda arguments
-- that could independently contain nested parallel computations....
leaf a = OpTree a () []
-- | An OpTree where every Op has been assigned the name of a Level
type MappedTree = OpTree String
-- | Create a random "Natural" (descending) mapping between nested ops and levels of
-- a machine hierarchy.
randomMapping :: RandomGen g => Level -> OpTree a -> g -> (MappedTree, g)
randomMapping mach optree gen = testAndDiscardLoop gen
-- TODO: first, simplest implementation. Randomly assign ops to levels and then
-- see if it is a "natural" (descending) mapping.
where
allLvls (Level str _ chldrn) = str : concatMap allLvls chldrn
lvlList = allLvls mach
numLvls = length lvlList
decorate :: [String] -> OpTree a -> OpTree String
decorate supply op = head $ fst $ decorLst supply [op]
decorLst rands [] = ([],rands)
decorLst (target:rst) (OpTree op _ ls : moreops) =
let (ls', rst') = decorLst rst ls
(moreops', rst'') = decorLst rst' moreops in
(OpTree op target ls' : moreops', rst'')
decorLst [] _ = error "impossible"
testAndDiscardLoop g =
let (g1,g2) = R.split g
randLvls = P.map (lvlList!!) $ randomRs (0,numLvls-1) g1
candidate = decorate randLvls optree
in if verifyOrdered candidate (makeLEQ mach)
then (candidate,g2)
else testAndDiscardLoop g2
-- | Returns a less-than-or-equal op to capture the depth partial order in a machine
-- hierarchy. (Root is "least".)
--
-- Really a data structure should be used to cache the transitive closure of this
-- relation. `makeLEQ` is just a naive implementation that traverses the whole tree
-- on each test.
makeLEQ mach left right
| left == right = True
| otherwise = loop mach
where
loop (Level name _ chlds)
| name == left = any (contains right) chlds
| name == right = False
| otherwise = any loop chlds
contains name (Level str _ chldrn)
| name == str = True
| otherwise = any (contains name) chldrn
-- | Assumes that the LEQ op is a valid partial order. Thus this only checks
-- child/parent neighbors in the tree.
verifyOrdered (OpTree _ tag ls) leq =
all (loop tag) ls
where
loop last (OpTree _ trg chldrn)
| last `leq` trg = all (loop trg) chldrn
| otherwise = False
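-- A tiny illustration of the ordering (an added sketch, not part of the
-- original file): in the 'machine' hierarchy the root is least, so
--   makeLEQ machine "machine" "thread" == True
--   makeLEQ machine "thread" "socket" == False
_leqExample :: Bool
_leqExample = makeLEQ machine "machine" "thread"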
--------------------------------------------------------------------------------
-- Codegen interfaces:
--------------------------------------------------------------------------------
-- The idea is that each level exposes various common concepts (parallel loops,
-- sequential loops) as well as metadata/properties.
-- Directly invoking codegen tactics at higher levels of abstraction means foregoing
-- control. It must always be possible, however, to allow randomly generated
-- mappings to succeed.
-- Case in Point: FOLD
----------------------
-- The metadata for the fold operator should make clear how many tasks it generates,
-- their valid topologies, and the sync requirements.
-- The code generator for the fold operator should deal with the level it is assigned
-- by inspecting its metadata and then invoking its methods to generate parallel or
-- sequential loops.
-- For example, a common implementation is to have a fold execute as a "block" on the
-- GPU, enabling barrier synchronization. The topology selected is not a binary
-- tree, rather it's several "straight lines" merging into a binary tree at the end.
-- That is, the first phase is for N threads to independently reduce N chunks of
-- contiguous (or interleaved) elements in the source. The second phase is the
-- binary tree: progressively fewer threads (N/2, N/4) to reduce individual pairs of
-- elements, with barriers in between until there is only one left.
-- (This separation of phases could be reified explicitly by transforming the program
-- from "fold f arr" into "fold f (map (fold f) (split N arr))".)
-- So a typical codegen for fold looks like a single kernel launching a single
-- (large) block that first performs a sequential loop and then performs the
-- alternating barriers and single reductions (with conditionals that cause half the
-- threads to drop out at each level).
-- IF Fold is assigned a high level in the hierarchy, for example bridging multiple
-- devices. Some kind of fissioning of the workload must occur, which perhaps could
-- happen via explicit program rewritting followed by patching up the mapping.
-- IF Fold is mapped to a leaf thread, then it must generate a single sequential loop.
-- IF fold is mapped to a level with parallelism but no synchronization between its
-- children, then it is again forced to fission. It can do the communication-free
-- partial reduction in parallel, but the final summing of intermediate results must
-- happen at a later point either sequentially or in parallel (with synchronization).
-- This later phase can be anywhere above or below the current level, for example in
-- a separate kernel after the current one (implicit kernel-level barriers), or on
-- the CPU host rather than the GPU.
-- [NOTE: the above, simple machine hierarchies need to do more to model the
-- synchronization that goes on between the GPU and CPU.]
--------------------------------------------------------------------------------
-- Misc utilities and boilerplate
--------------------------------------------------------------------------------
-- TODO: switch this to pretty printing:
instance Show Constraint where
show x = fst (loop x nameSupply)
where
nameSupply = P.map (("i"++) . show) [0..]
parens x = "("++ x ++ ")"
loop _ [] = error "not possible"
loop x ns@(vr:rst) =
let binop op c1 c2 =
let (c1',ns') = loop c1 ns
(c2',ns'') = loop c2 ns' in
(parens (c1' ++" "++op++" "++ c2'), ns'')
in
case x of
Exists fn -> let (str,ns') = loop (fn vr) rst in
("Exists "++vr++" . "++parens str, ns')
ForAll fn -> let (str,ns') = loop (fn vr) rst in
("ForAll "++vr++" . "++parens str, ns')
Not c1 -> let (str,ns') = loop c1 rst in
("!"++ parens str, ns')
Eql v1 v2 -> (show v1 ++ " == " ++ show v2, ns)
Leq v1 v2 -> (show v1 ++ " <= " ++ show v2, ns)
And c1 c2 -> binop "&&" c1 c2
Or c1 c2 -> binop "||" c1 c2
instance Show Operand where
show (Task i) = i
show (Var i) = i
show (ArrElem a i) = a ++ "["++i++"]"
instance Out Level
instance Out LevelProp
instance Out SyncCapability
instance Out Op
instance Out a => Out (OpTree a)
|
iu-parfunc/ndp_brainstorm
|
NDP_Hierarchical.hs
|
bsd-3-clause
| 13,545 | 0 | 20 | 3,030 | 2,179 | 1,194 | 985 | 142 | 4 |
module GitRunner ( GitRepo(..)
, GitCommand(..)
, gitIsDirtyTree
, buildGitRevisionString
, runGit
) where
import System.IO
import System.Process
import System.Exit
data GitRepo = GitRepo { gitRepoPath :: FilePath
} deriving (Show, Eq)
data GitCommand = GitRevisionInfoCmd
| GitUpdateIndexCmd
| GitDiffIndexNamesCmd
deriving (Show, Eq)
gitCommandArgs :: GitCommand -> [String]
gitCommandArgs GitUpdateIndexCmd = ["update-index", "-q", "--refresh"]
gitCommandArgs GitDiffIndexNamesCmd = ["diff-index", "--name-only", "HEAD", "--"]
gitCommandArgs GitRevisionInfoCmd = ["describe", "--tags", "--dirty"]
buildGitRevisionString :: GitRepo -> IO String
buildGitRevisionString repo = do
(_, revStr) <- runGit repo GitRevisionInfoCmd
return revStr
gitIsDirtyTree :: GitRepo -> IO Bool
gitIsDirtyTree repo = do
(_, _) <- runGit repo GitUpdateIndexCmd
(_, output) <- runGit repo GitDiffIndexNamesCmd
let changedFiles = length output
return $ changedFiles > 0
concatenateArgs :: [String] -> String
concatenateArgs [] = []
concatenateArgs (x:xs) = x ++ " " ++ concatenateArgs xs
runGit :: GitRepo -> GitCommand -> IO (ExitCode, String)
runGit repo command = do
let gitArgs = gitCommandArgs command
repoDir = gitRepoPath repo
out = CreatePipe
err = CreatePipe
d = Just repoDir
(_, Just gitOutputStream, Just gitErrorStream, procHandle) <- createProcess (proc "git" gitArgs) {std_out = out, std_err = err,cwd = d}
exitCode <- waitForProcess procHandle
case exitCode of
ExitSuccess -> do
gitOutputStr <- hGetContents gitOutputStream
let trimmedOutput | length gitOutputStr > 0 = init gitOutputStr
| otherwise = gitOutputStr
return (ExitSuccess, trimmedOutput)
(ExitFailure errorCode) -> do
gitErrorStr <- hGetContents gitErrorStream
let msg = "`git " ++ (concatenateArgs gitArgs) ++ "` failed with code " ++ (show errorCode) ++ " and message:\n" ++ gitErrorStr
return (exitCode, msg)
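-- A small usage sketch (an added example, not part of the original module);
-- the repository path argument is purely illustrative. It prints a
-- describe-style revision string such as "v1.2-4-gabc1234-dirty".
printRevision :: FilePath -> IO ()
printRevision path = buildGitRevisionString (GitRepo path) >>= putStrLn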
|
kaitanie/irt
|
src/GitRunner.hs
|
bsd-3-clause
| 2,144 | 0 | 21 | 528 | 613 | 319 | 294 | 50 | 2 |
module Exercises where
-- (2.12) page 82
-- let x = 3; y = 1000 in x * 3 + y
op1 = x * 3 + y
where x = 3
y = 1000
-- let y = 10; x = 10 * 5 + y in x * 5
op2 = x * 5
where y = 10
        x = 10 * 5 + y
-- let x = 7; y = negate x; z = y * 10 in z / x + y
op3 = z / x + y
where x = 7
y = negate x
z = y * 10
-- (2.13) More fun with function
waxOn = x * 5
where z = 7
x = y ^ 2
y = z + 8
triple x = x * 3
waxOff x = x * 3
|
dsaenztagarro/haskellbook
|
src/chapter2/Exercises.hs
|
bsd-3-clause
| 478 | 0 | 8 | 212 | 158 | 89 | 69 | 17 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Prosper.User
( User(..)
) where
import Control.Monad (when)
import Data.ByteString (ByteString)
-- | User info for Prosper data
data User = User
{ username :: !ByteString -- ^ Username for Prosper
, password :: !ByteString -- ^ Password for Prosper
} deriving (Show, Eq)
|
WraithM/prosper
|
src/Prosper/User.hs
|
bsd-3-clause
| 414 | 0 | 9 | 121 | 74 | 46 | 28 | 14 | 0 |
module Astro.Trajectory.CroppedSpec where
import Test.Hspec
--import Test.QuickCheck (property, (==>))
import TestUtil
import TestInstances
import Astro.Trajectory
import Astro.Trajectory.Cropped
import Astro.Trajectory.EphemTrajectory
import qualified Prelude
import Numeric.Units.Dimensional.Prelude
import Astro.Time
import Astro.Time.At
import Data.AEq
import Data.List
import Astro.Orbit.MEOE
import Astro.Orbit.Types
main = hspec spec
spec = do
spec_uncropped
spec_croppedStartTime
spec_croppedEndTime
spec_badValidity
spec_croppedEphemeris
spec_croppedEphemeris'
-- We don't think using QuickCheck is warranted in this case as
-- the test MEOEs are very random. We implement our own 'property'
-- instead which feeds the test MEOEs.
property f = f testM1 testM2
-- ----------------------------------------------------------------------
-- TODO move this spec elsewhere!
spec_uncropped = describe "Uncropped trajectory" $ do
it "does not change startTime when not cropping"
(let t = ET [undefined `At` t2, undefined `At` t4] in
startTime (crop' Nothing Nothing t) `shouldBe` t2)
it "does not change endTime when not cropping"
(let t = ET [undefined `At` t2, undefined `At` t4] in
endTime (crop' Nothing Nothing t) `shouldBe` t4)
it "does not change ephemeris' when not cropping"
(property $ \m m' -> let t = ET [m `At` t2, m' `At` t3] in
ephemeris' (crop' Nothing Nothing t) t0 t5 dt
== ephemeris' t t0 t5 dt)
it "returns no ephemeris' beyond lower validity"
(property $ \m m' -> let t = ET [m `At` t2, m' `At` t3] in
ephemeris' (crop' Nothing Nothing t) t0 t1 dt == [])
it "returns no ephemeris' beyond upper validity"
(property $ \m m' -> let t = ET [m `At` t2, m' `At` t3] in
ephemeris' (crop' Nothing Nothing t) t4 t5 dt == [])
it "does not change ephemeris when not cropping"
(property $ \m m' ->
let t = ET [m `At` t2, m' `At` t3]; ts = [t0, t0 `addTime` dt..t5] in
ephemeris (crop' Nothing Nothing t) ts == ephemeris t ts)
it "returns no ephemeris beyond lower validity"
(property $ \m m' ->
let t = ET [m `At` t2, m' `At` t3]; ts = [t0, t0 `addTime` dt..t1] in
ephemeris (crop' Nothing Nothing t) ts == [])
it "returns no ephemeris beyond upper validity"
(property $ \m m' ->
let t = ET [m `At` t2, m' `At` t3]; ts = [t4, t4 `addTime` dt..t5] in
ephemeris (crop' Nothing Nothing t) ts == [])
-- ----------------------------------------------------------------------
spec_croppedStartTime = describe "Cropped trajectory startTime" $ do
it "does not change when cropping before validity"
(let t = ET [undefined `At` t2, undefined `At` t4] in
startTime (cropStart t1 t) `shouldBe` t2)
it "does not change when cropping endTime"
(let t = ET [undefined `At` t2, undefined `At` t4] in
startTime (cropEnd t3 t) `shouldBe` t2)
it "changes when cropping startTime"
(let t = ET [undefined `At` t2, undefined `At` t4] in
startTime (cropStart t3 t) `shouldBe` t3)
-- ----------------------------------------------------------------------
spec_croppedEndTime = describe "Cropped trajectory endTime" $ do
it "does not change when cropping after validity"
(let t = ET [undefined `At` t2, undefined `At` t4] in
endTime (crop t1 t5 t) `shouldBe` t4)
it "does not change when cropping startTime"
(let t = ET [undefined `At` t2, undefined `At` t4] in
endTime (cropStart t3 t) `shouldBe` t4)
it "changes when cropping endTime"
(let t = ET [undefined `At` t2, undefined `At` t4] in
endTime (cropEnd t3 t) `shouldBe` t3)
spec_badValidity = describe "Trajectory with cropping disjoint from validity" $ do
it "doesn't generate ephemeris when cropped early"
(let t = crop t1 t2 $ ET [undefined `At` t3, undefined `At` t4] in
ephemeris t [startTime t, endTime t] `shouldBe` [])
it "doesn't generate ephemeris when cropped late"
(let t = crop t3 t4 $ ET [undefined `At` t1, undefined `At` t2] in
ephemeris t [startTime t, endTime t] `shouldBe` [])
it "doesn't generate ephemeris' when cropped early"
(let t = crop t1 t2 $ ET [undefined `At` t3, undefined `At` t4] in
ephemeris' t (startTime t) (endTime t) dt `shouldBe` [])
it "doesn't generate ephemeris' when cropped late"
(let t = crop t3 t4 $ ET [undefined `At` t1, undefined `At` t2] in
ephemeris' t (startTime t) (endTime t) dt `shouldBe` [])
-- ----------------------------------------------------------------------
spec_croppedEphemeris' = describe "Cropped trajectory (ephemeris')" $ do
it "does not change when cropping beyond validity"
(property $ \m m' -> let t = ET [m `At` t2, m' `At` t3] in
ephemeris' t t0 t5 dt
== ephemeris' (crop t1 t4 t) t0 t5 dt)
it "with cropped startTime is not equal to uncropped trajectory"
(property $ \m m' -> let t = ET [m `At` t1, m' `At` t3] in
(ephemeris' (crop t2 t4 t) t0 t5 dt
/= ephemeris' t t0 t5 dt))
it "with cropped startTime is suffix of uncropped trajectory"
(property $ \m m' -> let t = ET [m `At` t1, m' `At` t3] in
(ephemeris' (crop t2 t4 t) t0 t5 dt
`isSuffixOf` ephemeris' t t0 t5 dt))
it "with cropped endTime is not equal to of uncropped trajectory"
(property $ \m m' -> let t = ET [m `At` t1, m' `At` t3] in
(ephemeris' (cropEnd t2 t) t0 t5 dt
/= ephemeris' t t0 t5 dt))
it "with cropped endTime is prefix of uncropped trajectory"
(property $ \m m' -> let t = ET [m `At` t1, m' `At` t3] in
(ephemeris' (cropEnd t2 t) t0 t5 dt
`isPrefixOf` ephemeris' t t0 t5 dt))
it "with cropped start and end is not prefix of uncropped trajectory"
(property $ \m m' -> let t = ET [m `At` t1, m' `At` t4] in
not (ephemeris' (crop t2 t3 t) t0 t5 dt
`isPrefixOf` ephemeris' t t0 t5 dt))
it "with cropped start and end is not suffix of uncropped trajectory"
(property $ \m m' -> let t = ET [m `At` t1, m' `At` t4] in
not (ephemeris' (crop t2 t3 t) t0 t5 dt
`isSuffixOf` ephemeris' t t0 t5 dt))
it "with cropped start and end is infix of uncropped trajectory"
(property $ \m m' -> let t = ET [m `At` t1, m' `At` t4] in
(ephemeris' (crop t2 t3 t) t0 t5 dt
`isInfixOf` ephemeris' t t0 t5 dt))
it "returns no ephemeris' beyond lower validity"
(property $ \m m' -> let t = ET [m `At` t0, m' `At` t5] in
ephemeris' (crop t2 t3 t) t0 t1 dt == [])
it "returns no ephemeris' beyond upper validity"
(let t = ET [undefined `At` t0, undefined `At` t5] in
ephemeris' (crop t2 t3 t) t4 t5 dt `shouldBe` [])
-- ----------------------------------------------------------------------
spec_croppedEphemeris = describe "Cropped trajectory (ephemeris)" $ do
it "does not change when cropping beyond validity"
(property $ \m m' ->
let t = ET [m `At` t2, m' `At` t3]; ts = [t0, t0 `addTime` dt..t5] in
ephemeris t ts == ephemeris (crop t1 t4 t) ts)
it "with cropped startTime is not equal to uncropped trajectory"
(property $ \m m' ->
let t = ET [m `At` t1, m' `At` t3]; ts = [t0, t0 `addTime` dt..t5] in
ephemeris (crop t2 t4 t) ts /= ephemeris t ts)
it "with cropped startTime is suffix of uncropped trajectory"
(property $ \m m' ->
let t = ET [m `At` t1, m' `At` t3]; ts = [t0, t0 `addTime` dt..t5] in
(ephemeris (crop t2 t4 t) ts
`isSuffixOf` ephemeris t ts))
it "with cropped endTime is not equal to of uncropped trajectory"
(property $ \m m' ->
let t = ET [m `At` t1, m' `At` t3]; ts = [t0, t0 `addTime` dt..t5] in
ephemeris (cropEnd t2 t) ts /= ephemeris t ts)
it "with cropped endTime is prefix of uncropped trajectory"
(property $ \m m' ->
let t = ET [m `At` t1, m' `At` t3]; ts = [t0, t0 `addTime` dt..t5] in
(ephemeris (cropEnd t2 t) ts
`isPrefixOf` ephemeris t ts))
it "with cropped start and end is not prefix of uncropped trajectory"
(property $ \m m' ->
let t = ET [m `At` t1, m' `At` t4]; ts = [t0, t0 `addTime` dt..t5] in
not (ephemeris (crop t2 t3 t) ts
`isPrefixOf` ephemeris t ts))
it "with cropped start and end is not suffix of uncropped trajectory"
(property $ \m m' ->
let t = ET [m `At` t1, m' `At` t4]; ts = [t0, t0 `addTime` dt..t5] in
not (ephemeris (crop t2 t3 t) ts
`isSuffixOf` ephemeris t ts))
it "with cropped start and end is infix of uncropped trajectory (ephemeris')"
(property $ \m m' ->
let t = ET [m `At` t1, m' `At` t4]; ts = [t0, t0 `addTime` dt..t5] in
(ephemeris (crop t2 t3 t) ts
`isInfixOf` ephemeris t ts))
it "returns no ephemeris beyond lower validity"
(let t = ET [undefined `At` t0, undefined `At` t5]
ts = [t0, t0 `addTime` dt..t1]
in ephemeris (crop t2 t3 t) ts `shouldBe` [])
it "returns no ephemeris beyond upper validity"
(let t = ET [undefined `At` t0, undefined `At` t5]
ts = [t4, t4 `addTime` dt..t5]
in ephemeris (crop t2 t3 t) ts `shouldBe` [])
-- ----------------------------------------------------------------------
t0 = mjd 0.0 UT1
t1 = mjd 1 UT1
t2 = mjd 2 UT1
t3 = mjd 3 UT1
t4 = mjd 4 UT1
t5 = mjd 5 UT1
dt = 1 *~ hour :: Time Double
-- Test data, essentially randomized.
testM1 = MEOE { mu = 5.5017577174388266e9 *~ (meter^pos3/second^pos2)
, p = 0.7865893064609859 *~meter, f = 0.6398323179864169*~one
, g = 0.0996399428802211 *~one, h = (-0.7813921023837359)*~one
, k = 0.7396666870016642 *~one
, longitude = Long { long = 0.811762241416502*~one }
}
testM2 = MEOE { mu = 4.5017577174388266e9 *~ (meter^pos3/second^pos2)
, p = 0.6865893064609859 *~meter, f = 0.2398323179864169*~one
, g = 0.1996399428802211 *~one, h = (-0.0813921023837359)*~one
, k = 0.1396666870016642 *~one
, longitude = Long { long = 2.811762241416502*~one }
}
|
bjornbm/astro-orbit
|
test/Astro/Trajectory/CroppedSpec.hs
|
bsd-3-clause
| 9,995 | 0 | 19 | 2,434 | 3,677 | 1,979 | 1,698 | 188 | 1 |
module Hilt.Handles.Cache
(module Hilt.Handles.Cache) where
import Data.Text (Text)
data Handle = Handle
{ insert :: Text -> Text -> IO ()
, lookup :: Text -> IO (Maybe Text)
, delete :: Text -> IO ()
, keys :: IO [Text]
, size :: IO Int
}
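-- A minimal usage sketch (not part of this module): one way to back a
-- 'Handle' with an in-memory Map held in an IORef. The helper name
-- 'newIORefCache' and the extra imports (Data.IORef, Data.Map.Strict) are
-- assumptions for illustration only.
{-
import Data.IORef
import qualified Data.Map.Strict as Map

newIORefCache :: IO Handle
newIORefCache = do
  ref <- newIORef Map.empty
  pure Handle
    { insert = \k v -> modifyIORef' ref (Map.insert k v)
    , lookup = \k -> Map.lookup k <$> readIORef ref
    , delete = \k -> modifyIORef' ref (Map.delete k)
    , keys   = Map.keys <$> readIORef ref
    , size   = Map.size <$> readIORef ref
    }
-}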
|
supermario/hilt
|
src/Hilt/Handles/Cache.hs
|
bsd-3-clause
| 260 | 0 | 12 | 67 | 109 | 62 | 47 | 9 | 0 |
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE ViewPatterns #-}
module Mistral.TypeCheck.Patterns (
withPatEnv
, genMatch
, tcLit
) where
import Mistral.ModuleSystem.Name
import qualified Mistral.Parser.AST as P
import Mistral.TypeCheck.AST
import Mistral.TypeCheck.Env
import Mistral.TypeCheck.Monad
import Mistral.Utils.Names
import Mistral.Utils.Panic ( panic )
import Mistral.Utils.Source
import Control.Monad ( replicateM, guard )
import Data.Foldable ( foldlM, foldrM )
import Data.List ( partition, groupBy )
import Data.Monoid ( mempty )
import qualified Data.Set as Set
tcPanic :: [String] -> a
tcPanic = panic "Mistral.TypeCheck.Patterns"
-- Patterns --------------------------------------------------------------------
-- | Run a computation with the environment generated by a parsed pattern.
withPatEnv :: Type -> P.Pattern -> TC a -> TC a
withPatEnv sty pat body =
do env <- applySubst =<< genEnv mempty (pat,sty)
withEnv env body
where
genEnv env (p,pty) = case p of
P.PVar v ->
do return (addSchema v (mkSchema pty) env)
P.PCon n ps ->
do (_,nty) <- freshVarType =<< lookupSchema n
let len = length ps
ptys <- replicateM len freshTVar
unify nty (foldr tarrow pty ptys)
foldlM genEnv env (zip ps ptys)
P.PTuple ps ->
do ptys <- replicateM (length ps) freshTVar
unify pty (ttuple ptys)
foldlM genEnv env (zip ps ptys)
P.PWildcard ->
return env
P.PLit lit ->
do tcLit lit pty
return env
P.PSource src pat' ->
withSource src (genEnv env (pat',pty))
-- | Checking for literals.
--
-- XXX Literals are not all monomorphic.
-- Numbers are obvious. Also, colon can be Time or IPv6.
tcLit :: Literal -> Type -> TC ()
tcLit lit ety = case lit of
LTime _ -> unify ety timeCon
LColonDot _ -> unify ety timeCon
LColonSlash _ -> unify ety ipv6MaskCon
LDotSlash _ -> unify ety ipv4MaskCon
LColon _ -> unify ety timeCon
LDot _ -> unify ety ipv4Con
LNum _ _ -> unify ety intCon
LAtom _ -> unify ety atomCon
LString _ -> unify ety stringCon
-- Pattern Matching Compilation ------------------------------------------------
genMatch :: [(Expr,Type)] -> [(P.Pattern,Match Pattern)] -> TC (Match Pattern)
genMatch args arms =
do m <- match args [ ([noSource pat], m) | (pat,m) <- arms ] MFail
return (cleanupMatch m)
-- | Unroll nested patterns.
match :: [(Expr,Type)] -> [([P.Pattern],Match Pattern)] -> Match Pattern
-> TC (Match Pattern)
match args arms err = case args of
(s,sty):args'
-- when matching a tuple literal against a tuple pattern, go ahead and
-- unroll the pattern and the literal
| EMkTuple es <- s
, Just arms' <- mapM (unpackTuple (length es)) arms ->
do let tupleArgs = [ (e,ty) | (ty,e) <- es ]
match (tupleArgs ++ args') arms' err
-- the variable rule: all leading patterns are variables, or wildcards,
-- allowing the pattern match to be translated into a renaming.
| Just arms' <- mapM varRule arms ->
match args' (map (rename s sty) arms') err
-- the constructor rule: all leading patterns are constructor patterns.
-- group similar constructors together, preserving matching order
| Just arms' <- mapM constructorRule arms ->
do cons <- mapM (matchCon sty args' err) (groupCons arms')
return (MCase s sty (foldr MSplit err cons))
-- all leading patterns are literals. group similar literals together,
-- preserving matching order, generating sub-matches
| Just arms' <- mapM litRule arms ->
do ms <- mapM (matchLit args' err) (groupLits arms')
return (MCase s sty (foldr MSplit err ms))
-- gather all tuple patterns together. as tuple patterns are irrefutable,
-- this boils down to one case.
| Just arms' <- mapM tupleRule arms ->
do body <- matchTuple sty args' err arms'
return (MCase s sty body)
  -- the mixture rule: the set of leading patterns is a combination of
-- variable introductions, wildcards, and constructor patterns.
| otherwise ->
mixtureRule args arms err
-- no arguments left, and all patterns are empty
[] | (pats,ms) <- unzip arms
, all null pats ->
return (foldr1 MSplit ms)
-- something went wrong
_ -> tcPanic [ "pattern matching bug", show args, show arms ]
where
-- rename the expression being cased over in the body of the current arm.
rename s sty (Just n, ps, m) = (ps, MRename n s sty m)
rename _ _ (_, ps, m) = (ps, m)
-- attempt to unpack a pattern as a tuple match, unrolling the match into
-- left-to-right matches.
unpackTuple arity (ps,m) = case ps of
P.PTuple qs:rest -> return (qs ++ rest, m)
P.PWildcard:rest -> return (replicate arity P.PWildcard ++ rest,m)
_ -> Nothing
-- check to see if the variable rule will apply to this branch
varRule (ps,m) = case ps of
P.PVar n : rest -> Just (Just n, rest, m)
P.PWildcard : rest -> Just (Nothing,rest, m)
_ -> Nothing
-- the pattern begins with a constructor
constructorRule (ps,m) = case ps of
P.PCon n qs : rest -> Just ((n,qs), (rest, m))
_ -> Nothing
-- the pattern is a literal
litRule (ps,m) = case ps of
P.PLit lit : rest -> Just (lit, (rest,m))
_ -> Nothing
tupleRule (ps,m) = case ps of
P.PTuple qs : rest -> Just (qs, (rest,m))
_ -> Nothing
-- group similar constructors together, preserving the matching order.
groupCons as = case as of
-- group like-patterns together
arm@((n,qs), _) : rest ->
let (likes,others) = partition (isCon n) rest
in (n, length qs, map extendPats (arm:likes)) : groupCons others
[] -> []
where
isCon n ((n',_),_) = n == n'
extendPats ((_,qs),(ps,m)) = (qs ++ ps, m)
-- group similar literals together, preserving the matching order
groupLits as = case as of
arm@(lit, _) : rest ->
let (likes,others) = partition (sameLit lit) rest
in (lit, map dropLit (arm:likes)) : groupLits others
[] -> []
where
sameLit l (l',_) = l == l'
dropLit (_,arm) = arm
-- | Turn a group of construtor patterns into a single match.
matchCon :: Type -> [(Expr,Type)] -> Match Pattern
-> (Name,Int,[([P.Pattern], Match Pattern)]) -> TC (Match Pattern)
matchCon sty args err (n,arity,arms) =
do -- if goals were emitted here, we would somehow have something like a GADT,
-- or existentially quantified constraint
((_,nty),_) <- collectGoals (freshVarType =<< lookupSchema n)
-- introduce fresh variables for each field of the constructor
ns <- replicateM arity freshName
ntys <- replicateM arity freshTVar
unify nty (foldr tarrow sty ntys)
     -- expand the patterns for this constructor, falling back to 'err' if nothing matches
body <- match (zip (map EVar ns) ntys ++ args) arms err
-- create the pattern
let ps = zipWith Param ns ntys
return (MPat (PCon n (map Just ps)) body)
-- | Given a group of matches to the same literal, generate the branch that
-- represents that match. For example:
--
-- case ... of
-- ...
-- (0,X) -> ...
-- (0,Y) -> ...
-- (1,Z) -> ...
-- ...
--
-- will translate (loosely) to
--
-- case ... of
-- ...
-- t -> case t of
-- (a,b) -> case a of
-- 0 -> case b of
-- X -> ...
-- Y -> ...
-- _ -> FAIL
-- 1 -> case b of
-- Z -> ...
-- _ -> FAIL
--
-- It's worth noting that you can replace the numeric literals with any other
-- kind of literal, or constructor match, the principle remains the same.
matchLit :: [(Expr,Type)] -> Match Pattern
-> (Literal, [([P.Pattern], Match Pattern)]) -> TC (Match Pattern)
matchLit args err (lit,arms) =
do body <- match args arms err
return (MPat (PLit lit) body)
-- | Unpack the tuple using a PTuple, then continue matching all other branches.
-- The reason that this produces a case with a single branch is that the tuple
-- matches are irrefutable -- they can only ever match successfully.
matchTuple :: Type -> [(Expr,Type)] -> Match Pattern
-> [([P.Pattern], ([P.Pattern],Match Pattern))] -> TC (Match Pattern)
matchTuple sty args err tarms = case tarms of
-- shouldn't be possible
[] -> tcPanic [ "matchTuple: no match arms" ]
(qs,_):_ ->
do let arity = length qs
ns <- replicateM arity freshName
ntys <- replicateM arity freshTVar
unify sty (ttuple ntys)
let unpack (ps,(ps',m)) = (ps ++ ps', m)
body <- match (zip (map EVar ns) ntys ++ args) (map unpack tarms) err
let ps = zipWith Param ns ntys
return (MPat (PTuple (map Just ps)) body)
-- The Mixture Rule ------------------------------------------------------------
mixtureRule :: [(Expr,Type)] -> [([P.Pattern], Match Pattern)]
-> Match Pattern -> TC (Match Pattern)
mixtureRule args arms err = foldrM (match args) err (groupBy bothWild arms)
where
-- groups wild things together, and non-wild things together
bothWild a b = (wa && wb) || (not wa && not wb)
where
wa = isWild a
wb = isWild b
isWild (ps,_) = case ps of
P.PVar _ : _ -> True
P.PWildcard : _ -> True
_ -> False
-- Cleanup ---------------------------------------------------------------------
-- | Remove unused variable bindings from a Match.
--
-- XXX it would be nice if this emitted warnings about the variables being
-- unused.
cleanupMatch :: Match Pattern -> Match Pattern
cleanupMatch m0 = fst (go m0)
where
go m = case m of
MCase s sty body ->
let (body',used) = go body
in (MCase s sty body', freeVars s `Set.union` used)
MRename n e ety body ->
let (body',used) = go body
in if n `Set.member` used
then (MRename n e ety body', freeVars e `Set.union` used)
else (body', used)
MPat (PCon n ns) body ->
let (body',used) = go body
checkUsed mb = do param <- mb
guard (pName param `Set.member` used)
return param
in (MPat (PCon n (map checkUsed ns)) body', used)
MPat (PTuple ps) body ->
let (body',used) = go body
checkUsed mb = do param <- mb
guard (pName param `Set.member` used)
return param
in (MPat (PTuple (map checkUsed ps)) body', used)
MPat p body ->
let (body',used) = go body
in (MPat p body', used)
-- prune off the right branch when the left is irrefutable
MSplit l r ->
let (l',lu) = go l
(r',ru) = go r
in if irrefutable l'
then (l', lu)
else (MSplit l' r', lu `Set.union` ru)
MExpr e -> (m, freeVars e)
MGuard g body ->
let (body',used) = go body
in (MGuard g body', freeVars g `Set.union` used)
MFail -> (m,Set.empty)
|
GaloisInc/mistral
|
src/Mistral/TypeCheck/Patterns.hs
|
bsd-3-clause
| 11,170 | 0 | 19 | 3,143 | 3,506 | 1,814 | 1,692 | 194 | 13 |
--------------------------------------------------------------------------------
-- | Exports a datastructure for the top-level hakyll configuration
module Hakyll.Core.Configuration
( Configuration (..)
, shouldIgnoreFile
, defaultConfiguration
) where
--------------------------------------------------------------------------------
import Data.Default (Default (..))
import Data.List (isPrefixOf, isSuffixOf)
import System.Directory (canonicalizePath)
import System.Exit (ExitCode)
import System.FilePath (isAbsolute, normalise, takeFileName)
import System.IO.Error (catchIOError)
import System.Process (system)
--------------------------------------------------------------------------------
data Configuration = Configuration
    { -- | Directory in which the output is written
destinationDirectory :: FilePath
, -- | Directory where hakyll's internal store is kept
storeDirectory :: FilePath
, -- | Directory in which some temporary files will be kept
tmpDirectory :: FilePath
, -- | Directory where hakyll finds the files to compile. This is @.@ by
-- default.
providerDirectory :: FilePath
, -- | Function to determine ignored files
--
-- In 'defaultConfiguration', the following files are ignored:
--
-- * files starting with a @.@
--
-- * files starting with a @#@
--
-- * files ending with a @~@
--
-- * files ending with @.swp@
--
-- Note that the files in 'destinationDirectory' and 'storeDirectory' will
      -- also be ignored. Note that this is only the configuration parameter; to
      -- actually run the check, use 'shouldIgnoreFile'.
--
ignoreFile :: FilePath -> Bool
, -- | Here, you can plug in a system command to upload/deploy your site.
--
-- Example:
--
-- > rsync -ave 'ssh -p 2217' _site [email protected]:hakyll
--
-- You can execute this by using
--
-- > ./site deploy
--
deployCommand :: String
, -- | Function to deploy the site from Haskell.
--
-- By default, this command executes the shell command stored in
-- 'deployCommand'. If you override it, 'deployCommand' will not
      -- be used implicitly.
--
-- The 'Configuration' object is passed as a parameter to this
-- function.
--
deploySite :: Configuration -> IO ExitCode
, -- | Use an in-memory cache for items. This is faster but uses more
-- memory.
inMemoryCache :: Bool
, -- | Override default port for preview server. Default is 8000.
-- One can also override the port as a command line argument:
-- ./site preview -p 1234
previewPort :: Int
}
--------------------------------------------------------------------------------
instance Default Configuration where
def = defaultConfiguration
--------------------------------------------------------------------------------
-- | Default configuration for a hakyll application
defaultConfiguration :: Configuration
defaultConfiguration = Configuration
{ destinationDirectory = "_site"
, storeDirectory = "_cache"
, tmpDirectory = "_cache/tmp"
, providerDirectory = "."
, ignoreFile = ignoreFile'
, deployCommand = "echo 'No deploy command specified' && exit 1"
, deploySite = system . deployCommand
, inMemoryCache = True
, previewPort = 8000
}
where
ignoreFile' path
| "." `isPrefixOf` fileName = True
| "#" `isPrefixOf` fileName = True
| "~" `isSuffixOf` fileName = True
| ".swp" `isSuffixOf` fileName = True
| otherwise = False
where
fileName = takeFileName path
--------------------------------------------------------------------------------
-- | Check if a file should be ignored
shouldIgnoreFile :: Configuration -> FilePath -> IO Bool
shouldIgnoreFile conf path = orM
[ inDir (destinationDirectory conf)
, inDir (storeDirectory conf)
, inDir (tmpDirectory conf)
, return (ignoreFile conf path')
]
where
path' = normalise path
absolute = isAbsolute path
inDir dir
| absolute = do
dir' <- catchIOError (canonicalizePath dir) (const $ return dir)
return $ dir' `isPrefixOf` path'
| otherwise = return $ dir `isPrefixOf` path'
orM :: [IO Bool] -> IO Bool
orM [] = return False
orM (x : xs) = x >>= \b -> if b then return True else orM xs
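--------------------------------------------------------------------------------
-- A hedged usage sketch (not part of this module): a site executable could
-- override 'defaultConfiguration' field by field. The rsync target and the
-- ignored file name below are made-up placeholders.
{-
myConfiguration :: Configuration
myConfiguration = defaultConfiguration
    { deployCommand = "rsync -av _site/ user@example.org:public_html/"
    , ignoreFile    = \path ->
        takeFileName path == "TODO.md"
          || ignoreFile defaultConfiguration path
    , previewPort   = 9000
    }
-}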
|
freizl/freizl.github.com-old
|
src/Hakyll/Core/Configuration.hs
|
bsd-3-clause
| 4,747 | 0 | 14 | 1,341 | 643 | 382 | 261 | 57 | 3 |
module Data.WithClass.Derive.MData (makeMDataAbstract,makeMData,ctorDeclsTypes,ctorDeclVars,app2,app4,mkCtorExp,mkCtorPat) where
import Language.Haskell as H
import Data.Derive.Internal.Derivation
import Data.List
-- * Derive opaque @MData@ instances (default @gfoldl@ and empty declarations for the remaining functions)
makeMDataAbstract :: Derivation
makeMDataAbstract = derivationCustom "MData" $ \(q,d) -> Right $ makeMDataAbstractInstance q d
makeMDataAbstractInstance :: ModuleName -> DataDecl -> [Decl]
makeMDataAbstractInstance q d = [InstDecl sl Nothing [] ctx (mdataQual $ Ident "MData") [ct,m,sig] []]
where
vars = dataDeclVars d
ctors = dataDeclCtors d
tys = nub $ ctorDeclsTypes ctors
ctx = ClassA (UnQual $ Ident "Sat") [TyApp ct sig] : ClassA (UnQual $ Ident "Monad") [m] : concatMap (\var -> [ClassA (UnQual $ Ident "DeepTypeable") [TyVar $ Ident var]]) vars ++ map (\ty -> ClassA (UnQual $ Ident "MData") [ct,m,ty]) tys
sig = foldl (\ty var -> TyApp ty (TyVar $ Ident var)) (TyCon $ UnQual $ Ident $ dataDeclName d) vars
-- * Derive @MData@ instances for algebraic data types
makeMData :: Derivation
makeMData = derivationCustom "MData" $ \(q,d) -> Right $ makeMDataInstance q d
makeMDataInstance :: ModuleName -> DataDecl -> [Decl]
makeMDataInstance q d = [InstDecl sl Nothing [] ctx (mdataQual $ Ident "MData") [ct,m,sig] (makeMDataGfoldl ctors ++ makeMDataGunfold ctors ++ makeMDataToConstr ctors ++ makeMDataDataTypeOf q (dataDeclName d) ctors)]
where
vars = dataDeclVars d
ctors = dataDeclCtors d
tys = nub $ ctorDeclsTypes ctors
ctx = ClassA (UnQual $ Ident "Sat") [TyApp ct sig]
: ClassA (UnQual $ Ident "Monad") [m]
: map (\ty -> ClassA (UnQual $ Ident "MData") [ct,m,ty]) tys
++ concatMap (\var -> [ClassA (UnQual $ Ident "DeepTypeable") [TyVar $ Ident var]]) vars
sig = foldl (\ty var -> TyApp ty (TyVar $ Ident var)) (TyCon $ UnQual $ Ident $ dataDeclName d) vars
--concatMap (\var -> [ClassA (mdataQual $ Ident "MData") [ct,m,TyVar $ Ident var],ClassA (UnQual $ Ident "Typeable") [TyVar $ Ident var]]) vars ++
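-- Informal sketch of the generated instance: for a declaration like
-- 'data Maybe a = Nothing | Just a', 'makeMData' yields an instance whose
-- head and context are roughly
--   instance (Sat (ctx (Maybe a)), Monad m, MData ctx m a, DeepTypeable a)
--     => MData ctx m (Maybe a) where ...
-- with the method bodies produced by the helpers below.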
makeMDataGfoldl :: [CtorDecl] -> [InstDecl]
makeMDataGfoldl ctors = [InsDecl $ FunBind $ map makeMDataGfoldl' ctors]
makeMDataGfoldl' :: CtorDecl -> Match
makeMDataGfoldl' c = Match sl (Ident "gfoldl") [PVar $ Ident "ctx",PVar $ Ident "k",PVar $ Ident "z",mkCtorPat c] Nothing (UnGuardedRhs expr) (BDecls [])
where
expr = foldl (\e var -> e `rightArrHighApp` (flipk `App` (returnExpr $ Var $ UnQual $ Ident var))) ((Var $ UnQual $ Ident "z") `App` makeFoldZFun c) cvars
cvars = ctorDeclVars c
flipk = Paren $ (Var $ UnQual $ Ident "flip") `App` (Var $ UnQual $ Ident "k")
makeFoldZFun c = foldr (\(mvar,var) e -> Paren $ Lambda sl [PVar $ Ident mvar] $ InfixApp (Var $ UnQual $ Ident mvar) (QVarOp $ UnQual $ Ident ">>=") $ Lambda sl [PVar $ Ident var] $ returnExpr e) (mkCtorExp c) (zip (ctorDeclMVars c) (ctorDeclVars c))
rightArrHighApp e1 e2 = InfixApp e1 (QVarOp $ UnQual $ Ident ">>=") e2
returnExpr e = App (Var $ UnQual $ Ident "return") e
makeMDataGunfold :: [CtorDecl] -> [InstDecl]
makeMDataGunfold ctors = [InsDecl $ FunBind [Match sl (Ident "gunfold") [PVar $ Ident "ctx",PVar $ Ident "k",PVar $ Ident "z",PVar $ Ident "c"] Nothing (UnGuardedRhs expr) (BDecls [])]]
where
expr = ((Var $ mdataQual $ Ident "constrIndex") `App` (Var $ UnQual $ Ident "c")) `Case` alts
alts = map makeMDataGunfold' $ zip [1..] ctors
makeMDataGunfold' :: (Int,CtorDecl) -> Alt
makeMDataGunfold' (i,c) = Alt sl (PLit Signless $ Int $ toEnum i) (UnGuardedRhs expr) (BDecls [])
where expr = foldl (\e var -> e `rightArrHighApp` (Var $ UnQual $ Ident "k")) ((Var $ UnQual $ Ident "z") `App` makeFoldZFun c) (ctorDeclVars c)
makeMDataToConstr :: [CtorDecl] -> [InstDecl]
makeMDataToConstr cs = [InsDecl $ FunBind $ map makeMDataToConstr' $ zip [1..] cs]
makeMDataToConstr' :: (Int,CtorDecl) -> Match
makeMDataToConstr' (i,c) = Match sl (Ident "toConstr") [PVar $ Ident "ctx",PAsPat (Ident "x") $ mkCtorPat c] Nothing (UnGuardedRhs expr) (BDecls [])
where expr = (app2 (Var $ mdataQual $ Ident "dataTypeOf") (Var $ UnQual $ Ident "ctx") (Var $ UnQual $ Ident "x")) `rightArrHighApp` (InfixApp (Var $ UnQual $ Ident "return") (QVarOp $ UnQual $ Ident ".") (app2 (Var $ UnQual $ Ident "flip") (Var $ mdataQual $ Ident "indexConstr") (Lit $ Int $ toEnum i)))
makeMDataDataTypeOf :: ModuleName -> String -> [CtorDecl] -> [InstDecl]
makeMDataDataTypeOf (ModuleName q) dname cs = [InsDecl $ FunBind [Match sl (Ident "dataTypeOf") [PVar $ Ident "ctx",PVar $ Ident "x"] Nothing (UnGuardedRhs $ returnExpr $ Var $ UnQual $ Ident "ty") (BDecls [FunBind [Match sl (Ident "ty") [] Nothing (UnGuardedRhs expr) (BDecls [])]])]]
where expr = app2 (Var $ mdataQual $ Ident "mkDataType") (Lit $ String $ q++"."++dname) (List $ map makeMDataDataTypeOf' cs)
makeMDataDataTypeOf' :: CtorDecl -> Exp
makeMDataDataTypeOf' c = app4 (Var $ mdataQual $ Ident "mkConstr") (Var $ UnQual $ Ident "ty") (Lit $ String $ ctorDeclName c) (List args) (Con $ UnQual $ Ident "Prefix")
where args = []
mkCtorPat :: CtorDecl -> Pat
mkCtorPat c = PApp (UnQual $ Ident $ ctorDeclName c) args
where args = map (\var -> PVar $ Ident var) (ctorDeclVars c)
mkCtorExp :: CtorDecl -> Exp
mkCtorExp c = Paren $ foldl (\e var -> App e $ Var $ UnQual $ Ident var) (Con $ UnQual $ Ident $ ctorDeclName c) (ctorDeclVars c)
ctorDeclVars :: CtorDecl -> [String]
ctorDeclVars c = map (\i -> "x"++show i) $ take (length $ ctorDeclFields c) [1..]
ctorDeclMVars :: CtorDecl -> [String]
ctorDeclMVars c = map (\i -> "mx"++show i) $ take (length $ ctorDeclFields c) [1..]
ct = TyVar $ Ident "ctx"
m = TyVar $ Ident "m"
mdataQual :: Name -> QName
mdataQual name = Qual (ModuleName "Data.WithClass.MData") name
dataQual :: Name -> QName
dataQual name = Qual (ModuleName "Data.Data") name
app2 x y z = App (App x y) z
app4 x y z w q = App (App (App (App x y) z) w) q
ctorDeclTypes :: CtorDecl -> [Type]
ctorDeclTypes = map snd . ctorDeclFields
ctorDeclsTypes :: [CtorDecl] -> [Type]
ctorDeclsTypes = concatMap ctorDeclTypes
|
cornell-pl/HsAdapton
|
syb-with-class-and-effects/src/Data/WithClass/Derive/MData.hs
|
bsd-3-clause
| 6,054 | 6 | 18 | 1,027 | 2,614 | 1,356 | 1,258 | 74 | 1 |
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module RDPURI (
-- Our Type
RDPURI(..),
Attribute(..),
-- The lenses
rdpuriAddress,
rdpuriAttributes,
-- Rendering
renderRDPURI,
-- Support
addAttribute,
-- Data types
ZeroOne(..), ZeroOneTwo(..)
) where
import Control.Lens
import Data.ByteString as BS
import Data.ByteString.Lens
import Data.Monoid
import Data.Text
import Data.Text.Encoding
import Network.HTTP.Types.URI
type Address = (Text, Maybe Int)
data RDPURI = RDPURI { _rdpuriAddress :: Address, _rdpuriAttributes :: [Attribute] }
data Attribute
= AllowDesktopComposition ZeroOne
| AllowFontSmoothing ZeroOne
| AlternateShell Text
| AudioMode ZeroOneTwo
| AuthenticationLevel ZeroOneTwo
| ConnectToConsole ZeroOne
| DisableCursorSettings ZeroOne
| DisableFullWindowDrag ZeroOne
| DisableMenuAnims ZeroOne
| DisableThemes ZeroOne
| DisableWallpaper ZeroOne
| DriveStoreDirect
| DesktopHeight Int
| DesktopWidth Int
| Domain Text
| GatewayHostname Text
| GateWayUsageMethod OneTwo
| PromptForCredentialsOnClient ZeroOne
| LoadBalanceInfo Text
| RedirectPrinters ZeroOne
| RemoteApplicationCmdLine Text
| RemoteApplicationMode ZeroOne
| RemoteApplicationProgram Text
| ShellWorkingDirectory Text
| UseRedirectionServerName ZeroOne
| Username Text
| ScreenModeId OneTwo
| SessionBPP BPP
| UseMultimon ZeroOne
data ZeroOne = Zero | One
data ZeroOneTwo = Zero' | One' | Two'
data OneTwo = One'' | Two''
data BPP = BPP8 | BPP15 | BPP16 | BPP24 | BPP32
makeLenses ''RDPURI
renderRDPURI :: RDPURI -> BS.ByteString
renderRDPURI (RDPURI address as) = "rdp://" <> render attributes
where attributes = renderAddress address : fmap renderAttribute as
render = BS.intercalate "&" . fmap f
where f (a,b) = urlEncode False a <> "=" <> b
addAttribute :: Attribute -> RDPURI -> RDPURI
addAttribute a = rdpuriAttributes <>~ [a]
renderAddress :: (Text, Maybe Int) -> (ByteString, ByteString)
renderAddress (a, v) = ("full address", x)
where x = "s:" <> encodeUtf8 a <> portBit
portBit = maybe "" (\i -> ":" <> (view packedChars . show $ i)) v
-- Maybe do this with a Prism? Make it 2-way
class RdpValue a where
renderValue :: a -> ByteString
instance RdpValue Text where
renderValue v = "s:" <> encodeUtf8 v
instance RdpValue ZeroOne where
renderValue Zero = "i:0"
renderValue One = "i:1"
instance RdpValue ZeroOneTwo where
renderValue Zero' = "i:0"
renderValue One' = "i:1"
renderValue Two' = "i:2"
instance RdpValue OneTwo where
renderValue One'' = "i:1"
renderValue Two'' = "i:2"
instance RdpValue Int where
renderValue i = "i:" <> (show i ^. packedChars)
instance RdpValue BPP where
renderValue BPP8 = renderValue (8::Int)
renderValue BPP15 = renderValue (15::Int)
renderValue BPP16 = renderValue (16::Int)
renderValue BPP24 = renderValue (24::Int)
renderValue BPP32 = renderValue (32::Int)
-- https://technet.microsoft.com/en-us/library/dn690096.aspx
renderAttribute :: Attribute -> (ByteString, ByteString)
renderAttribute = \case
AllowDesktopComposition z -> ("allow desktop composition", renderValue z)
AllowFontSmoothing z -> ("allow font smoothing", renderValue z)
AlternateShell z -> ("alternate shell",renderValue z)
AudioMode z -> ("audiomode", renderValue z)
AuthenticationLevel z -> ("authentication level", renderValue z)
ConnectToConsole z -> ("connect to console", renderValue z)
DisableCursorSettings z -> ("disable cursor settings", renderValue z)
DisableFullWindowDrag z -> ("disable full window drag", renderValue z)
DisableMenuAnims z -> ("disable menu anims", renderValue z)
DisableThemes z -> ("disable themes", renderValue z)
DisableWallpaper z -> ("disable wallpaper", renderValue z)
DriveStoreDirect -> ("drivestoredirect", renderValue ("*" :: Text))
DesktopHeight z -> ("desktopheight", renderValue z)
DesktopWidth z -> ("desktopwidth", renderValue z)
Domain z -> ("domain", renderValue z)
-- FullAddress z -> ("full address", renderValue z)
GatewayHostname z -> ("gatewayhostname", renderValue z)
GateWayUsageMethod z -> ("gatewayusagemethod", renderValue z)
PromptForCredentialsOnClient z -> ("prompt for credentials on client", renderValue z)
LoadBalanceInfo z -> ("loadbalanceinfo", renderValue z)
RedirectPrinters z -> ("redirectprinters", renderValue z)
RemoteApplicationCmdLine z -> ("remoteapplicationcmdline", renderValue z)
RemoteApplicationMode z -> ("remoteapplicationmode", renderValue z)
RemoteApplicationProgram z -> ("remoteapplicationprogram", renderValue z)
ShellWorkingDirectory z -> ("shell working directory", renderValue z)
UseRedirectionServerName z -> ("Use redirection server name", renderValue z)
Username z -> ("username", renderValue z)
ScreenModeId z -> ("screen mode id", renderValue z)
SessionBPP z -> ("session bpp", renderValue z)
UseMultimon z -> ("use multimon", renderValue z)
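-- A small usage sketch (illustrative only): build an RDPURI for a host and
-- render it. The host name, port and settings below are made up.
{-
exampleURI :: BS.ByteString
exampleURI = renderRDPURI
           . addAttribute (Username "alice")
           . addAttribute (ScreenModeId Two'')
           $ RDPURI ("host.example.com", Just 3389) [SessionBPP BPP32]
-}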
|
vlc/rdp-uri
|
src/RDPURI.hs
|
bsd-3-clause
| 5,142 | 0 | 14 | 987 | 1,362 | 737 | 625 | 119 | 29 |
{-# LANGUAGE TypeSynonymInstances, FlexibleInstances #-}
module Pretty where
import Syntax
-- import Cegt.Rewrite
import Text.PrettyPrint
import Text.Parsec.Pos
import Data.Char
import Text.Parsec.Error(ParseError,showErrorMessages,errorPos,errorMessages)
-- import Debug.Trace
class Disp d where
disp :: d -> Doc
precedence :: d -> Int
precedence _ = 0
instance Disp Doc where
disp = id
instance Disp String where
disp x = if (isUpper $ head x) || (isLower $ head x)
then text x
else parens $ text x
-- if head x == '`'
-- then text x
-- else
instance Disp Int where
disp = integer . toInteger
dParen :: (Disp a) => Int -> a -> Doc
dParen level x =
if level >= (precedence x)
then parens $ disp x
else disp x
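-- Informally: 'dParen level x' parenthesizes 'x' whenever the caller's
-- 'level' is at least as high as x's own precedence. In 'disp (App s1 s2)'
-- below, the argument is printed with 'dParen (precedence s) s2', so a
-- nested application argument renders as 'f (g x)' while left-nested
-- applications stay flat, as in 'f x y'.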
viewLArgs :: Exp -> [Exp]
viewLArgs (Lambda n a) =
n : viewLArgs a
viewLArgs _ = []
viewLBody :: Exp -> Exp
viewLBody (Lambda _ a) = viewLBody a
viewLBody x = x
viewFVars :: Exp -> [Name]
viewFVars (Forall n a) =
n : viewFVars a
viewFVars (Abs n a) =
n : viewFVars a
viewFVars _ = []
viewFBody :: Exp -> Exp
viewFBody (Forall _ a) = viewFBody a
viewFBody (Abs _ a) = viewFBody a
viewFBody x = x
instance Disp (Maybe Exp) where
disp Nothing = text "<>"
disp (Just e) = disp e
instance Disp Exp where
-- disp r | trace ("disp " ++ show r) False = undefined
disp (Const x p) | isUpper (head x) = disp x
| isLower (head x) = brackets $ disp x
| otherwise = disp x
disp Star = text "*"
disp (Var x p) = disp x
-- disp (Ann (Var x) Nothing) = disp x
disp (Ann e t) = parens $ disp e <+> text "::" $$ (nest 5 $ disp t)
-- disp (Ann e t) = parens $ disp x <+> text "::" <+>disp t
disp (s@(App s1 s2)) =
sep [dParen (precedence s - 1) s1,
nest 2 $ dParen (precedence s) s2]
disp (s@(TApp s1 s2)) =
sep [dParen (precedence s - 1) s1,
nest 2 $ text "@"<>dParen (precedence s) s2]
disp a@(Lambda x t) =
let vars = viewLArgs a
b = viewLBody a
ds = map helper vars
in sep [text "\\" <+> sep ds <+> text ".", nest 4 $ disp b]
where helper a@(App _ _ ) = parens $ disp a
helper a = disp a
disp (a@(Forall x f)) =
let vars = map disp $ viewFVars a
b = viewFBody a in
sep [text "forall" <+> sep vars <+> text ".", nest 2 $ disp b]
disp (a@(Abs x f)) =
let vars = map disp $ viewFVars a
b = viewFBody a in
sep [text "\\\\" <+> sep vars <+> text ".", nest 2 $ disp b]
disp (a@(Imply t1 t2)) =
sep [dParen (precedence a) t1,
text "->",
nest 2 $ dParen (precedence a - 1) t2]
disp (a@(Case e alts)) =
text "case" <+> disp e <+> text "of" $$ nest 2 (vcat (map dAlt alts))
where dAlt (p, e) =fsep [disp p <+> text "->", nest 2 $ disp e]
disp (a@(Let ds e)) =
text "let" <+> helper ds <+> text "in" $$ nest 2 (disp e)
where helper ds = vcat (map (\ (n, exp) -> disp n <+> text "=" $$ nest 2 (disp exp)) ds)
precedence (Imply _ _) = 4
precedence (Var _ _) = 12
precedence (Star) = 12
precedence (Const _ _) = 12
precedence (App _ _) = 10
precedence (TApp _ _) = 10
precedence _ = 0
instance Disp Subst where
disp (Subst sub) = disp sub
instance Disp [(Exp, Exp)] where
disp decl = vcat (map (\ (n, exp) -> disp n <+> text "::" <+> disp exp) decl)
instance Disp [(String, Exp)] where
disp decl = brackets $ vcat (map (\ (n, exp) -> text n <+> text "|->" <+> disp exp) decl)
-- disp $ map (\(x, e) -> (Var x , e)) decl
instance Disp ([Exp], Exp) where
disp (pats, e) = (sep $ map helper pats) <+> text "=" <+> disp e
where helper a@(App _ _ ) = parens $ disp a
helper a = disp a
instance Disp Decl where
disp (DataDecl n k cons) =
text "data" <+> disp n <+> text "::" <+> disp k <+> text "where" $$
nest 2 (disp cons) <+> text "\n"
disp (FunDecl f t defs) =
disp f <+> text "::" <+> disp t $$
(vcat $ map (\ x -> disp f <+> disp x) defs) <+> text "\n"
disp (Prim f t) =
text "primitive" <+> disp f <+> text "::" <+> disp t
disp (Syn f k t) =
text "type" <+> disp f <+> text "::" <+> disp k <+> text "=" <+> disp t
disp (TypeOperatorDecl op l r) = text "type" <+> disp r <+> disp l <+> disp op
disp (ProgOperatorDecl op l r) = text "prog" <+> disp r <+> disp l <+> disp op
instance Disp [Decl] where
disp ls = vcat $ map disp ls
instance Disp SourcePos where
disp sp = text (sourceName sp) <> colon <> int (sourceLine sp)
<> colon <> int (sourceColumn sp) <> colon
instance Disp ParseError where
disp pe = (disp (errorPos pe)) $$
(text "Parse Error:" $$ sem)
where sem = text $ showErrorMessages "or" "unknown parse error"
"expecting" "unexpected" "end of input"
(errorMessages pe)
printTyped pfs = vcat (map (\(f,t,e) -> disp f <+> text "::" <+>
disp t <+> text "=" $$
nest 2 (disp e) <+> text "\n")
pfs)
|
Fermat/higher-rank
|
src/Pretty.hs
|
bsd-3-clause
| 5,075 | 0 | 16 | 1,529 | 2,302 | 1,133 | 1,169 | 127 | 2 |
module AST.Module where
import Data.Binary
import qualified Data.List as List
import qualified Data.Map as Map
import Control.Applicative ((<$>),(<*>))
import qualified AST.Expression.Canonical as Canonical
import qualified AST.Declaration as Decl
import qualified AST.Type as Type
import qualified AST.Variable as Var
import AST.PrettyPrint
import qualified Elm.Compiler.Version as Compiler
import Text.PrettyPrint as P
-- HELPFUL TYPE ALIASES
type Interfaces = Map.Map Name Interface
type Types = Map.Map String Type.CanonicalType
type Aliases = Map.Map String ([String], Type.CanonicalType)
type ADTs = Map.Map String (AdtInfo String)
type AdtInfo v = ( [String], [(v, [Type.CanonicalType])] )
type CanonicalAdt = (Var.Canonical, AdtInfo Var.Canonical)
-- MODULES
type SourceModule =
Module (Var.Listing Var.Value) [Decl.SourceDecl]
type ValidModule =
Module (Var.Listing Var.Value) [Decl.ValidDecl]
type CanonicalModule =
Module [Var.Value] CanonicalBody
data Module exports body = Module
{ names :: Name
, path :: FilePath
, exports :: exports
, imports :: [(Name, ImportMethod)]
, body :: body
} deriving (Show)
data CanonicalBody = CanonicalBody
{ program :: Canonical.Expr
, types :: Types
, fixities :: [(Decl.Assoc, Int, String)]
, aliases :: Aliases
, datatypes :: ADTs
, ports :: [String]
} deriving (Show)
-- HEADERS
{-| Basic info needed to identify modules and determine dependencies. -}
data HeaderAndImports = HeaderAndImports
{ _names :: Name
, _exports :: Var.Listing Var.Value
, _imports :: [(Name, ImportMethod)]
}
type Name = [String] -- must be non-empty
nameToString :: Name -> String
nameToString = List.intercalate "."
nameIsNative :: Name -> Bool
nameIsNative name =
case name of
"Native" : _ -> True
_ -> False
-- INTERFACES
{-| Key facts about a module, used when reading info from .elmi files. -}
data Interface = Interface
{ iVersion :: String
, iExports :: [Var.Value]
, iTypes :: Types
, iImports :: [(Name, ImportMethod)]
, iAdts :: ADTs
, iAliases :: Aliases
, iFixities :: [(Decl.Assoc, Int, String)]
, iPorts :: [String]
} deriving (Show)
toInterface :: CanonicalModule -> Interface
toInterface modul =
let body' = body modul in
Interface
{ iVersion = Compiler.version
, iExports = exports modul
, iTypes = types body'
, iImports = imports modul
, iAdts = datatypes body'
, iAliases = aliases body'
, iFixities = fixities body'
, iPorts = ports body'
}
instance Binary Interface where
get = Interface <$> get <*> get <*> get <*> get <*> get <*> get <*> get <*> get
put modul = do
put (iVersion modul)
put (iExports modul)
put (iTypes modul)
put (iImports modul)
put (iAdts modul)
put (iAliases modul)
put (iFixities modul)
put (iPorts modul)
-- IMPORT METHOD
data ImportMethod
= As !String
| Open !(Var.Listing Var.Value)
deriving (Show)
open :: ImportMethod
open = Open (Var.openListing)
importing :: [Var.Value] -> ImportMethod
importing xs = Open (Var.Listing xs False)
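-- Informally: 'open' models an unrestricted import (everything exposed),
-- while 'importing vs' models an explicit import list containing exactly
-- the values 'vs'.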
instance Binary ImportMethod where
put method =
case method of
As alias -> putWord8 0 >> put alias
Open listing -> putWord8 1 >> put listing
get = do tag <- getWord8
case tag of
0 -> As <$> get
1 -> Open <$> get
_ -> error "Error reading valid ImportMethod type from serialized string"
-- PRETTY PRINTING
instance (Pretty exs, Pretty body) => Pretty (Module exs body) where
pretty (Module names _ exs ims body) =
P.vcat [modul, P.text "", prettyImports, P.text "", pretty body]
where
modul = P.text "module" <+> name <+> pretty exs <+> P.text "where"
name = P.text (List.intercalate "." names)
prettyImports =
P.vcat $ map prettyMethod ims
prettyMethod :: (Name, ImportMethod) -> Doc
prettyMethod import' =
case import' of
([name], As alias)
| name == alias -> P.empty
(_, As alias) -> P.text "as" <+> P.text alias
(_, Open listing) -> pretty listing
|
JoeyEremondi/utrecht-apa-p1
|
src/AST/Module.hs
|
bsd-3-clause
| 4,262 | 0 | 13 | 1,094 | 1,329 | 738 | 591 | 120 | 3 |
module Test.Integration.Scenario.Addresses
( spec
) where
import Universum
import Cardano.Wallet.API.V1.Types (Account (accAddresses),
WalletAddress (..))
import qualified Cardano.Wallet.Client.Http as Client
import Test.Integration.Framework.DSL
spec :: Scenarios Context
spec = do
scenario "address is available after it's been created" $ do
fixture <- setup defaultSetup
response <- request $ Client.postAddress $- NewAddress
noSpendingPassword
defaultAccountId
(fixture ^. wallet . walletId)
verify response
[ expectAddressInIndexOf
]
scenario "used addresses previously created can be imported" $ do
fixture <- setup defaultSetup
addr <- successfulRequest $ Client.postAddress $- NewAddress
Nothing
defaultAccountId
(fixture ^. wallet . walletId)
void $ successfulRequest $ Client.deleteWallet $- (fixture ^. wallet . walletId)
void $ successfulRequest $ Client.postWallet $- NewWallet
(fixture ^. backupPhrase)
noSpendingPassword
NormalAssurance
defaultWalletName
RestoreWallet
response <- request $ Client.importAddresses
$- fixture ^. wallet . walletId
$- [view address addr]
verify response
[ expectFieldEqual totalSuccess 1
, expectFieldEqual failures []
, \_ -> expectAddressInIndexOf (Right addr)
]
scenario "can't import addresses that aren't ours" $ do
(ourFixture, theirFixture) <- (,) <$> setup defaultSetup <*> setup defaultSetup
addrs <- sequence $
[ mkAddress (ourFixture ^. backupPhrase) 14
, mkAddress (theirFixture ^. backupPhrase) 1
, mkAddress (theirFixture ^. backupPhrase) 2
, mkAddress (theirFixture ^. backupPhrase) 3
]
response <- request $ Client.importAddresses
$- ourFixture ^. wallet . walletId
$- addrs
index <- fmap (fmap accAddresses) $ request $ Client.getAccount
$- ourFixture ^. wallet . walletId
$- defaultAccountId
verify response
[ expectFieldEqual totalSuccess 1
, expectFieldEqual failures (drop 1 addrs)
]
verify index
[ expectListSizeEqual 2 -- NOTE 2 because there's also a default address
]
scenario "can't import addresses that are already present (used or unused)" $ do
-- NOTE
    -- The fixture looks a bit complex here but, in the end, we should end
-- up with two addresses:
--
-- - 1 unused, default address of the account
-- - 1 used, created by the fixture to receive the initial payment
--
-- We make sure of that by adding an extra 'verify'
fixture <- setup $ defaultSetup
& initialCoins .~ [1000000]
addrs <- fmap accAddresses $ successfulRequest $ Client.getAccount
$- fixture ^. wallet . walletId
$- defaultAccountId
verify addrs
[ expectListSizeEqual 1 . Right . filter addrUsed
, expectListSizeEqual 1 . Right . filter (not . addrUsed)
]
response <- request $ Client.importAddresses
$- fixture ^. wallet . walletId
$- map (view address) addrs
verify response
[ expectFieldEqual totalSuccess 0
, expectFieldEqual failures (map (view address) addrs)
]
|
input-output-hk/pos-haskell-prototype
|
wallet/test/integration/Test/Integration/Scenario/Addresses.hs
|
mit
| 3,653 | 0 | 19 | 1,241 | 766 | 385 | 381 | -1 | -1 |
module Main where
import Statistics.Quantile.Bench
import Statistics.Quantile.Exact
import Statistics.Quantile.Util
import Statistics.Quantile.Types
import System.IO
main :: IO ()
main = do
hSetBuffering stdin LineBuffering
hSetBuffering stdout LineBuffering
selectFromHandle median external stdin >>= print
|
fractalcat/slides
|
2015-08-26-fp-syd-approx-quantiles/approx-quantile/main/quantile.hs
|
mit
| 318 | 0 | 8 | 42 | 78 | 42 | 36 | 11 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE OverloadedLists #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TypeOperators #-}
module Qi.Program.CF.Ipret.Gen (run) where
import Control.Lens
import Control.Monad.Freer hiding (run)
import Data.Map (Map)
import qualified Data.Map as Map
import Network.AWS.CloudFormation (Capability (CapabilityNamedIAM), StackStatus (SSCreateComplete, SSDeleteComplete, SSUpdateComplete),
StackStatus, cloudFormation,
createStack, csCapabilities,
csTemplateBody, dStackName,
deleteStack, describeStacks,
dsRetainResources, dsrsStacks,
lsrsStackSummaries, oOutputKey,
oOutputValue, sOutputs, sStackName,
sStackStatus, ssStackName,
ssStackStatus, updateStack,
usCapabilities, usTemplateBody)
import Network.AWS.S3 (BucketName (BucketName),
ObjectKey (ObjectKey))
import Protolude hiding ((<&>))
import Qi.Config.AWS
import Qi.Config.AWS.CF
import Qi.Config.AWS.S3
import Qi.Program.CF.Lang (AbsentDirective (..), CfEff (..),
StackDescription (..),
StackDescriptionDict,
StackName (StackName))
import Qi.Program.Config.Lang (ConfigEff, getConfig)
import Qi.Program.Gen.Lang
run
:: forall effs a
. (Member GenEff effs, Member ConfigEff effs)
=> (Eff (CfEff ': effs) a -> Eff effs a)
run = interpret (\case
CreateStack (StackName name) template -> do
config <- getConfig
void . amazonka cloudFormation $ createStack name
& csTemplateBody ?~ toS template
& csCapabilities .~ [ CapabilityNamedIAM ]
UpdateStack (StackName name) template -> do
config <- getConfig
void . amazonka cloudFormation $ updateStack name
& usTemplateBody ?~ toS template
& usCapabilities .~ [ CapabilityNamedIAM ]
DeleteStack (StackName name) ->
void . amazonka cloudFormation $ deleteStack name
& dsRetainResources .~ []
DescribeStacks ->
getStackDescriptions
WaitOnStackStatus name status' isAbsentOk -> do
let loop = sleep 1000000 >> go
go = do
stackDict <- getStackDescriptions
case Map.lookup name stackDict of
Just StackDescription{ status } | status == status' -> pure ()
Just _ -> loop -- wait for the stack state to change
Nothing -> case isAbsentOk of -- no mention of the stack in the log
AbsentOk -> pure () -- it's fine, don't wait any longer
NoAbsent -> loop -- keep waiting for the stack to appear in the log
go
)
where
getStackDescriptions :: Eff effs StackDescriptionDict
getStackDescriptions = do
r <- amazonka cloudFormation $ describeStacks
-- & dStackName ?~ name
pure . Map.fromList $ (\stack ->
( StackName $ stack ^. sStackName
, StackDescription {
status = stack ^. sStackStatus
, outputs = catMaybes $ (\o -> do
key <- o ^. oOutputKey
val <- o ^. oOutputValue
pure (key, val)
) <$> stack ^. sOutputs
}
)) <$> r ^. dsrsStacks
{-
updateStack
:: Text
-> AWS ()
updateStack name =
void . send $ CF.updateStack name
& usTemplateURL ?~ T.concat ["https://s3.amazonaws.com/", name, "/cf.json"]
& usCapabilities .~ [CapabilityNamedIAM]
deleteStack
:: Text
-> AWS ()
deleteStack name =
void . send $ CF.deleteStack name
& dsRetainResources .~ []
describeStack
:: Text
-> AWS StackDescription
describeStack name = do
r <- send $ CF.describeStacks
& dStackName ?~ name
case listToMaybe $ r ^ .dsrsStacks of
Just stack ->
return $ StackDescription {
sdStatus = T.pack . show $ stack^.sStackStatus
, sdOutputs = map (\o -> (fromJust $ o^.oOutputKey, fromJust $ o^.oOutputValue)) $ stack^.sOutputs
}
Nothing ->
panic "Error: no stack description was returned"
- -}
|
qmuli/qmuli
|
library/Qi/Program/CF/Ipret/Gen.hs
|
mit
| 5,096 | 0 | 25 | 2,052 | 778 | 437 | 341 | 81 | 8 |
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-| Unittests for the 'Ganeti.Common' module.
-}
{-
Copyright (C) 2009, 2010, 2011, 2012, 2013 Google Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
-}
module Test.Ganeti.Common
( testCommon
, checkOpt
, passFailOpt
, checkEarlyExit
) where
import Test.QuickCheck hiding (Result)
import Test.HUnit
import qualified System.Console.GetOpt as GetOpt
import System.Exit
import Test.Ganeti.TestHelper
import Test.Ganeti.TestCommon
import Ganeti.BasicTypes
import Ganeti.Common
import Ganeti.HTools.Program.Main (personalities)
{-# ANN module "HLint: ignore Use camelCase" #-}
-- | Helper to check for correct parsing of an option.
checkOpt :: (StandardOptions b) =>
(a -> Maybe String) -- ^ Converts the value into a cmdline form
-> b -- ^ The default options
-> (String -> c) -- ^ Fail test function
-> (String -> d -> d -> c) -- ^ Check for equality function
-> (a -> d) -- ^ Transforms the value to a compare val
-> (a, GenericOptType b, b -> d) -- ^ Triple of value, the
-- option, function to
-- extract the set value
-- from the options
-> c
checkOpt repr defaults failfn eqcheck valfn
(val, opt@(GetOpt.Option _ longs _ _, _), fn) =
case longs of
[] -> failfn "no long options?"
cmdarg:_ ->
case parseOptsInner defaults
["--" ++ cmdarg ++ maybe "" ("=" ++) (repr val)]
"prog" [opt] [] of
        Left e -> failfn $ "Failed to parse option '" ++ cmdarg ++ "': " ++
                  show e
        Right (options, _) -> eqcheck ("Wrong value in option " ++
                                       cmdarg ++ "?") (valfn val) (fn options)
-- | Helper to check for correct and incorrect parsing of an option.
passFailOpt :: (StandardOptions b) =>
b -- ^ The default options
-> (String -> c) -- ^ Fail test function
-> c -- ^ Pass function
-> (GenericOptType b, String, String)
-- ^ The list of enabled options, fail value and pass value
-> c
passFailOpt defaults failfn passfn
(opt@(GetOpt.Option _ longs _ _, _), bad, good) =
let first_opt = case longs of
[] -> error "no long options?"
x:_ -> x
prefix = "--" ++ first_opt ++ "="
good_cmd = prefix ++ good
bad_cmd = prefix ++ bad in
case (parseOptsInner defaults [bad_cmd] "prog" [opt] [],
parseOptsInner defaults [good_cmd] "prog" [opt] []) of
(Left _, Right _) -> passfn
(Right _, Right _) -> failfn $ "Command line '" ++ bad_cmd ++
"' succeeded when it shouldn't"
(Left _, Left _) -> failfn $ "Command line '" ++ good_cmd ++
"' failed when it shouldn't"
(Right _, Left _) ->
failfn $ "Command line '" ++ bad_cmd ++
"' succeeded when it shouldn't, while command line '" ++
good_cmd ++ "' failed when it shouldn't"
-- | Helper to test that a given option is accepted OK with quick exit.
checkEarlyExit :: (StandardOptions a) =>
a -> String -> [GenericOptType a] -> [ArgCompletion]
-> Assertion
checkEarlyExit defaults name options arguments =
mapM_ (\param ->
case parseOptsInner defaults [param] name options arguments of
Left (code, _) ->
assertEqual ("Program " ++ name ++
" returns invalid code " ++ show code ++
" for option " ++ param) ExitSuccess code
_ -> assertFailure $ "Program " ++ name ++
" doesn't consider option " ++
param ++ " as early exit one"
) ["-h", "--help", "-V", "--version"]
-- | Test parseYesNo.
prop_parse_yes_no :: Bool -> Bool -> String -> Property
prop_parse_yes_no def testval val =
forAll (elements [val, "yes", "no"]) $ \actual_val ->
if testval
then parseYesNo def Nothing ==? Ok def
else let result = parseYesNo def (Just actual_val)
in if actual_val `elem` ["yes", "no"]
then result ==? Ok (actual_val == "yes")
else property $ isBad result
-- | Check that formatCmdUsage works similar to Python _FormatUsage.
case_formatCommands :: Assertion
case_formatCommands =
assertEqual "proper wrap for HTools Main"
resCmdTest (formatCommands personalities)
where resCmdTest :: [String]
resCmdTest =
[ " hail - Ganeti IAllocator plugin that implements the instance\
\ placement and"
, " movement using the same algorithm as hbal(1)"
, " hbal - cluster balancer that looks at the current state of\
\ the cluster and"
, " computes a series of steps designed to bring the\
\ cluster into a"
, " better state"
, " hcheck - cluster checker; prints information about cluster's\
\ health and checks"
, " whether a rebalance done using hbal would help"
, " hinfo - cluster information printer; it prints information\
\ about the current"
, " cluster state and its residing nodes/instances"
, " hroller - cluster rolling maintenance helper; it helps\
\ scheduling node reboots"
, " in a manner that doesn't conflict with the instances'\
\ topology"
, " hscan - tool for scanning clusters via RAPI and saving their\
\ data in the"
, " input format used by hbal(1) and hspace(1)"
, " hspace - computes how many additional instances can be fit on a\
\ cluster, while"
, " maintaining N+1 status."
]
testSuite "Common"
[ 'prop_parse_yes_no
, 'case_formatCommands
]
|
sarahn/ganeti
|
test/hs/Test/Ganeti/Common.hs
|
gpl-2.0
| 6,790 | 0 | 17 | 2,260 | 1,125 | 619 | 506 | 109 | 5 |
-- What a basic console application would look like using hplayground
import Haste.HPlay.View
import Prelude hiding(print)
main = runBody $ do
print "What is your name?"
name <- getString Nothing `fire` OnKeyUp
print $ "hello " ++ name ++"!"
print = wraw . pre
|
agocorona/tryhplay
|
examples/console.hs
|
gpl-3.0
| 273 | 0 | 10 | 56 | 72 | 38 | 34 | 7 | 1 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE PackageImports #-}
import "hint" HLint.Default
import "hint" HLint.Dollar
ignore "Use import/export shortcut"
|
saep/nvim-hs
|
HLint.hs
|
apache-2.0
| 157 | 0 | 5 | 19 | 21 | 12 | 9 | 5 | 0 |
{-# LANGUAGE TupleSections #-}
{-|
This module contains the logging configuration functions for K3's logging
system.
-}
module Language.K3.Utils.Logger.Config
( configureLogging
, parseInstruction
, configureLoggingHandlers
, configureByInstruction
) where
import Control.Applicative ((<$>))
import Data.List.Split
import System.Log
import System.Log.Formatter
import System.Log.Handler.Simple
import System.Log.Logger
-- | Configures logging from a set of logging level strings. These strings are
-- expected to be of the form "PRIO" or "PRIO:NAME" where PRIO is a logging
-- priority (one of debug, info, notice, warning, error, critical, alert, or
-- emergency) and NAME is the name of a module subtree. Returns True if
-- configuration was successful; returns False if something went wrong. If
-- an error occurs, a message is printed before False is returned.
configureLogging :: [String] -> IO Bool
configureLogging configs =
case mapM parseInstruction configs of
Left err -> do
putStrLn $ "Logging configuration error: " ++ err
return False
Right steps -> do
mapM_ configureByInstruction steps
return True
-- | Given a module name and a priority, sets that module to log only messages
-- of that priority and higher.
configureByInstruction :: (String, Priority) -> IO ()
configureByInstruction (loggerName, prio) =
updateGlobalLogger loggerName $ setLevel prio
parseInstruction :: String -> Either String (String, Priority)
parseInstruction str =
let elems = splitOn ":" str in
case elems of
_:_:_:_ -> Left $ "Too many colons: " ++ str
[] -> Left "Invalid logging configuration"
[prioStr] ->
(rootLoggerName,) <$> nameToPrio prioStr
[name, prioStr] ->
(name,) <$> nameToPrio prioStr
where
nameToPrio :: String -> Either String Priority
nameToPrio prioStr =
maybe (Left $ "Invalid priority: " ++ prioStr) Right $
parsePriority prioStr
parsePriority :: String -> Maybe Priority
parsePriority prioStr =
case prioStr of
"debug" -> Just DEBUG
"info" -> Just INFO
"notice" -> Just NOTICE
"warning" -> Just WARNING
"error" -> Just ERROR
"critical" -> Just CRITICAL
"alert" -> Just ALERT
"emergency" -> Just EMERGENCY
_ -> Nothing
-- | Configures logging handlers for the interpreter.
configureLoggingHandlers :: IO ()
configureLoggingHandlers =
updateGlobalLogger rootLoggerName $ setHandlers [handler]
where
handler = GenericHandler
{ priority = DEBUG
, privData = ()
, writeFunc = const putStrLn
, closeFunc = const $ return ()
, formatter = simpleLogFormatter "($prio): $msg"
}
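-- A brief illustrative sketch of wiring the two entry points together; the
-- module-subtree name in the second instruction is hypothetical.
{-
setupLogging :: IO Bool
setupLogging = do
  configureLoggingHandlers
  configureLogging ["warning", "debug:Language.K3.Interpreter"]
-}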
|
DaMSL/K3
|
src/Language/K3/Utils/Logger/Config.hs
|
apache-2.0
| 2,682 | 0 | 12 | 565 | 562 | 296 | 266 | 59 | 9 |
{-# LANGUAGE TemplateHaskell, ScopedTypeVariables, NamedFieldPuns #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-| Unittests for the job scheduler.
-}
{-
Copyright (C) 2014 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Test.Ganeti.JQScheduler (testJQScheduler) where
import Control.Lens ((&), (.~), _2)
import qualified Data.ByteString.UTF8 as UTF8
import Data.List (inits)
import Data.Maybe
import qualified Data.Map as Map
import Data.Set (Set, difference)
import qualified Data.Set as Set
import Text.JSON (JSValue(..))
import Test.HUnit
import Test.QuickCheck
import Test.Ganeti.JQueue.Objects (genQueuedOpCode, genJobId, justNoTs)
import Test.Ganeti.SlotMap (genTestKey, overfullKeys)
import Test.Ganeti.TestCommon
import Test.Ganeti.TestHelper
import Test.Ganeti.Types ()
import Ganeti.JQScheduler.Filtering
import Ganeti.JQScheduler.ReasonRateLimiting
import Ganeti.JQScheduler.Types
import Ganeti.JQueue.Lens
import Ganeti.JQueue.Objects
import Ganeti.Objects (FilterRule(..), FilterPredicate(..), FilterAction(..),
filterRuleOrder)
import Ganeti.OpCodes
import Ganeti.OpCodes.Lens
import Ganeti.Query.Language (Filter(..), FilterValue(..))
import Ganeti.SlotMap
import Ganeti.Types
import Ganeti.Utils (isSubsequenceOf, newUUID)
{-# ANN module "HLint: ignore Use camelCase" #-}
genRateLimitReason :: Gen String
genRateLimitReason = do
Slot{ slotLimit = n } <- arbitrary
l <- genTestKey
return $ "rate-limit:" ++ show n ++ ":" ++ l
instance Arbitrary QueuedJob where
arbitrary = do
-- For our scheduler testing purposes here, we only care about
-- opcodes, job ID and reason rate limits.
jid <- genJobId
ops <- resize 5 . listOf1 $ do
o <- genQueuedOpCode
-- Put some rate limits into the OpCode.
limitString <- genRateLimitReason
return $
o & qoInputL . validOpCodeL . metaParamsL . opReasonL . traverse . _2
.~ limitString
return $ QueuedJob jid ops justNoTs justNoTs justNoTs Nothing Nothing
instance Arbitrary JobWithStat where
arbitrary = nullJobWithStat <$> arbitrary
shrink job = [ job { jJob = x } | x <- shrink (jJob job) ]
instance Arbitrary Queue where
arbitrary = do
let genJobsUniqueJIDs :: [JobWithStat] -> Gen [JobWithStat]
genJobsUniqueJIDs = listOfUniqueBy arbitrary (qjId . jJob)
queued <- genJobsUniqueJIDs []
running <- genJobsUniqueJIDs queued
manip <- genJobsUniqueJIDs (queued ++ running)
return $ Queue queued running manip
shrink q =
[ q { qEnqueued = x } | x <- shrink (qEnqueued q) ] ++
[ q { qRunning = x } | x <- shrink (qRunning q) ] ++
[ q { qManipulated = x } | x <- shrink (qManipulated q) ]
-- * Test cases
-- | Tests rate limit reason trail parsing.
case_parseReasonRateLimit :: Assertion
case_parseReasonRateLimit = do
assertBool "default case" $
let a = parseReasonRateLimit "rate-limit:20:my label"
b = parseReasonRateLimit "rate-limit:21:my label"
in and
[ a == Just ("20:my label", 20)
, b == Just ("21:my label", 21)
]
assertEqual "be picky about whitespace"
Nothing
(parseReasonRateLimit " rate-limit:20:my label")
-- | Tests that "rate-limit:n:..." and "rate-limit:m:..." become different
-- rate-limiting buckets.
prop_slotMapFromJob_conflicting_buckets :: Property
prop_slotMapFromJob_conflicting_buckets = do
let sameBucketReasonStringGen :: Gen (String, String)
sameBucketReasonStringGen = do
Positive (n :: Int) <- arbitrary
Positive (m :: Int) <- arbitrary `suchThat` (/= Positive n)
l <- genPrintableAsciiString
return ( "rate-limit:" ++ show n ++ ":" ++ l
, "rate-limit:" ++ show m ++ ":" ++ l )
forAll sameBucketReasonStringGen $ \(s1, s2) -> do
(lab1, lim1) <- parseReasonRateLimit s1
(lab2, _ ) <- parseReasonRateLimit s2
let sm = Map.fromList [(lab1, Slot 1 lim1)]
cm = Map.fromList [(lab2, 1)]
in return $
(sm `occupySlots` cm) ==? Map.fromList [ (lab1, Slot 1 lim1)
, (lab2, Slot 1 0)
] :: Gen Property
-- | Tests some basic cases for reason rate limiting.
case_reasonRateLimit :: Assertion
case_reasonRateLimit = do
let mkJobWithReason jobNum reasonTrail = do
opc <- genSample genQueuedOpCode
jid <- makeJobId jobNum
let opc' = opc & (qoInputL . validOpCodeL . metaParamsL . opReasonL)
.~ reasonTrail
return . nullJobWithStat
$ QueuedJob
{ qjId = jid
, qjOps = [opc']
, qjReceivedTimestamp = Nothing
, qjStartTimestamp = Nothing
, qjEndTimestamp = Nothing
, qjLivelock = Nothing
, qjProcessId = Nothing
}
-- 3 jobs, limited to 2 of them running.
j1 <- mkJobWithReason 1 [("source1", "rate-limit:2:hello", 0)]
j2 <- mkJobWithReason 2 [("source1", "rate-limit:2:hello", 0)]
j3 <- mkJobWithReason 3 [("source1", "rate-limit:2:hello", 0)]
assertEqual "[j1] should not be rate-limited"
[j1]
(reasonRateLimit (Queue [j1] [] []) [j1])
assertEqual "[j1, j2] should not be rate-limited"
[j1, j2]
(reasonRateLimit (Queue [j1, j2] [] []) [j1, j2])
assertEqual "j3 should be rate-limited 1"
[j1, j2]
(reasonRateLimit (Queue [j1, j2, j3] [] []) [j1, j2, j3])
assertEqual "j3 should be rate-limited 2"
[j2]
(reasonRateLimit (Queue [j2, j3] [j1] []) [j2, j3])
assertEqual "j3 should be rate-limited 3"
[]
(reasonRateLimit (Queue [j3] [j1] [j2]) [j3])
-- | Tests the specified properties of `reasonRateLimit`, as defined in
-- `doc/design-optables.rst`.
prop_reasonRateLimit :: Property
prop_reasonRateLimit =
forAllShrink arbitrary shrink $ \q ->
let slotMapFromJobWithStat = slotMapFromJobs . map jJob
enqueued = qEnqueued q
toRun = reasonRateLimit q enqueued
oldSlots = slotMapFromJobWithStat (qRunning q)
newSlots = slotMapFromJobWithStat (qRunning q ++ toRun)
-- What would happen without rate limiting.
newSlotsNoLimits = slotMapFromJobWithStat (qRunning q ++ enqueued)
in -- Ensure it's unlikely that jobs are all in different buckets.
cover'
50
(any ((> 1) . slotOccupied) . Map.elems $ newSlotsNoLimits)
"some jobs have the same rate-limit bucket"
-- Ensure it's likely that rate limiting has any effect.
. cover'
50
(overfullKeys newSlotsNoLimits
`difference` overfullKeys oldSlots /= Set.empty)
"queued jobs cannot be started because of rate limiting"
$ conjoin
[ counterexample "scheduled jobs must be subsequence" $
toRun `isSubsequenceOf` enqueued
-- This is the key property:
, counterexample "no job may exceed its bucket limits, except from\
\ jobs that were already running with exceeded\
\ limits; those must not increase" $
conjoin
[ if occup <= limit
-- Within limits, all fine.
then passTest
-- Bucket exceeds limits - it must have exceeded them
-- in the initial running list already, with the same
-- slot count.
else Map.lookup k oldSlots ==? Just slot
| (k, slot@(Slot occup limit)) <- Map.toList newSlots ]
]
-- | Tests that filter rule ordering is determined (solely) by priority,
-- watermark and UUID, as defined in `doc/design-optables.rst`.
prop_filterRuleOrder :: Property
prop_filterRuleOrder = property $ do
a <- arbitrary
b <- arbitrary `suchThat` ((frUuid a /=) . frUuid)
return $ filterRuleOrder a b ==? (frPriority a, frWatermark a, frUuid a)
`compare`
(frPriority b, frWatermark b, frUuid b)
-- | Tests common inputs for `matchPredicate`, especially the predicates
-- and fields available to them as defined in the spec.
case_matchPredicate :: Assertion
case_matchPredicate = do
jid1 <- makeJobId 1
clusterName <- mkNonEmpty "cluster1"
let job =
QueuedJob
{ qjId = jid1
, qjOps =
[ QueuedOpCode
{ qoInput = ValidOpCode MetaOpCode
{ metaParams = CommonOpParams
{ opDryRun = Nothing
, opDebugLevel = Nothing
, opPriority = OpPrioHigh
, opDepends = Just []
, opComment = Nothing
, opReason = [("source1", "reason1", 1234)]
}
, metaOpCode = OpClusterRename
{ opName = clusterName
}
}
, qoStatus = OP_STATUS_QUEUED
, qoResult = JSNull
, qoLog = []
, qoPriority = -1
, qoStartTimestamp = Nothing
, qoExecTimestamp = Nothing
, qoEndTimestamp = Nothing
}
]
, qjReceivedTimestamp = Nothing
, qjStartTimestamp = Nothing
, qjEndTimestamp = Nothing
, qjLivelock = Nothing
, qjProcessId = Nothing
}
let watermark = jid1
check = matchPredicate job watermark
-- jobid filters
assertEqual "matching jobid filter"
True
. check $ FPJobId (EQFilter "id" (NumericValue 1))
assertEqual "non-matching jobid filter"
False
. check $ FPJobId (EQFilter "id" (NumericValue 2))
assertEqual "non-matching jobid filter (string passed)"
False
. check $ FPJobId (EQFilter "id" (QuotedString "1"))
-- jobid filters: watermarks
assertEqual "matching jobid watermark filter"
True
. check $ FPJobId (EQFilter "id" (QuotedString "watermark"))
-- opcode filters
assertEqual "matching opcode filter (type of opcode)"
True
. check $ FPOpCode (EQFilter "OP_ID" (QuotedString "OP_CLUSTER_RENAME"))
assertEqual "non-matching opcode filter (type of opcode)"
False
. check $ FPOpCode (EQFilter "OP_ID" (QuotedString "OP_INSTANCE_CREATE"))
assertEqual "matching opcode filter (nested access)"
True
. check $ FPOpCode (EQFilter "name" (QuotedString "cluster1"))
assertEqual "non-matching opcode filter (nonexistent nested access)"
False
. check $ FPOpCode (EQFilter "something" (QuotedString "cluster1"))
-- reason filters
assertEqual "matching reason filter (reason field)"
True
. check $ FPReason (EQFilter "reason" (QuotedString "reason1"))
assertEqual "non-matching reason filter (reason field)"
False
. check $ FPReason (EQFilter "reason" (QuotedString "reasonGarbage"))
assertEqual "matching reason filter (source field)"
True
. check $ FPReason (EQFilter "source" (QuotedString "source1"))
assertEqual "matching reason filter (timestamp field)"
True
. check $ FPReason (EQFilter "timestamp" (NumericValue 1234))
assertEqual "non-matching reason filter (nonexistent field)"
False
. check $ FPReason (EQFilter "something" (QuotedString ""))
-- | Tests that jobs selected by `applyingFilter` actually match
-- and have an effect (are not CONTINUE filters).
prop_applyingFilter :: Property
prop_applyingFilter =
forAllShrink arbitrary shrink $ \job ->
forAllShrink (arbitrary `suchThat`
(isJust . flip applyingFilter job . Set.fromList)) shrink
$ \filters ->
let applying = applyingFilter (Set.fromList filters) job
in case applying of
Just f -> job `matches` f && frAction f /= Continue
Nothing -> error "Should not happen"
case_jobFiltering :: Assertion
case_jobFiltering = do
clusterName <- mkNonEmpty "cluster1"
jid1 <- makeJobId 1
jid2 <- makeJobId 2
jid3 <- makeJobId 3
jid4 <- makeJobId 4
unsetPrio <- mkNonNegative 1234
uuid1 <- fmap UTF8.fromString newUUID
let j1 =
nullJobWithStat QueuedJob
{ qjId = jid1
, qjOps =
[ QueuedOpCode
{ qoInput = ValidOpCode MetaOpCode
{ metaParams = CommonOpParams
{ opDryRun = Nothing
, opDebugLevel = Nothing
, opPriority = OpPrioHigh
, opDepends = Just []
, opComment = Nothing
, opReason = [("source1", "reason1", 1234)]}
, metaOpCode = OpClusterRename
{ opName = clusterName
}
}
, qoStatus = OP_STATUS_QUEUED
, qoResult = JSNull
, qoLog = []
, qoPriority = -1
, qoStartTimestamp = Nothing
, qoExecTimestamp = Nothing
, qoEndTimestamp = Nothing
}
]
, qjReceivedTimestamp = Nothing
, qjStartTimestamp = Nothing
, qjEndTimestamp = Nothing
, qjLivelock = Nothing
, qjProcessId = Nothing
}
j2 = j1 & jJobL . qjIdL .~ jid2
j3 = j1 & jJobL . qjIdL .~ jid3
j4 = j1 & jJobL . qjIdL .~ jid4
fr1 =
FilterRule
{ frWatermark = jid1
, frPriority = unsetPrio
, frPredicates = [FPJobId (EQFilter "id" (NumericValue 1))]
, frAction = Reject
, frReasonTrail = []
, frUuid = uuid1
}
-- Gives the rule a new UUID.
rule fr = do
uuid <- fmap UTF8.fromString newUUID
return fr{ frUuid = uuid }
      -- Helper to create filter chains: assigns increasing priorities to the
      -- filters in the list, so that filters listed first are processed
      -- first.
chain :: [FilterRule] -> Set FilterRule
chain frs
| any ((/= unsetPrio) . frPriority) frs =
error "Filter was passed to `chain` that already had a priority."
| otherwise =
Set.fromList
[ fr{ frPriority = prio }
| (fr, Just prio) <- zip frs (map mkNonNegative [1..]) ]
fr2 <- rule fr1{ frAction = Accept }
fr3 <- rule fr1{ frAction = Pause }
fr4 <- rule fr1{ frPredicates =
[FPJobId (GTFilter "id" (QuotedString "watermark"))]
}
fr5 <- rule fr1{ frPredicates = [] }
fr6 <- rule fr5{ frAction = Continue }
fr7 <- rule fr6{ frAction = RateLimit 2 }
fr8 <- rule fr4{ frAction = Continue, frWatermark = jid1 }
fr9 <- rule fr8{ frAction = RateLimit 2 }
assertEqual "j1 should be rejected (by fr1)"
[]
(jobFiltering (Queue [j1] [] []) (chain [fr1]) [j1])
assertEqual "j1 should be rejected (by fr1, it has priority)"
[]
(jobFiltering (Queue [j1] [] []) (chain [fr1, fr2]) [j1])
assertEqual "j1 should be accepted (by fr2, it has priority)"
[j1]
(jobFiltering (Queue [j1] [] []) (chain [fr2, fr1]) [j1])
assertEqual "j1 should be paused (by fr3)"
[]
(jobFiltering (Queue [j1] [] []) (chain [fr3]) [j1])
assertEqual "j2 should be rejected (over watermark1)"
[j1]
(jobFiltering (Queue [j1, j2] [] []) (chain [fr4]) [j1, j2])
assertEqual "all jobs should be rejected (since no predicates)"
[]
(jobFiltering (Queue [j1, j2] [] []) (chain [fr5]) [j1, j2])
assertEqual "j3 should be rate-limited"
[j1, j2]
(jobFiltering (Queue [j1, j2, j3] [] []) (chain [fr6, fr7]) [j1, j2, j3])
assertEqual "j4 should be rate-limited"
-- j1 doesn't apply to fr8/fr9 (since they match only watermark > jid1)
-- so j1 gets scheduled
[j1, j2, j3]
(jobFiltering (Queue [j1, j2, j3, j4] [] []) (chain [fr8, fr9])
[j1, j2, j3, j4])
-- | Tests the specified properties of `jobFiltering`, as defined in
-- `doc/design-optables.rst`.
prop_jobFiltering :: Property
prop_jobFiltering =
forAllShrink (arbitrary `suchThat` (not . null . qEnqueued)) shrink $ \q ->
forAllShrink (resize 4 arbitrary) shrink $ \(NonEmpty filterList) ->
let running = qRunning q ++ qManipulated q
enqueued = qEnqueued q
filters = Set.fromList filterList
toRun = jobFiltering q filters enqueued -- do the filtering
-- Helpers
-- Whether `fr` applies to more than `n` of the `jobs`
-- (that is, more than allowed).
exceeds :: Int -> FilterRule -> [JobWithStat] -> Bool
exceeds n fr jobs =
n < (length
. filter ((frUuid fr ==) . frUuid)
. mapMaybe (applyingFilter filters)
$ map jJob jobs)
{- TODO(#1318): restore coverage checks after a way to do it nicely
has been found.
-- Helpers for ensuring sensible coverage.
-- Makes sure that each action appears with some probability.
actionName = head . words . show
allActions = map actionName [ Accept, Continue, Pause, Reject
, RateLimit 0 ]
applyingActions = map (actionName . frAction)
. mapMaybe (applyingFilter filters)
$ map jJob enqueued
perc = 4 -- percent; low because it's per action
actionCovers =
foldr (.) id
[ stableCover (a `elem` applyingActions) perc ("is " ++ a)
| a <- allActions ]
-}
-- Note: if using `covers`, it should be before `conjoin` (see
-- QuickCheck bugs 25 and 27).
in conjoin
[ counterexample "scheduled jobs must be subsequence" $
toRun `isSubsequenceOf` enqueued
, counterexample "a reason for each job (not) being scheduled" .
-- All enqueued jobs must have a reason why they were (not)
-- scheduled, determined by the filter that applies.
flip all enqueued $ \job ->
case applyingFilter filters (jJob job) of
-- If no filter matches, the job must run.
Nothing -> job `elem` toRun
Just fr@FilterRule{ frAction } -> case frAction of
                -- ACCEPT filters permit the job immediately,
                -- PAUSE/REJECT forbid running, CONTINUE filters cannot
                -- be the output of `applyingFilter`, and
                -- RATE_LIMIT filters have a more complex property.
Accept -> job `elem` toRun
Continue -> error "must not happen"
Pause -> job `notElem` toRun
Reject -> job `notElem` toRun
RateLimit n ->
let -- Jobs in queue before our job.
jobsBefore = takeWhile (/= job) enqueued
in if job `elem` toRun
                           -- If it got scheduled, then the job together with
                           -- the jobs before it doesn't overfill the rate limit.
then not . exceeds n fr $ running
++ jobsBefore ++ [job]
                           -- If it didn't get scheduled, then the rate limit
-- was already full before scheduling or the job
-- or one of the jobs before made it full.
else any (exceeds n fr . (running ++))
(inits $ jobsBefore ++ [job])
-- The `inits` bit includes the [] and [...job]
-- cases.
]
testSuite "JQScheduler"
[ 'case_parseReasonRateLimit
, 'prop_slotMapFromJob_conflicting_buckets
, 'case_reasonRateLimit
, 'prop_reasonRateLimit
, 'prop_filterRuleOrder
, 'case_matchPredicate
, 'prop_applyingFilter
, 'case_jobFiltering
-- Temporarily disabled until we fix the coverage (#1318)
--, 'prop_jobFiltering
]
|
ganeti/ganeti
|
test/hs/Test/Ganeti/JQScheduler.hs
|
bsd-2-clause
| 21,793 | 0 | 28 | 7,081 | 4,441 | 2,386 | 2,055 | -1 | -1 |
{-# LANGUAGE Haskell98 #-}
{-# LINE 1 "Network/Wai/Handler/Warp/Counter.hs" #-}
{-# LANGUAGE CPP #-}
module Network.Wai.Handler.Warp.Counter (
Counter
, newCounter
, waitForZero
, increase
, decrease
) where
import Control.Concurrent.STM
import Control.Monad (unless)
newtype Counter = Counter (TVar Int)
newCounter :: IO Counter
newCounter = Counter <$> newTVarIO 0
waitForZero :: Counter -> IO ()
waitForZero (Counter ref) = atomically $ do
x <- readTVar ref
unless (x == 0) retry
increase :: Counter -> IO ()
increase (Counter ref) = atomically $ modifyTVar' ref $ \x -> x + 1
decrease :: Counter -> IO ()
decrease (Counter ref) = atomically $ modifyTVar' ref $ \x -> x - 1
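-- A small usage sketch (illustrative only; 'forkIO' comes from
-- Control.Concurrent, which this module does not import, and a production
-- version would likely pair 'decrease' with 'finally'):
--
-- > withWorker :: IO () -> IO ()
-- > withWorker work = do
-- >   c <- newCounter
-- >   increase c
-- >   _ <- forkIO (work >> decrease c)
-- >   waitForZero c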
|
phischu/fragnix
|
tests/packages/scotty/Network.Wai.Handler.Warp.Counter.hs
|
bsd-3-clause
| 824 | 0 | 10 | 257 | 232 | 124 | 108 | 22 | 1 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE CPP, NoImplicitPrelude, BangPatterns, MagicHash, UnboxedTuples,
StandaloneDeriving, AutoDeriveTypeable, NegativeLiterals #-}
{-# OPTIONS_HADDOCK hide #-}
-----------------------------------------------------------------------------
-- |
-- Module : GHC.Int
-- Copyright : (c) The University of Glasgow 1997-2002
-- License : see libraries/base/LICENSE
--
-- Maintainer : [email protected]
-- Stability : internal
-- Portability : non-portable (GHC Extensions)
--
-- The sized integral datatypes, 'Int8', 'Int16', 'Int32', and 'Int64'.
--
-----------------------------------------------------------------------------
module GHC.Int (
Int(..), Int8(..), Int16(..), Int32(..), Int64(..),
uncheckedIShiftL64#, uncheckedIShiftRA64#,
-- * Equality operators
-- | See GHC.Classes#matching_overloaded_methods_in_rules
eqInt, neInt, gtInt, geInt, ltInt, leInt,
eqInt8, neInt8, gtInt8, geInt8, ltInt8, leInt8,
eqInt16, neInt16, gtInt16, geInt16, ltInt16, leInt16,
eqInt32, neInt32, gtInt32, geInt32, ltInt32, leInt32,
eqInt64, neInt64, gtInt64, geInt64, ltInt64, leInt64
) where
import Data.Bits
import Data.Maybe
import GHC.Prim
import GHC.Base
import GHC.Enum
import GHC.Num
import GHC.Real
import GHC.Read
import GHC.Arr
import GHC.Word hiding (uncheckedShiftL64#, uncheckedShiftRL64#)
import GHC.Show
------------------------------------------------------------------------
-- type Int8
------------------------------------------------------------------------
-- Int8 is represented in the same way as Int. Operations may assume
-- and must ensure that it holds only values from its logical range.
data Int8 = I8# Int#
-- ^ 8-bit signed integer type
-- See GHC.Classes#matching_overloaded_methods_in_rules
-- | @since 2.01
instance Eq Int8 where
(==) = eqInt8
(/=) = neInt8
eqInt8, neInt8 :: Int8 -> Int8 -> Bool
eqInt8 (I8# x) (I8# y) = isTrue# (x ==# y)
neInt8 (I8# x) (I8# y) = isTrue# (x /=# y)
{-# INLINE [1] eqInt8 #-}
{-# INLINE [1] neInt8 #-}
-- | @since 2.01
instance Ord Int8 where
(<) = ltInt8
(<=) = leInt8
(>=) = geInt8
(>) = gtInt8
{-# INLINE [1] gtInt8 #-}
{-# INLINE [1] geInt8 #-}
{-# INLINE [1] ltInt8 #-}
{-# INLINE [1] leInt8 #-}
gtInt8, geInt8, ltInt8, leInt8 :: Int8 -> Int8 -> Bool
(I8# x) `gtInt8` (I8# y) = isTrue# (x ># y)
(I8# x) `geInt8` (I8# y) = isTrue# (x >=# y)
(I8# x) `ltInt8` (I8# y) = isTrue# (x <# y)
(I8# x) `leInt8` (I8# y) = isTrue# (x <=# y)
-- | @since 2.01
instance Show Int8 where
showsPrec p x = showsPrec p (fromIntegral x :: Int)
-- | @since 2.01
instance Num Int8 where
(I8# x#) + (I8# y#) = I8# (narrow8Int# (x# +# y#))
(I8# x#) - (I8# y#) = I8# (narrow8Int# (x# -# y#))
(I8# x#) * (I8# y#) = I8# (narrow8Int# (x# *# y#))
negate (I8# x#) = I8# (narrow8Int# (negateInt# x#))
abs x | x >= 0 = x
| otherwise = negate x
signum x | x > 0 = 1
signum 0 = 0
signum _ = -1
fromInteger i = I8# (narrow8Int# (integerToInt i))
-- | @since 2.01
instance Real Int8 where
toRational x = toInteger x % 1
-- | @since 2.01
instance Enum Int8 where
succ x
| x /= maxBound = x + 1
| otherwise = succError "Int8"
pred x
| x /= minBound = x - 1
| otherwise = predError "Int8"
toEnum i@(I# i#)
| i >= fromIntegral (minBound::Int8) && i <= fromIntegral (maxBound::Int8)
= I8# i#
| otherwise = toEnumError "Int8" i (minBound::Int8, maxBound::Int8)
fromEnum (I8# x#) = I# x#
enumFrom = boundedEnumFrom
enumFromThen = boundedEnumFromThen
-- | @since 2.01
instance Integral Int8 where
quot x@(I8# x#) y@(I8# y#)
| y == 0 = divZeroError
| y == (-1) && x == minBound = overflowError -- Note [Order of tests]
| otherwise = I8# (narrow8Int# (x# `quotInt#` y#))
rem (I8# x#) y@(I8# y#)
| y == 0 = divZeroError
| otherwise = I8# (narrow8Int# (x# `remInt#` y#))
div x@(I8# x#) y@(I8# y#)
| y == 0 = divZeroError
| y == (-1) && x == minBound = overflowError -- Note [Order of tests]
| otherwise = I8# (narrow8Int# (x# `divInt#` y#))
mod (I8# x#) y@(I8# y#)
| y == 0 = divZeroError
| otherwise = I8# (narrow8Int# (x# `modInt#` y#))
quotRem x@(I8# x#) y@(I8# y#)
| y == 0 = divZeroError
-- Note [Order of tests]
| y == (-1) && x == minBound = (overflowError, 0)
| otherwise = case x# `quotRemInt#` y# of
(# q, r #) ->
(I8# (narrow8Int# q),
I8# (narrow8Int# r))
divMod x@(I8# x#) y@(I8# y#)
| y == 0 = divZeroError
-- Note [Order of tests]
| y == (-1) && x == minBound = (overflowError, 0)
| otherwise = case x# `divModInt#` y# of
(# d, m #) ->
(I8# (narrow8Int# d),
I8# (narrow8Int# m))
toInteger (I8# x#) = smallInteger x#
-- | @since 2.01
instance Bounded Int8 where
minBound = -0x80
maxBound = 0x7F
-- | @since 2.01
instance Ix Int8 where
range (m,n) = [m..n]
unsafeIndex (m,_) i = fromIntegral i - fromIntegral m
inRange (m,n) i = m <= i && i <= n
-- | @since 2.01
instance Read Int8 where
readsPrec p s = [(fromIntegral (x::Int), r) | (x, r) <- readsPrec p s]
-- | @since 2.01
instance Bits Int8 where
{-# INLINE shift #-}
{-# INLINE bit #-}
{-# INLINE testBit #-}
(I8# x#) .&. (I8# y#) = I8# (word2Int# (int2Word# x# `and#` int2Word# y#))
(I8# x#) .|. (I8# y#) = I8# (word2Int# (int2Word# x# `or#` int2Word# y#))
(I8# x#) `xor` (I8# y#) = I8# (word2Int# (int2Word# x# `xor#` int2Word# y#))
complement (I8# x#) = I8# (word2Int# (not# (int2Word# x#)))
(I8# x#) `shift` (I# i#)
| isTrue# (i# >=# 0#) = I8# (narrow8Int# (x# `iShiftL#` i#))
| otherwise = I8# (x# `iShiftRA#` negateInt# i#)
(I8# x#) `shiftL` (I# i#) = I8# (narrow8Int# (x# `iShiftL#` i#))
(I8# x#) `unsafeShiftL` (I# i#) = I8# (narrow8Int# (x# `uncheckedIShiftL#` i#))
(I8# x#) `shiftR` (I# i#) = I8# (x# `iShiftRA#` i#)
(I8# x#) `unsafeShiftR` (I# i#) = I8# (x# `uncheckedIShiftRA#` i#)
(I8# x#) `rotate` (I# i#)
| isTrue# (i'# ==# 0#)
= I8# x#
| otherwise
= I8# (narrow8Int# (word2Int# ((x'# `uncheckedShiftL#` i'#) `or#`
(x'# `uncheckedShiftRL#` (8# -# i'#)))))
where
!x'# = narrow8Word# (int2Word# x#)
!i'# = word2Int# (int2Word# i# `and#` 7##)
bitSizeMaybe i = Just (finiteBitSize i)
bitSize i = finiteBitSize i
isSigned _ = True
popCount (I8# x#) = I# (word2Int# (popCnt8# (int2Word# x#)))
bit = bitDefault
testBit = testBitDefault
-- | @since 4.6.0.0
instance FiniteBits Int8 where
finiteBitSize _ = 8
countLeadingZeros (I8# x#) = I# (word2Int# (clz8# (int2Word# x#)))
countTrailingZeros (I8# x#) = I# (word2Int# (ctz8# (int2Word# x#)))
{-# RULES
"fromIntegral/Int8->Int8" fromIntegral = id :: Int8 -> Int8
"fromIntegral/a->Int8" fromIntegral = \x -> case fromIntegral x of I# x# -> I8# (narrow8Int# x#)
"fromIntegral/Int8->a" fromIntegral = \(I8# x#) -> fromIntegral (I# x#)
#-}
{-# RULES
"properFraction/Float->(Int8,Float)"
properFraction = \x ->
case properFraction x of {
(n, y) -> ((fromIntegral :: Int -> Int8) n, y :: Float) }
"truncate/Float->Int8"
truncate = (fromIntegral :: Int -> Int8) . (truncate :: Float -> Int)
"floor/Float->Int8"
floor = (fromIntegral :: Int -> Int8) . (floor :: Float -> Int)
"ceiling/Float->Int8"
ceiling = (fromIntegral :: Int -> Int8) . (ceiling :: Float -> Int)
"round/Float->Int8"
round = (fromIntegral :: Int -> Int8) . (round :: Float -> Int)
#-}
{-# RULES
"properFraction/Double->(Int8,Double)"
properFraction = \x ->
case properFraction x of {
(n, y) -> ((fromIntegral :: Int -> Int8) n, y :: Double) }
"truncate/Double->Int8"
truncate = (fromIntegral :: Int -> Int8) . (truncate :: Double -> Int)
"floor/Double->Int8"
floor = (fromIntegral :: Int -> Int8) . (floor :: Double -> Int)
"ceiling/Double->Int8"
ceiling = (fromIntegral :: Int -> Int8) . (ceiling :: Double -> Int)
"round/Double->Int8"
round = (fromIntegral :: Int -> Int8) . (round :: Double -> Int)
#-}
------------------------------------------------------------------------
-- type Int16
------------------------------------------------------------------------
-- Int16 is represented in the same way as Int. Operations may assume
-- and must ensure that it holds only values from its logical range.
data Int16 = I16# Int#
-- ^ 16-bit signed integer type
-- See GHC.Classes#matching_overloaded_methods_in_rules
-- | @since 2.01
instance Eq Int16 where
(==) = eqInt16
(/=) = neInt16
eqInt16, neInt16 :: Int16 -> Int16 -> Bool
eqInt16 (I16# x) (I16# y) = isTrue# (x ==# y)
neInt16 (I16# x) (I16# y) = isTrue# (x /=# y)
{-# INLINE [1] eqInt16 #-}
{-# INLINE [1] neInt16 #-}
-- | @since 2.01
instance Ord Int16 where
(<) = ltInt16
(<=) = leInt16
(>=) = geInt16
(>) = gtInt16
{-# INLINE [1] gtInt16 #-}
{-# INLINE [1] geInt16 #-}
{-# INLINE [1] ltInt16 #-}
{-# INLINE [1] leInt16 #-}
gtInt16, geInt16, ltInt16, leInt16 :: Int16 -> Int16 -> Bool
(I16# x) `gtInt16` (I16# y) = isTrue# (x ># y)
(I16# x) `geInt16` (I16# y) = isTrue# (x >=# y)
(I16# x) `ltInt16` (I16# y) = isTrue# (x <# y)
(I16# x) `leInt16` (I16# y) = isTrue# (x <=# y)
-- | @since 2.01
instance Show Int16 where
showsPrec p x = showsPrec p (fromIntegral x :: Int)
-- | @since 2.01
instance Num Int16 where
(I16# x#) + (I16# y#) = I16# (narrow16Int# (x# +# y#))
(I16# x#) - (I16# y#) = I16# (narrow16Int# (x# -# y#))
(I16# x#) * (I16# y#) = I16# (narrow16Int# (x# *# y#))
negate (I16# x#) = I16# (narrow16Int# (negateInt# x#))
abs x | x >= 0 = x
| otherwise = negate x
signum x | x > 0 = 1
signum 0 = 0
signum _ = -1
fromInteger i = I16# (narrow16Int# (integerToInt i))
-- | @since 2.01
instance Real Int16 where
toRational x = toInteger x % 1
-- | @since 2.01
instance Enum Int16 where
succ x
| x /= maxBound = x + 1
| otherwise = succError "Int16"
pred x
| x /= minBound = x - 1
| otherwise = predError "Int16"
toEnum i@(I# i#)
| i >= fromIntegral (minBound::Int16) && i <= fromIntegral (maxBound::Int16)
= I16# i#
| otherwise = toEnumError "Int16" i (minBound::Int16, maxBound::Int16)
fromEnum (I16# x#) = I# x#
enumFrom = boundedEnumFrom
enumFromThen = boundedEnumFromThen
-- | @since 2.01
instance Integral Int16 where
quot x@(I16# x#) y@(I16# y#)
| y == 0 = divZeroError
| y == (-1) && x == minBound = overflowError -- Note [Order of tests]
| otherwise = I16# (narrow16Int# (x# `quotInt#` y#))
rem (I16# x#) y@(I16# y#)
| y == 0 = divZeroError
| otherwise = I16# (narrow16Int# (x# `remInt#` y#))
div x@(I16# x#) y@(I16# y#)
| y == 0 = divZeroError
| y == (-1) && x == minBound = overflowError -- Note [Order of tests]
| otherwise = I16# (narrow16Int# (x# `divInt#` y#))
mod (I16# x#) y@(I16# y#)
| y == 0 = divZeroError
| otherwise = I16# (narrow16Int# (x# `modInt#` y#))
quotRem x@(I16# x#) y@(I16# y#)
| y == 0 = divZeroError
-- Note [Order of tests]
| y == (-1) && x == minBound = (overflowError, 0)
| otherwise = case x# `quotRemInt#` y# of
(# q, r #) ->
(I16# (narrow16Int# q),
I16# (narrow16Int# r))
divMod x@(I16# x#) y@(I16# y#)
| y == 0 = divZeroError
-- Note [Order of tests]
| y == (-1) && x == minBound = (overflowError, 0)
| otherwise = case x# `divModInt#` y# of
(# d, m #) ->
(I16# (narrow16Int# d),
I16# (narrow16Int# m))
toInteger (I16# x#) = smallInteger x#
-- | @since 2.01
instance Bounded Int16 where
minBound = -0x8000
maxBound = 0x7FFF
-- | @since 2.01
instance Ix Int16 where
range (m,n) = [m..n]
unsafeIndex (m,_) i = fromIntegral i - fromIntegral m
inRange (m,n) i = m <= i && i <= n
-- | @since 2.01
instance Read Int16 where
readsPrec p s = [(fromIntegral (x::Int), r) | (x, r) <- readsPrec p s]
-- | @since 2.01
instance Bits Int16 where
{-# INLINE shift #-}
{-# INLINE bit #-}
{-# INLINE testBit #-}
(I16# x#) .&. (I16# y#) = I16# (word2Int# (int2Word# x# `and#` int2Word# y#))
(I16# x#) .|. (I16# y#) = I16# (word2Int# (int2Word# x# `or#` int2Word# y#))
(I16# x#) `xor` (I16# y#) = I16# (word2Int# (int2Word# x# `xor#` int2Word# y#))
complement (I16# x#) = I16# (word2Int# (not# (int2Word# x#)))
(I16# x#) `shift` (I# i#)
| isTrue# (i# >=# 0#) = I16# (narrow16Int# (x# `iShiftL#` i#))
| otherwise = I16# (x# `iShiftRA#` negateInt# i#)
(I16# x#) `shiftL` (I# i#) = I16# (narrow16Int# (x# `iShiftL#` i#))
(I16# x#) `unsafeShiftL` (I# i#) = I16# (narrow16Int# (x# `uncheckedIShiftL#` i#))
(I16# x#) `shiftR` (I# i#) = I16# (x# `iShiftRA#` i#)
(I16# x#) `unsafeShiftR` (I# i#) = I16# (x# `uncheckedIShiftRA#` i#)
(I16# x#) `rotate` (I# i#)
| isTrue# (i'# ==# 0#)
= I16# x#
| otherwise
= I16# (narrow16Int# (word2Int# ((x'# `uncheckedShiftL#` i'#) `or#`
(x'# `uncheckedShiftRL#` (16# -# i'#)))))
where
!x'# = narrow16Word# (int2Word# x#)
!i'# = word2Int# (int2Word# i# `and#` 15##)
bitSizeMaybe i = Just (finiteBitSize i)
bitSize i = finiteBitSize i
isSigned _ = True
popCount (I16# x#) = I# (word2Int# (popCnt16# (int2Word# x#)))
bit = bitDefault
testBit = testBitDefault
-- | @since 4.6.0.0
instance FiniteBits Int16 where
finiteBitSize _ = 16
countLeadingZeros (I16# x#) = I# (word2Int# (clz16# (int2Word# x#)))
countTrailingZeros (I16# x#) = I# (word2Int# (ctz16# (int2Word# x#)))
{-# RULES
"fromIntegral/Word8->Int16" fromIntegral = \(W8# x#) -> I16# (word2Int# x#)
"fromIntegral/Int8->Int16" fromIntegral = \(I8# x#) -> I16# x#
"fromIntegral/Int16->Int16" fromIntegral = id :: Int16 -> Int16
"fromIntegral/a->Int16" fromIntegral = \x -> case fromIntegral x of I# x# -> I16# (narrow16Int# x#)
"fromIntegral/Int16->a" fromIntegral = \(I16# x#) -> fromIntegral (I# x#)
#-}
{-# RULES
"properFraction/Float->(Int16,Float)"
properFraction = \x ->
case properFraction x of {
(n, y) -> ((fromIntegral :: Int -> Int16) n, y :: Float) }
"truncate/Float->Int16"
truncate = (fromIntegral :: Int -> Int16) . (truncate :: Float -> Int)
"floor/Float->Int16"
floor = (fromIntegral :: Int -> Int16) . (floor :: Float -> Int)
"ceiling/Float->Int16"
ceiling = (fromIntegral :: Int -> Int16) . (ceiling :: Float -> Int)
"round/Float->Int16"
round = (fromIntegral :: Int -> Int16) . (round :: Float -> Int)
#-}
{-# RULES
"properFraction/Double->(Int16,Double)"
properFraction = \x ->
case properFraction x of {
(n, y) -> ((fromIntegral :: Int -> Int16) n, y :: Double) }
"truncate/Double->Int16"
truncate = (fromIntegral :: Int -> Int16) . (truncate :: Double -> Int)
"floor/Double->Int16"
floor = (fromIntegral :: Int -> Int16) . (floor :: Double -> Int)
"ceiling/Double->Int16"
ceiling = (fromIntegral :: Int -> Int16) . (ceiling :: Double -> Int)
"round/Double->Int16"
round = (fromIntegral :: Int -> Int16) . (round :: Double -> Int)
#-}
------------------------------------------------------------------------
-- type Int32
------------------------------------------------------------------------
-- Int32 is represented in the same way as Int.
data Int32 = I32# Int#
-- ^ 32-bit signed integer type
-- See GHC.Classes#matching_overloaded_methods_in_rules
-- | @since 2.01
instance Eq Int32 where
(==) = eqInt32
(/=) = neInt32
eqInt32, neInt32 :: Int32 -> Int32 -> Bool
eqInt32 (I32# x) (I32# y) = isTrue# (x ==# y)
neInt32 (I32# x) (I32# y) = isTrue# (x /=# y)
{-# INLINE [1] eqInt32 #-}
{-# INLINE [1] neInt32 #-}
-- | @since 2.01
instance Ord Int32 where
(<) = ltInt32
(<=) = leInt32
(>=) = geInt32
(>) = gtInt32
{-# INLINE [1] gtInt32 #-}
{-# INLINE [1] geInt32 #-}
{-# INLINE [1] ltInt32 #-}
{-# INLINE [1] leInt32 #-}
gtInt32, geInt32, ltInt32, leInt32 :: Int32 -> Int32 -> Bool
(I32# x) `gtInt32` (I32# y) = isTrue# (x ># y)
(I32# x) `geInt32` (I32# y) = isTrue# (x >=# y)
(I32# x) `ltInt32` (I32# y) = isTrue# (x <# y)
(I32# x) `leInt32` (I32# y) = isTrue# (x <=# y)
-- | @since 2.01
instance Show Int32 where
showsPrec p x = showsPrec p (fromIntegral x :: Int)
-- | @since 2.01
instance Num Int32 where
(I32# x#) + (I32# y#) = I32# (narrow32Int# (x# +# y#))
(I32# x#) - (I32# y#) = I32# (narrow32Int# (x# -# y#))
(I32# x#) * (I32# y#) = I32# (narrow32Int# (x# *# y#))
negate (I32# x#) = I32# (narrow32Int# (negateInt# x#))
abs x | x >= 0 = x
| otherwise = negate x
signum x | x > 0 = 1
signum 0 = 0
signum _ = -1
fromInteger i = I32# (narrow32Int# (integerToInt i))
-- | @since 2.01
instance Enum Int32 where
succ x
| x /= maxBound = x + 1
| otherwise = succError "Int32"
pred x
| x /= minBound = x - 1
| otherwise = predError "Int32"
toEnum (I# i#) = I32# i#
fromEnum (I32# x#) = I# x#
enumFrom = boundedEnumFrom
enumFromThen = boundedEnumFromThen
-- | @since 2.01
instance Integral Int32 where
quot x@(I32# x#) y@(I32# y#)
| y == 0 = divZeroError
| y == (-1) && x == minBound = overflowError -- Note [Order of tests]
| otherwise = I32# (narrow32Int# (x# `quotInt#` y#))
rem (I32# x#) y@(I32# y#)
| y == 0 = divZeroError
-- The quotRem CPU instruction fails for minBound `quotRem` -1,
-- but minBound `rem` -1 is well-defined (0). We therefore
-- special-case it.
| y == (-1) = 0
| otherwise = I32# (narrow32Int# (x# `remInt#` y#))
div x@(I32# x#) y@(I32# y#)
| y == 0 = divZeroError
| y == (-1) && x == minBound = overflowError -- Note [Order of tests]
| otherwise = I32# (narrow32Int# (x# `divInt#` y#))
mod (I32# x#) y@(I32# y#)
| y == 0 = divZeroError
-- The divMod CPU instruction fails for minBound `divMod` -1,
-- but minBound `mod` -1 is well-defined (0). We therefore
-- special-case it.
| y == (-1) = 0
| otherwise = I32# (narrow32Int# (x# `modInt#` y#))
quotRem x@(I32# x#) y@(I32# y#)
| y == 0 = divZeroError
-- Note [Order of tests]
| y == (-1) && x == minBound = (overflowError, 0)
| otherwise = case x# `quotRemInt#` y# of
(# q, r #) ->
(I32# (narrow32Int# q),
I32# (narrow32Int# r))
divMod x@(I32# x#) y@(I32# y#)
| y == 0 = divZeroError
-- Note [Order of tests]
| y == (-1) && x == minBound = (overflowError, 0)
| otherwise = case x# `divModInt#` y# of
(# d, m #) ->
(I32# (narrow32Int# d),
I32# (narrow32Int# m))
toInteger (I32# x#) = smallInteger x#
-- | @since 2.01
instance Read Int32 where
readsPrec p s = [(fromIntegral (x::Int), r) | (x, r) <- readsPrec p s]
-- | @since 2.01
instance Bits Int32 where
{-# INLINE shift #-}
{-# INLINE bit #-}
{-# INLINE testBit #-}
(I32# x#) .&. (I32# y#) = I32# (word2Int# (int2Word# x# `and#` int2Word# y#))
(I32# x#) .|. (I32# y#) = I32# (word2Int# (int2Word# x# `or#` int2Word# y#))
(I32# x#) `xor` (I32# y#) = I32# (word2Int# (int2Word# x# `xor#` int2Word# y#))
complement (I32# x#) = I32# (word2Int# (not# (int2Word# x#)))
(I32# x#) `shift` (I# i#)
| isTrue# (i# >=# 0#) = I32# (narrow32Int# (x# `iShiftL#` i#))
| otherwise = I32# (x# `iShiftRA#` negateInt# i#)
(I32# x#) `shiftL` (I# i#) = I32# (narrow32Int# (x# `iShiftL#` i#))
(I32# x#) `unsafeShiftL` (I# i#) =
I32# (narrow32Int# (x# `uncheckedIShiftL#` i#))
(I32# x#) `shiftR` (I# i#) = I32# (x# `iShiftRA#` i#)
(I32# x#) `unsafeShiftR` (I# i#) = I32# (x# `uncheckedIShiftRA#` i#)
(I32# x#) `rotate` (I# i#)
| isTrue# (i'# ==# 0#)
= I32# x#
| otherwise
= I32# (narrow32Int# (word2Int# ((x'# `uncheckedShiftL#` i'#) `or#`
(x'# `uncheckedShiftRL#` (32# -# i'#)))))
where
!x'# = narrow32Word# (int2Word# x#)
!i'# = word2Int# (int2Word# i# `and#` 31##)
bitSizeMaybe i = Just (finiteBitSize i)
bitSize i = finiteBitSize i
isSigned _ = True
popCount (I32# x#) = I# (word2Int# (popCnt32# (int2Word# x#)))
bit = bitDefault
testBit = testBitDefault
-- | @since 4.6.0.0
instance FiniteBits Int32 where
finiteBitSize _ = 32
countLeadingZeros (I32# x#) = I# (word2Int# (clz32# (int2Word# x#)))
countTrailingZeros (I32# x#) = I# (word2Int# (ctz32# (int2Word# x#)))
{-# RULES
"fromIntegral/Word8->Int32" fromIntegral = \(W8# x#) -> I32# (word2Int# x#)
"fromIntegral/Word16->Int32" fromIntegral = \(W16# x#) -> I32# (word2Int# x#)
"fromIntegral/Int8->Int32" fromIntegral = \(I8# x#) -> I32# x#
"fromIntegral/Int16->Int32" fromIntegral = \(I16# x#) -> I32# x#
"fromIntegral/Int32->Int32" fromIntegral = id :: Int32 -> Int32
"fromIntegral/a->Int32" fromIntegral = \x -> case fromIntegral x of I# x# -> I32# (narrow32Int# x#)
"fromIntegral/Int32->a" fromIntegral = \(I32# x#) -> fromIntegral (I# x#)
#-}
{-# RULES
"properFraction/Float->(Int32,Float)"
properFraction = \x ->
case properFraction x of {
(n, y) -> ((fromIntegral :: Int -> Int32) n, y :: Float) }
"truncate/Float->Int32"
truncate = (fromIntegral :: Int -> Int32) . (truncate :: Float -> Int)
"floor/Float->Int32"
floor = (fromIntegral :: Int -> Int32) . (floor :: Float -> Int)
"ceiling/Float->Int32"
ceiling = (fromIntegral :: Int -> Int32) . (ceiling :: Float -> Int)
"round/Float->Int32"
round = (fromIntegral :: Int -> Int32) . (round :: Float -> Int)
#-}
{-# RULES
"properFraction/Double->(Int32,Double)"
properFraction = \x ->
case properFraction x of {
(n, y) -> ((fromIntegral :: Int -> Int32) n, y :: Double) }
"truncate/Double->Int32"
truncate = (fromIntegral :: Int -> Int32) . (truncate :: Double -> Int)
"floor/Double->Int32"
floor = (fromIntegral :: Int -> Int32) . (floor :: Double -> Int)
"ceiling/Double->Int32"
ceiling = (fromIntegral :: Int -> Int32) . (ceiling :: Double -> Int)
"round/Double->Int32"
round = (fromIntegral :: Int -> Int32) . (round :: Double -> Int)
#-}
-- | @since 2.01
instance Real Int32 where
toRational x = toInteger x % 1
-- | @since 2.01
instance Bounded Int32 where
minBound = -0x80000000
maxBound = 0x7FFFFFFF
-- | @since 2.01
instance Ix Int32 where
range (m,n) = [m..n]
unsafeIndex (m,_) i = fromIntegral i - fromIntegral m
inRange (m,n) i = m <= i && i <= n
------------------------------------------------------------------------
-- type Int64
------------------------------------------------------------------------
data Int64 = I64# Int64#
-- ^ 64-bit signed integer type
-- See GHC.Classes#matching_overloaded_methods_in_rules
-- | @since 2.01
instance Eq Int64 where
(==) = eqInt64
(/=) = neInt64
eqInt64, neInt64 :: Int64 -> Int64 -> Bool
eqInt64 (I64# x) (I64# y) = isTrue# (x `eqInt64#` y)
neInt64 (I64# x) (I64# y) = isTrue# (x `neInt64#` y)
{-# INLINE [1] eqInt64 #-}
{-# INLINE [1] neInt64 #-}
-- | @since 2.01
instance Ord Int64 where
(<) = ltInt64
(<=) = leInt64
(>=) = geInt64
(>) = gtInt64
{-# INLINE [1] gtInt64 #-}
{-# INLINE [1] geInt64 #-}
{-# INLINE [1] ltInt64 #-}
{-# INLINE [1] leInt64 #-}
gtInt64, geInt64, ltInt64, leInt64 :: Int64 -> Int64 -> Bool
(I64# x) `gtInt64` (I64# y) = isTrue# (x `gtInt64#` y)
(I64# x) `geInt64` (I64# y) = isTrue# (x `geInt64#` y)
(I64# x) `ltInt64` (I64# y) = isTrue# (x `ltInt64#` y)
(I64# x) `leInt64` (I64# y) = isTrue# (x `leInt64#` y)
-- | @since 2.01
instance Show Int64 where
showsPrec p x = showsPrec p (toInteger x)
-- | @since 2.01
instance Num Int64 where
(I64# x#) + (I64# y#) = I64# (x# `plusInt64#` y#)
(I64# x#) - (I64# y#) = I64# (x# `minusInt64#` y#)
(I64# x#) * (I64# y#) = I64# (x# `timesInt64#` y#)
negate (I64# x#) = I64# (negateInt64# x#)
abs x | x >= 0 = x
| otherwise = negate x
signum x | x > 0 = 1
signum 0 = 0
signum _ = -1
fromInteger i = I64# (integerToInt64 i)
-- | @since 2.01
instance Enum Int64 where
succ x
| x /= maxBound = x + 1
| otherwise = succError "Int64"
pred x
| x /= minBound = x - 1
| otherwise = predError "Int64"
toEnum (I# i#) = I64# (intToInt64# i#)
fromEnum x@(I64# x#)
| x >= fromIntegral (minBound::Int) && x <= fromIntegral (maxBound::Int)
= I# (int64ToInt# x#)
| otherwise = fromEnumError "Int64" x
enumFrom = integralEnumFrom
enumFromThen = integralEnumFromThen
enumFromTo = integralEnumFromTo
enumFromThenTo = integralEnumFromThenTo
-- | @since 2.01
instance Integral Int64 where
quot x@(I64# x#) y@(I64# y#)
| y == 0 = divZeroError
| y == (-1) && x == minBound = overflowError -- Note [Order of tests]
| otherwise = I64# (x# `quotInt64#` y#)
rem (I64# x#) y@(I64# y#)
| y == 0 = divZeroError
-- The quotRem CPU instruction fails for minBound `quotRem` -1,
-- but minBound `rem` -1 is well-defined (0). We therefore
-- special-case it.
| y == (-1) = 0
| otherwise = I64# (x# `remInt64#` y#)
div x@(I64# x#) y@(I64# y#)
| y == 0 = divZeroError
| y == (-1) && x == minBound = overflowError -- Note [Order of tests]
| otherwise = I64# (x# `divInt64#` y#)
mod (I64# x#) y@(I64# y#)
| y == 0 = divZeroError
-- The divMod CPU instruction fails for minBound `divMod` -1,
-- but minBound `mod` -1 is well-defined (0). We therefore
-- special-case it.
| y == (-1) = 0
| otherwise = I64# (x# `modInt64#` y#)
quotRem x@(I64# x#) y@(I64# y#)
| y == 0 = divZeroError
-- Note [Order of tests]
| y == (-1) && x == minBound = (overflowError, 0)
| otherwise = (I64# (x# `quotInt64#` y#),
I64# (x# `remInt64#` y#))
divMod x@(I64# x#) y@(I64# y#)
| y == 0 = divZeroError
-- Note [Order of tests]
| y == (-1) && x == minBound = (overflowError, 0)
| otherwise = (I64# (x# `divInt64#` y#),
I64# (x# `modInt64#` y#))
toInteger (I64# x) = int64ToInteger x
divInt64#, modInt64# :: Int64# -> Int64# -> Int64#
-- Define div in terms of quot, being careful to avoid overflow (#7233)
x# `divInt64#` y#
| isTrue# (x# `gtInt64#` zero) && isTrue# (y# `ltInt64#` zero)
= ((x# `minusInt64#` one) `quotInt64#` y#) `minusInt64#` one
| isTrue# (x# `ltInt64#` zero) && isTrue# (y# `gtInt64#` zero)
= ((x# `plusInt64#` one) `quotInt64#` y#) `minusInt64#` one
| otherwise
= x# `quotInt64#` y#
where
!zero = intToInt64# 0#
!one = intToInt64# 1#
x# `modInt64#` y#
| isTrue# (x# `gtInt64#` zero) && isTrue# (y# `ltInt64#` zero) ||
isTrue# (x# `ltInt64#` zero) && isTrue# (y# `gtInt64#` zero)
= if isTrue# (r# `neInt64#` zero) then r# `plusInt64#` y# else zero
| otherwise = r#
where
!zero = intToInt64# 0#
!r# = x# `remInt64#` y#
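-- To illustrate the rounding difference these definitions encode (an
-- editorial example using ordinary 'Int64' values rather than primops):
--
-- > (-7) `quot` 2 == -3   whereas   (-7) `div` 2 == -4
-- > (-7) `rem`  2 == -1   whereas   (-7) `mod` 2 ==  1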
-- | @since 2.01
instance Read Int64 where
readsPrec p s = [(fromInteger x, r) | (x, r) <- readsPrec p s]
-- | @since 2.01
instance Bits Int64 where
{-# INLINE shift #-}
{-# INLINE bit #-}
{-# INLINE testBit #-}
(I64# x#) .&. (I64# y#) = I64# (word64ToInt64# (int64ToWord64# x# `and64#` int64ToWord64# y#))
(I64# x#) .|. (I64# y#) = I64# (word64ToInt64# (int64ToWord64# x# `or64#` int64ToWord64# y#))
(I64# x#) `xor` (I64# y#) = I64# (word64ToInt64# (int64ToWord64# x# `xor64#` int64ToWord64# y#))
complement (I64# x#) = I64# (word64ToInt64# (not64# (int64ToWord64# x#)))
(I64# x#) `shift` (I# i#)
| isTrue# (i# >=# 0#) = I64# (x# `iShiftL64#` i#)
| otherwise = I64# (x# `iShiftRA64#` negateInt# i#)
(I64# x#) `shiftL` (I# i#) = I64# (x# `iShiftL64#` i#)
(I64# x#) `unsafeShiftL` (I# i#) = I64# (x# `uncheckedIShiftL64#` i#)
(I64# x#) `shiftR` (I# i#) = I64# (x# `iShiftRA64#` i#)
(I64# x#) `unsafeShiftR` (I# i#) = I64# (x# `uncheckedIShiftRA64#` i#)
(I64# x#) `rotate` (I# i#)
| isTrue# (i'# ==# 0#)
= I64# x#
| otherwise
= I64# (word64ToInt64# ((x'# `uncheckedShiftL64#` i'#) `or64#`
(x'# `uncheckedShiftRL64#` (64# -# i'#))))
where
!x'# = int64ToWord64# x#
!i'# = word2Int# (int2Word# i# `and#` 63##)
bitSizeMaybe i = Just (finiteBitSize i)
bitSize i = finiteBitSize i
isSigned _ = True
popCount (I64# x#) =
I# (word2Int# (popCnt64# (int64ToWord64# x#)))
bit = bitDefault
testBit = testBitDefault
-- give the 64-bit shift operations the same treatment as the 32-bit
-- ones (see GHC.Base), namely we wrap them in tests to catch the
-- cases when we're shifting more than 64 bits to avoid unspecified
-- behaviour in the C shift operations.
iShiftL64#, iShiftRA64# :: Int64# -> Int# -> Int64#
a `iShiftL64#` b | isTrue# (b >=# 64#) = intToInt64# 0#
| otherwise = a `uncheckedIShiftL64#` b
a `iShiftRA64#` b | isTrue# (b >=# 64#) = if isTrue# (a `ltInt64#` (intToInt64# 0#))
then intToInt64# (-1#)
else intToInt64# 0#
| otherwise = a `uncheckedIShiftRA64#` b
{-# RULES
"fromIntegral/Int->Int64" fromIntegral = \(I# x#) -> I64# (intToInt64# x#)
"fromIntegral/Word->Int64" fromIntegral = \(W# x#) -> I64# (word64ToInt64# (wordToWord64# x#))
"fromIntegral/Word64->Int64" fromIntegral = \(W64# x#) -> I64# (word64ToInt64# x#)
"fromIntegral/Int64->Int" fromIntegral = \(I64# x#) -> I# (int64ToInt# x#)
"fromIntegral/Int64->Word" fromIntegral = \(I64# x#) -> W# (int2Word# (int64ToInt# x#))
"fromIntegral/Int64->Word64" fromIntegral = \(I64# x#) -> W64# (int64ToWord64# x#)
"fromIntegral/Int64->Int64" fromIntegral = id :: Int64 -> Int64
#-}
-- No RULES for RealFrac methods if Int is smaller than Int64: we can't
-- go through Int, and whether going through Integer is faster is uncertain.
instance FiniteBits Int64 where
finiteBitSize _ = 64
countLeadingZeros (I64# x#) = I# (word2Int# (clz64# (int64ToWord64# x#)))
countTrailingZeros (I64# x#) = I# (word2Int# (ctz64# (int64ToWord64# x#)))
-- | @since 2.01
instance Real Int64 where
toRational x = toInteger x % 1
-- | @since 2.01
instance Bounded Int64 where
minBound = -0x8000000000000000
maxBound = 0x7FFFFFFFFFFFFFFF
-- | @since 2.01
instance Ix Int64 where
range (m,n) = [m..n]
unsafeIndex (m,_) i = fromIntegral i - fromIntegral m
inRange (m,n) i = m <= i && i <= n
{- Note [Order of tests]
~~~~~~~~~~~~~~~~~~~~~~~~~
(See Trac #3065, #5161.) Suppose we had a definition like:
quot x y
| y == 0 = divZeroError
| x == minBound && y == (-1) = overflowError
| otherwise = x `primQuot` y
Note in particular that the
x == minBound
test comes before the
y == (-1)
test.
This expands to something like:
case y of
0 -> divZeroError
_ -> case x of
-9223372036854775808 ->
case y of
-1 -> overflowError
_ -> x `primQuot` y
_ -> x `primQuot` y
Now if we have the call (x `quot` 2), and quot gets inlined, then we get:
case 2 of
0 -> divZeroError
_ -> case x of
-9223372036854775808 ->
case 2 of
-1 -> overflowError
_ -> x `primQuot` 2
_ -> x `primQuot` 2
which simplifies to:
case x of
-9223372036854775808 -> x `primQuot` 2
_ -> x `primQuot` 2
Now we have a case with two identical branches, which would be
eliminated (assuming it doesn't affect strictness, which it doesn't in
this case), leaving the desired:
x `primQuot` 2
except in the minBound branch we know what x is, and GHC cleverly does
the division at compile time, giving:
case x of
-9223372036854775808 -> -4611686018427387904
_ -> x `primQuot` 2
So instead we use a definition like:
quot x y
| y == 0 = divZeroError
| y == (-1) && x == minBound = overflowError
| otherwise = x `primQuot` y
which gives us:
case y of
0 -> divZeroError
-1 ->
case x of
-9223372036854775808 -> overflowError
_ -> x `primQuot` y
_ -> x `primQuot` y
for which our call (x `quot` 2) expands to:
case 2 of
0 -> divZeroError
-1 ->
case x of
-9223372036854775808 -> overflowError
_ -> x `primQuot` 2
_ -> x `primQuot` 2
which simplifies to:
x `primQuot` 2
as required.
But we now have the same problem with a constant numerator: the call
(2 `quot` y) expands to
case y of
0 -> divZeroError
-1 ->
case 2 of
-9223372036854775808 -> overflowError
_ -> 2 `primQuot` y
_ -> 2 `primQuot` y
which simplifies to:
case y of
0 -> divZeroError
-1 -> 2 `primQuot` y
_ -> 2 `primQuot` y
which simplifies to:
case y of
0 -> divZeroError
-1 -> -2
_ -> 2 `primQuot` y
However, constant denominators are more common than constant numerators,
so the
y == (-1) && x == minBound
order gives us better code in the common case.
-}
|
rahulmutt/ghcvm
|
libraries/base/GHC/Int.hs
|
bsd-3-clause
| 37,025 | 0 | 17 | 11,926 | 9,801 | 5,143 | 4,658 | 648 | 2 |
--------------------------------------------------------------------
-- |
-- Module : Text.Feed.Export
-- Copyright : (c) Galois, Inc. 2008
-- License : BSD3
--
-- Maintainer: Sigbjorn Finne <[email protected]>
-- Stability : provisional
-- Description: Convert from Feeds to XML.
--
--------------------------------------------------------------------
module Text.Feed.Export
( Text.Feed.Export.xmlFeed -- :: Feed -> XML.Element
) where
import Text.Feed.Types
import Text.Atom.Feed.Export as Atom
import Text.RSS.Export as RSS
import Text.RSS1.Export as RSS1
import Text.XML.Light as XML
-- | 'xmlFeed f' serializes a @Feed@ document into a conforming
-- XML toplevel element.
xmlFeed :: Feed -> XML.Element
xmlFeed fe =
case fe of
AtomFeed f -> Atom.xmlFeed f
RSSFeed f -> RSS.xmlRSS f
RSS1Feed f -> RSS1.xmlFeed f
XMLFeed e -> e -- that was easy!
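-- A usage sketch (illustrative only, not part of the original module;
-- 'XML.showElement' comes from the Text.XML.Light import above):
--
-- > renderFeed :: Feed -> String
-- > renderFeed = XML.showElement . xmlFeed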
|
GaloisInc/feed
|
Text/Feed/Export.hs
|
bsd-3-clause
| 891 | 0 | 9 | 159 | 144 | 89 | 55 | 14 | 4 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE DeriveGeneric, NoImplicitPrelude, MagicHash,
ExistentialQuantification, ImplicitParams #-}
{-# OPTIONS_GHC -funbox-strict-fields #-}
{-# OPTIONS_HADDOCK hide #-}
-----------------------------------------------------------------------------
-- |
-- Module : GHC.IO.Exception
-- Copyright : (c) The University of Glasgow, 2009
-- License : see libraries/base/LICENSE
--
-- Maintainer : [email protected]
-- Stability : internal
-- Portability : non-portable
--
-- IO-related Exception types and functions
--
-----------------------------------------------------------------------------
module GHC.IO.Exception (
BlockedIndefinitelyOnMVar(..), blockedIndefinitelyOnMVar,
BlockedIndefinitelyOnSTM(..), blockedIndefinitelyOnSTM,
Deadlock(..),
AllocationLimitExceeded(..), allocationLimitExceeded,
AssertionFailed(..),
CompactionFailed(..),
cannotCompactFunction, cannotCompactPinned, cannotCompactMutable,
SomeAsyncException(..),
asyncExceptionToException, asyncExceptionFromException,
AsyncException(..), stackOverflow, heapOverflow,
ArrayException(..),
ExitCode(..),
ioException,
ioError,
IOError,
IOException(..),
IOErrorType(..),
userError,
assertError,
unsupportedOperation,
untangle,
) where
import GHC.Base
import GHC.Generics
import GHC.List
import GHC.IO
import GHC.Show
import GHC.Read
import GHC.Exception
import GHC.IO.Handle.Types
import GHC.OldList ( intercalate )
import {-# SOURCE #-} GHC.Stack.CCS
import Foreign.C.Types
import Data.Typeable ( cast )
-- ------------------------------------------------------------------------
-- Exception datatypes and operations
-- |The thread is blocked on an @MVar@, but there are no other references
-- to the @MVar@ so it can't ever continue.
data BlockedIndefinitelyOnMVar = BlockedIndefinitelyOnMVar
-- | @since 4.1.0.0
instance Exception BlockedIndefinitelyOnMVar
-- | @since 4.1.0.0
instance Show BlockedIndefinitelyOnMVar where
showsPrec _ BlockedIndefinitelyOnMVar = showString "thread blocked indefinitely in an MVar operation"
blockedIndefinitelyOnMVar :: SomeException -- for the RTS
blockedIndefinitelyOnMVar = toException BlockedIndefinitelyOnMVar
-----
-- |The thread is waiting to retry an STM transaction, but there are no
-- other references to any @TVar@s involved, so it can't ever continue.
data BlockedIndefinitelyOnSTM = BlockedIndefinitelyOnSTM
-- | @since 4.1.0.0
instance Exception BlockedIndefinitelyOnSTM
-- | @since 4.1.0.0
instance Show BlockedIndefinitelyOnSTM where
showsPrec _ BlockedIndefinitelyOnSTM = showString "thread blocked indefinitely in an STM transaction"
blockedIndefinitelyOnSTM :: SomeException -- for the RTS
blockedIndefinitelyOnSTM = toException BlockedIndefinitelyOnSTM
-----
-- |There are no runnable threads, so the program is deadlocked.
-- The @Deadlock@ exception is raised in the main thread only.
data Deadlock = Deadlock
-- | @since 4.1.0.0
instance Exception Deadlock
-- | @since 4.1.0.0
instance Show Deadlock where
showsPrec _ Deadlock = showString "<<deadlock>>"
-----
-- |This thread has exceeded its allocation limit. See
-- 'System.Mem.setAllocationCounter' and
-- 'System.Mem.enableAllocationLimit'.
--
-- @since 4.8.0.0
data AllocationLimitExceeded = AllocationLimitExceeded
-- | @since 4.8.0.0
instance Exception AllocationLimitExceeded where
toException = asyncExceptionToException
fromException = asyncExceptionFromException
-- | @since 4.7.1.0
instance Show AllocationLimitExceeded where
showsPrec _ AllocationLimitExceeded =
showString "allocation limit exceeded"
allocationLimitExceeded :: SomeException -- for the RTS
allocationLimitExceeded = toException AllocationLimitExceeded
-----
-- | Compaction found an object that cannot be compacted. Functions
-- cannot be compacted, nor can mutable objects or pinned objects.
-- See 'Data.Compact.compact'.
--
-- @since 4.10.0.0
newtype CompactionFailed = CompactionFailed String
-- | @since 4.10.0.0
instance Exception CompactionFailed where
-- | @since 4.10.0.0
instance Show CompactionFailed where
showsPrec _ (CompactionFailed why) =
showString ("compaction failed: " ++ why)
cannotCompactFunction :: SomeException -- for the RTS
cannotCompactFunction =
toException (CompactionFailed "cannot compact functions")
cannotCompactPinned :: SomeException -- for the RTS
cannotCompactPinned =
toException (CompactionFailed "cannot compact pinned objects")
cannotCompactMutable :: SomeException -- for the RTS
cannotCompactMutable =
toException (CompactionFailed "cannot compact mutable objects")
-----
-- |'assert' was applied to 'False'.
newtype AssertionFailed = AssertionFailed String
-- | @since 4.1.0.0
instance Exception AssertionFailed
-- | @since 4.1.0.0
instance Show AssertionFailed where
showsPrec _ (AssertionFailed err) = showString err
-----
-- |Superclass for asynchronous exceptions.
--
-- @since 4.7.0.0
data SomeAsyncException = forall e . Exception e => SomeAsyncException e
-- | @since 4.7.0.0
instance Show SomeAsyncException where
show (SomeAsyncException e) = show e
-- | @since 4.7.0.0
instance Exception SomeAsyncException
-- |@since 4.7.0.0
asyncExceptionToException :: Exception e => e -> SomeException
asyncExceptionToException = toException . SomeAsyncException
-- |@since 4.7.0.0
asyncExceptionFromException :: Exception e => SomeException -> Maybe e
asyncExceptionFromException x = do
SomeAsyncException a <- fromException x
cast a
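-- A sketch of the intended pattern for user-defined asynchronous exceptions
-- ('MyAsyncException' is purely illustrative):
--
-- > data MyAsyncException = MyAsyncException deriving Show
-- >
-- > instance Exception MyAsyncException where
-- >   toException   = asyncExceptionToException
-- >   fromException = asyncExceptionFromException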
-- |Asynchronous exceptions.
data AsyncException
= StackOverflow
-- ^The current thread\'s stack exceeded its limit.
-- Since an exception has been raised, the thread\'s stack
-- will certainly be below its limit again, but the
-- programmer should take remedial action
-- immediately.
| HeapOverflow
-- ^The program\'s heap is reaching its limit, and
-- the program should take action to reduce the amount of
-- live data it has. Notes:
--
-- * It is undefined which thread receives this exception.
--
-- * GHC currently does not throw 'HeapOverflow' exceptions.
| ThreadKilled
-- ^This exception is raised by another thread
-- calling 'Control.Concurrent.killThread', or by the system
-- if it needs to terminate the thread for some
-- reason.
| UserInterrupt
-- ^This exception is raised by default in the main thread of
-- the program when the user requests to terminate the program
-- via the usual mechanism(s) (e.g. Control-C in the console).
deriving (Eq, Ord)
-- | @since 4.7.0.0
instance Exception AsyncException where
toException = asyncExceptionToException
fromException = asyncExceptionFromException
-- | Exceptions generated by array operations
data ArrayException
= IndexOutOfBounds String
-- ^An attempt was made to index an array outside
-- its declared bounds.
| UndefinedElement String
-- ^An attempt was made to evaluate an element of an
-- array that had not been initialized.
deriving (Eq, Ord)
-- | @since 4.1.0.0
instance Exception ArrayException
-- for the RTS
stackOverflow, heapOverflow :: SomeException
stackOverflow = toException StackOverflow
heapOverflow = toException HeapOverflow
-- | @since 4.1.0.0
instance Show AsyncException where
showsPrec _ StackOverflow = showString "stack overflow"
showsPrec _ HeapOverflow = showString "heap overflow"
showsPrec _ ThreadKilled = showString "thread killed"
showsPrec _ UserInterrupt = showString "user interrupt"
-- | @since 4.1.0.0
instance Show ArrayException where
showsPrec _ (IndexOutOfBounds s)
= showString "array index out of range"
. (if not (null s) then showString ": " . showString s
else id)
showsPrec _ (UndefinedElement s)
= showString "undefined array element"
. (if not (null s) then showString ": " . showString s
else id)
-- -----------------------------------------------------------------------------
-- The ExitCode type
-- We need it here because it is used in ExitException in the
-- Exception datatype (above).
-- | Defines the exit codes that a program can return.
data ExitCode
= ExitSuccess -- ^ indicates successful termination;
| ExitFailure Int
-- ^ indicates program failure with an exit code.
-- The exact interpretation of the code is
-- operating-system dependent. In particular, some values
-- may be prohibited (e.g. 0 on a POSIX-compliant system).
deriving (Eq, Ord, Read, Show, Generic)
-- | @since 4.1.0.0
instance Exception ExitCode
ioException :: IOException -> IO a
ioException err = throwIO err
-- | Raise an 'IOError' in the 'IO' monad.
ioError :: IOError -> IO a
ioError = ioException
-- ---------------------------------------------------------------------------
-- IOError type
-- | The Haskell 2010 type for exceptions in the 'IO' monad.
-- Any I\/O operation may raise an 'IOError' instead of returning a result.
-- For a more general type of exception, including also those that arise
-- in pure code, see 'Control.Exception.Exception'.
--
-- In Haskell 2010, this is an opaque type.
type IOError = IOException
-- |Exceptions that occur in the @IO@ monad.
-- An @IOException@ records a more specific error type, a descriptive
-- string and maybe the handle that was used when the error was
-- flagged.
data IOException
= IOError {
ioe_handle :: Maybe Handle, -- the handle used by the action flagging
-- the error.
ioe_type :: IOErrorType, -- what it was.
ioe_location :: String, -- location.
ioe_description :: String, -- error type specific information.
ioe_errno :: Maybe CInt, -- errno leading to this error, if any.
ioe_filename :: Maybe FilePath -- filename the error is related to.
}
-- | @since 4.1.0.0
instance Exception IOException
-- | @since 4.1.0.0
instance Eq IOException where
(IOError h1 e1 loc1 str1 en1 fn1) == (IOError h2 e2 loc2 str2 en2 fn2) =
e1==e2 && str1==str2 && h1==h2 && loc1==loc2 && en1==en2 && fn1==fn2
-- | An abstract type that contains a value for each variant of 'IOError'.
data IOErrorType
-- Haskell 2010:
= AlreadyExists
| NoSuchThing
| ResourceBusy
| ResourceExhausted
| EOF
| IllegalOperation
| PermissionDenied
| UserError
-- GHC only:
| UnsatisfiedConstraints
| SystemError
| ProtocolError
| OtherError
| InvalidArgument
| InappropriateType
| HardwareFault
| UnsupportedOperation
| TimeExpired
| ResourceVanished
| Interrupted
-- | @since 4.1.0.0
instance Eq IOErrorType where
x == y = isTrue# (getTag x ==# getTag y)
-- | @since 4.1.0.0
instance Show IOErrorType where
showsPrec _ e =
showString $
case e of
AlreadyExists -> "already exists"
NoSuchThing -> "does not exist"
ResourceBusy -> "resource busy"
ResourceExhausted -> "resource exhausted"
EOF -> "end of file"
IllegalOperation -> "illegal operation"
PermissionDenied -> "permission denied"
UserError -> "user error"
HardwareFault -> "hardware fault"
InappropriateType -> "inappropriate type"
Interrupted -> "interrupted"
InvalidArgument -> "invalid argument"
OtherError -> "failed"
ProtocolError -> "protocol error"
ResourceVanished -> "resource vanished"
SystemError -> "system error"
TimeExpired -> "timeout"
UnsatisfiedConstraints -> "unsatisfied constraints" -- ultra-precise!
UnsupportedOperation -> "unsupported operation"
-- | Construct an 'IOError' value with a string describing the error.
-- The 'fail' method of the 'IO' instance of the 'Monad' class raises a
-- 'userError', thus:
--
-- > instance Monad IO where
-- > ...
-- > fail s = ioError (userError s)
--
userError :: String -> IOError
userError str = IOError Nothing UserError "" str Nothing Nothing
-- ---------------------------------------------------------------------------
-- Showing IOErrors
-- | @since 4.1.0.0
instance Show IOException where
showsPrec p (IOError hdl iot loc s _ fn) =
(case fn of
Nothing -> case hdl of
Nothing -> id
Just h -> showsPrec p h . showString ": "
Just name -> showString name . showString ": ") .
(case loc of
"" -> id
_ -> showString loc . showString ": ") .
showsPrec p iot .
(case s of
"" -> id
_ -> showString " (" . showString s . showString ")")
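-- An illustrative rendering of the above (hypothetical filename, location and
-- description; not taken from the original source):
--
-- > /tmp/missing.txt: openFile: does not exist (No such file or directory)
--
-- i.e. the filename (or handle) first, then the location, the 'IOErrorType',
-- and finally the description in parentheses.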
-- Note the use of "lazy". This means that
-- assert False (throw e)
-- will throw the assertion failure rather than e. See trac #5561.
assertError :: (?callStack :: CallStack) => Bool -> a -> a
assertError predicate v
| predicate = lazy v
| otherwise = unsafeDupablePerformIO $ do
ccsStack <- currentCallStack
let
implicitParamCallStack = prettyCallStackLines ?callStack
ccsCallStack = showCCSStack ccsStack
stack = intercalate "\n" $ implicitParamCallStack ++ ccsCallStack
throwIO (AssertionFailed ("Assertion failed\n" ++ stack))
unsupportedOperation :: IOError
unsupportedOperation =
(IOError Nothing UnsupportedOperation ""
"Operation is not supported" Nothing Nothing)
{-
(untangle coded message) expects "coded" to be of the form
"location|details"
It prints
location message details
-}
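-- An illustrative call (hypothetical strings; the first argument is really an
-- 'Addr#' literal, so the example is schematic):
--
-- > untangle "Foo.hs:3:5|pat"# "Non-exhaustive patterns in"
-- >   == "Foo.hs:3:5: Non-exhaustive patterns in pat\n"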
untangle :: Addr# -> String -> String
untangle coded message
= location
++ ": "
++ message
++ details
++ "\n"
where
coded_str = unpackCStringUtf8# coded
(location, details)
= case (span not_bar coded_str) of { (loc, rest) ->
case rest of
('|':det) -> (loc, ' ' : det)
_ -> (loc, "")
}
not_bar c = c /= '|'
|
olsner/ghc
|
libraries/base/GHC/IO/Exception.hs
|
bsd-3-clause
| 14,126 | 0 | 17 | 3,025 | 2,077 | 1,170 | 907 | 234 | 2 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE CPP #-}
-----------------------------------------------------------------------------
-- |
-- Module : Text.ParserCombinators.ReadPrec
-- Copyright : (c) The University of Glasgow 2002
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : non-portable (uses Text.ParserCombinators.ReadP)
--
-- This library defines parser combinators for precedence parsing.
-----------------------------------------------------------------------------
module Text.ParserCombinators.ReadPrec
(
ReadPrec,
-- * Precedences
Prec,
minPrec,
-- * Precedence operations
lift,
prec,
step,
reset,
-- * Other operations
-- | All are based directly on their similarly-named 'ReadP' counterparts.
get,
look,
(+++),
(<++),
pfail,
choice,
-- * Converters
readPrec_to_P,
readP_to_Prec,
readPrec_to_S,
readS_to_Prec,
)
where
import Text.ParserCombinators.ReadP
( ReadP
, ReadS
, readP_to_S
, readS_to_P
)
import qualified Text.ParserCombinators.ReadP as ReadP
( get
, look
, (+++), (<++)
, pfail
)
import GHC.Num( Num(..) )
import GHC.Base
#if __GLASGOW_HASKELL__ > 710
import qualified Control.Monad.Fail as MonadFail
#endif
-- ---------------------------------------------------------------------------
-- The readPrec type
newtype ReadPrec a = P (Prec -> ReadP a)
-- Functor, Monad, MonadPlus
instance Functor ReadPrec where
fmap h (P f) = P (\n -> fmap h (f n))
instance Applicative ReadPrec where
pure x = P (\_ -> pure x)
(<*>) = ap
instance Monad ReadPrec where
fail s = P (\_ -> fail s)
P f >>= k = P (\n -> do a <- f n; let P f' = k a in f' n)
#if __GLASGOW_HASKELL__ > 710
instance MonadFail.MonadFail ReadPrec where
fail s = P (\_ -> fail s)
#endif
instance MonadPlus ReadPrec where
mzero = pfail
mplus = (+++)
instance Alternative ReadPrec where
empty = mzero
(<|>) = mplus
-- precedences
type Prec = Int
minPrec :: Prec
minPrec = 0
-- ---------------------------------------------------------------------------
-- Operations over ReadPrec
lift :: ReadP a -> ReadPrec a
-- ^ Lift a precedence-insensitive 'ReadP' to a 'ReadPrec'.
lift m = P (\_ -> m)
step :: ReadPrec a -> ReadPrec a
-- ^ Increases the precedence context by one.
step (P f) = P (\n -> f (n+1))
reset :: ReadPrec a -> ReadPrec a
-- ^ Resets the precedence context to zero.
reset (P f) = P (\_ -> f minPrec)
prec :: Prec -> ReadPrec a -> ReadPrec a
-- ^ @(prec n p)@ checks whether the precedence context is
-- less than or equal to @n@, and
--
-- * if not, fails
--
-- * if so, parses @p@ in context @n@.
prec n (P f) = P (\c -> if c <= n then f n else ReadP.pfail)
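-- An illustrative (non-exported) sketch, added for exposition: it accepts a
-- single opening parenthesis only while the precedence context is at most 9,
-- then reads the following character with the context raised by 'step', much
-- as a 'Read' instance for a bracketing construct would.
openParenSketch :: ReadPrec Char
openParenSketch = prec 9 $ do
  c <- get
  if c == '(' then step get else pfail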
-- ---------------------------------------------------------------------------
-- Derived operations
get :: ReadPrec Char
-- ^ Consumes and returns the next character.
-- Fails if there is no input left.
get = lift ReadP.get
look :: ReadPrec String
-- ^ Look-ahead: returns the part of the input that is left, without
-- consuming it.
look = lift ReadP.look
(+++) :: ReadPrec a -> ReadPrec a -> ReadPrec a
-- ^ Symmetric choice.
P f1 +++ P f2 = P (\n -> f1 n ReadP.+++ f2 n)
(<++) :: ReadPrec a -> ReadPrec a -> ReadPrec a
-- ^ Local, exclusive, left-biased choice: If left parser
-- locally produces any result at all, then right parser is
-- not used.
P f1 <++ P f2 = P (\n -> f1 n ReadP.<++ f2 n)
pfail :: ReadPrec a
-- ^ Always fails.
pfail = lift ReadP.pfail
choice :: [ReadPrec a] -> ReadPrec a
-- ^ Combines all parsers in the specified list.
choice ps = foldr (+++) pfail ps
-- ---------------------------------------------------------------------------
-- Converting between ReadPrec and Read
readPrec_to_P :: ReadPrec a -> (Int -> ReadP a)
readPrec_to_P (P f) = f
readP_to_Prec :: (Int -> ReadP a) -> ReadPrec a
readP_to_Prec f = P f
readPrec_to_S :: ReadPrec a -> (Int -> ReadS a)
readPrec_to_S (P f) n = readP_to_S (f n)
readS_to_Prec :: (Int -> ReadS a) -> ReadPrec a
readS_to_Prec f = P (\n -> readS_to_P (f n))
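-- An illustrative use of the converters (added for exposition): running a
-- 'ReadPrec' parser directly over a 'String' at the outermost precedence.
--
-- > readPrec_to_S get minPrec "abc"  ==  [('a', "bc")]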
|
elieux/ghc
|
libraries/base/Text/ParserCombinators/ReadPrec.hs
|
bsd-3-clause
| 4,185 | 0 | 15 | 835 | 1,049 | 579 | 470 | 80 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module TwinkleBear.Lesson01 (main) where
import Prelude hiding (init)
import Linear
import Linear.Affine ( Point(P) )
import qualified SDL
import Paths_sdl2 (getDataFileName)
main :: IO ()
main = do
SDL.initializeAll
let winConfig = SDL.defaultWindow { SDL.windowPosition = SDL.Absolute (P (V2 100 100))
, SDL.windowInitialSize = V2 640 480 }
rdrConfig = SDL.RendererConfig { SDL.rendererType = SDL.AcceleratedVSyncRenderer
, SDL.rendererTargetTexture = True }
window <- SDL.createWindow "Hello World!" winConfig
renderer <- SDL.createRenderer window (-1) rdrConfig
bmp <- getDataFileName "examples/twinklebear/hello.bmp" >>= SDL.loadBMP
tex <- SDL.createTextureFromSurface renderer bmp
SDL.freeSurface bmp
SDL.clear renderer
SDL.copy renderer tex Nothing Nothing
SDL.present renderer
SDL.delay 2000
SDL.destroyTexture tex
SDL.destroyRenderer renderer
SDL.destroyWindow window
SDL.quit
|
seppeljordan/sdl2
|
examples/twinklebear/Lesson01.hs
|
bsd-3-clause
| 1,042 | 0 | 16 | 228 | 290 | 144 | 146 | 27 | 1 |
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
module Main where
-- Protolude uses <> and options from Semigroups, but
-- optparse-applicative hasn't caught up yet.
import Protolude hiding ((<>))
import Control.Monad.Catch (MonadMask)
import Data.Monoid ((<>))
import Data.Text (unwords)
import Options.Applicative
(Parser, argument, auto, command, execParser, fullDesc, header,
help, helper, hsubparser, info, long, metavar, progDesc, short,
showDefault, value)
import qualified Options.Applicative as Options (option)
import System.GPIO.Linux.Sysfs (runSysfsGpioIO)
import System.GPIO.Monad
(MonadGpio, Pin, PinInputMode(..), PinInterruptMode(..),
PinOutputMode(..), PinValue(..), pins, pollPinTimeout,
setPinInputMode, setPinInterruptMode, setPinOutputMode, togglePin,
withPin)
-- Only one for now.
data Interpreter =
SysfsIO
deriving (Eq,Show,Read)
data GlobalOptions =
GlobalOptions {_interpreter :: !Interpreter
,_cmd :: !Command}
data Command
= ListPins
| PollPin PollPinOptions
listPinsCmd :: Parser Command
listPinsCmd = pure ListPins
data PollPinOptions =
PollPinOptions {_period :: !Int
,_trigger :: !PinInterruptMode
,_timeout :: !Int
,_outputPin :: !Pin
,_inputPin :: !Pin}
pollPinCmd :: Parser Command
pollPinCmd = PollPin <$> pollPinOptions
oneSecond :: Int
oneSecond = 1 * 1000000
pollPinOptions :: Parser PollPinOptions
pollPinOptions =
PollPinOptions <$>
Options.option auto (long "period" <>
short 'p' <>
metavar "INT" <>
value oneSecond <>
showDefault <>
help "Delay between output pin value toggles (in microseconds)") <*>
Options.option auto (long "trigger" <>
short 't' <>
metavar "Disabled|RisingEdge|FallingEdge|Level" <>
value Level <>
showDefault <>
help "Event on which to trigger the input pin") <*>
Options.option auto (long "timeout" <>
short 'T' <>
metavar "INT" <>
value (-1) <>
help "Poll timeout (in microseconds)") <*>
argument auto (metavar "INPIN") <*>
argument auto (metavar "OUTPIN")
cmds :: Parser GlobalOptions
cmds =
GlobalOptions <$>
Options.option auto (long "interpreter" <>
short 'i' <>
metavar "SysfsIO" <>
value SysfsIO <>
showDefault <>
help "Choose the GPIO interpreter (system) to use") <*>
hsubparser
(command "listPins" (info listPinsCmd (progDesc "List the GPIO pins available on the system")) <>
command "pollPin" (info pollPinCmd (progDesc "Drive INPIN using OUTPIN and wait for interrupts. (Make sure the pins are connected!")))
run :: GlobalOptions -> IO ()
run (GlobalOptions SysfsIO (PollPin (PollPinOptions period trigger timeout inputPin outputPin))) =
void $
concurrently
(void $ runSysfsGpioIO $ pollInput inputPin trigger timeout)
(runSysfsGpioIO $ driveOutput outputPin period)
run (GlobalOptions SysfsIO ListPins) = runSysfsGpioIO listPins
-- | Define a constraint that can work with multiple 'MonadGpio'
-- interpreters.
type GpioM h m = (Applicative m, MonadMask m, MonadIO m, MonadGpio h m)
listPins :: (GpioM h m) => m ()
listPins =
pins >>= \case
[] -> putText "No GPIO pins found on this system"
ps -> for_ ps $ liftIO . print
pollInput :: (GpioM h m) => Pin -> PinInterruptMode -> Int -> m ()
pollInput p trigger timeout =
withPin p $ \h ->
do setPinInputMode h InputDefault
setPinInterruptMode h trigger
forever $
do result <- pollPinTimeout h timeout
case result of
Nothing -> putText $ unwords ["readPin timed out after", show timeout, "microseconds"]
Just v -> putText $ unwords ["Input:", show v]
driveOutput :: (GpioM h m) => Pin -> Int -> m ()
driveOutput p delay =
withPin p $ \h ->
do setPinOutputMode h OutputDefault Low
forever $
do liftIO $ threadDelay delay
v <- togglePin h
putText $ unwords ["Output:", show v]
main :: IO ()
main = execParser opts >>= run
where
opts =
info (helper <*> cmds)
(fullDesc <>
progDesc "Example hpio programs." <>
header "hpio-example - run hpio demonstrations.")
|
dhess/gpio
|
examples/Gpio.hs
|
bsd-3-clause
| 4,708 | 0 | 18 | 1,425 | 1,171 | 614 | 557 | 126 | 2 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE PatternGuards #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Simple.Configure
-- Copyright : Isaac Jones 2003-2005
-- License : BSD3
--
-- Maintainer : [email protected]
-- Portability : portable
--
-- This deals with the /configure/ phase. It provides the 'configure' action
-- which is given the package description and configure flags. It then tries
-- to: configure the compiler; resolves any conditionals in the package
-- description; resolve the package dependencies; check if all the extensions
-- used by this package are supported by the compiler; check that all the build
-- tools are available (including version checks if appropriate); checks for
-- any required @pkg-config@ packages (updating the 'BuildInfo' with the
-- results)
--
-- Then based on all this it saves the info in the 'LocalBuildInfo' and writes
-- it out to the @dist\/setup-config@ file. It also displays various details to
-- the user, the amount of information displayed depending on the verbosity
-- level.
module Distribution.Simple.Configure (configure,
writePersistBuildConfig,
getConfigStateFile,
getPersistBuildConfig,
checkPersistBuildConfigOutdated,
tryGetPersistBuildConfig,
maybeGetPersistBuildConfig,
findDistPref, findDistPrefOrDefault,
computeComponentId,
computeCompatPackageKey,
computeCompatPackageName,
localBuildInfoFile,
getInstalledPackages,
getInstalledPackagesMonitorFiles,
getPackageDBContents,
configCompiler, configCompilerAux,
configCompilerEx, configCompilerAuxEx,
ccLdOptionsBuildInfo,
checkForeignDeps,
interpretPackageDbFlags,
ConfigStateFileError(..),
tryGetConfigStateFile,
platformDefines,
relaxPackageDeps,
)
where
import Distribution.Compiler
import Distribution.Utils.NubList
import Distribution.Simple.Compiler hiding (Flag)
import Distribution.Simple.PreProcess
import Distribution.Package
import qualified Distribution.InstalledPackageInfo as Installed
import Distribution.InstalledPackageInfo (InstalledPackageInfo
,emptyInstalledPackageInfo)
import qualified Distribution.Simple.PackageIndex as PackageIndex
import Distribution.Simple.PackageIndex (InstalledPackageIndex)
import Distribution.PackageDescription as PD hiding (Flag)
import Distribution.ModuleName
import Distribution.PackageDescription.Configuration
import Distribution.PackageDescription.Check hiding (doesFileExist)
import Distribution.Simple.Program
import Distribution.Simple.Setup as Setup
import qualified Distribution.Simple.InstallDirs as InstallDirs
import Distribution.Simple.LocalBuildInfo
import Distribution.Simple.Utils
import Distribution.Simple.Register (createInternalPackageDB)
import Distribution.System
import Distribution.Version
import Distribution.Verbosity
import qualified Distribution.Simple.GHC as GHC
import qualified Distribution.Simple.GHCJS as GHCJS
import qualified Distribution.Simple.JHC as JHC
import qualified Distribution.Simple.LHC as LHC
import qualified Distribution.Simple.UHC as UHC
import qualified Distribution.Simple.HaskellSuite as HaskellSuite
-- Prefer the more generic Data.Traversable.mapM to Prelude.mapM
import Prelude hiding ( mapM )
import Control.Exception
( Exception, evaluate, throw, throwIO, try )
import Control.Exception ( ErrorCall )
import Control.Monad
( liftM, when, unless, foldM, filterM, mplus )
import Distribution.Compat.Binary ( decodeOrFailIO, encode )
import GHC.Fingerprint ( Fingerprint(..), fingerprintString )
import Data.ByteString.Lazy (ByteString)
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy.Char8 as BLC8
import Data.List
( (\\), nub, partition, isPrefixOf, inits, stripPrefix )
import Data.Maybe
( isNothing, catMaybes, fromMaybe, mapMaybe, isJust )
import Data.Either
( partitionEithers )
import qualified Data.Set as Set
import Data.Monoid as Mon ( Monoid(..) )
import qualified Data.Map as Map
import Data.Map (Map)
import Data.Traversable
( mapM )
import Data.Typeable
import Data.Char ( chr, isAlphaNum )
import Numeric ( showIntAtBase )
import System.Directory
( doesFileExist, createDirectoryIfMissing, getTemporaryDirectory )
import System.FilePath
( (</>), isAbsolute )
import qualified System.Info
( compilerName, compilerVersion )
import System.IO
( hPutStrLn, hClose )
import Distribution.Text
( Text(disp), defaultStyle, display, simpleParse )
import Text.PrettyPrint
( Doc, (<>), (<+>), ($+$), char, comma, empty, hsep, nest
, punctuate, quotes, render, renderStyle, sep, text )
import Distribution.Compat.Environment ( lookupEnv )
import Distribution.Compat.Exception ( catchExit, catchIO )
import Data.Graph (graphFromEdges, topSort)
-- | The errors that can be thrown when reading the @setup-config@ file.
data ConfigStateFileError
= ConfigStateFileNoHeader -- ^ No header found.
| ConfigStateFileBadHeader -- ^ Incorrect header.
| ConfigStateFileNoParse -- ^ Cannot parse file contents.
| ConfigStateFileMissing -- ^ No file!
| ConfigStateFileBadVersion PackageIdentifier PackageIdentifier
(Either ConfigStateFileError LocalBuildInfo) -- ^ Mismatched version.
deriving (Typeable)
-- | Format a 'ConfigStateFileError' as a user-facing error message.
dispConfigStateFileError :: ConfigStateFileError -> Doc
dispConfigStateFileError ConfigStateFileNoHeader =
text "Saved package config file header is missing."
<+> text "Re-run the 'configure' command."
dispConfigStateFileError ConfigStateFileBadHeader =
text "Saved package config file header is corrupt."
<+> text "Re-run the 'configure' command."
dispConfigStateFileError ConfigStateFileNoParse =
text "Saved package config file is corrupt."
<+> text "Re-run the 'configure' command."
dispConfigStateFileError ConfigStateFileMissing =
text "Run the 'configure' command first."
dispConfigStateFileError (ConfigStateFileBadVersion oldCabal oldCompiler _) =
text "Saved package config file is outdated:"
$+$ badCabal $+$ badCompiler
$+$ text "Re-run the 'configure' command."
where
badCabal =
text "• the Cabal version changed from"
<+> disp oldCabal <+> "to" <+> disp currentCabalId
badCompiler
| oldCompiler == currentCompilerId = empty
| otherwise =
text "• the compiler changed from"
<+> disp oldCompiler <+> "to" <+> disp currentCompilerId
instance Show ConfigStateFileError where
show = renderStyle defaultStyle . dispConfigStateFileError
instance Exception ConfigStateFileError
-- | Read the 'localBuildInfoFile'. Throw an exception if the file is
-- missing, if the file cannot be read, or if the file was created by an older
-- version of Cabal.
getConfigStateFile :: FilePath -- ^ The file path of the @setup-config@ file.
-> IO LocalBuildInfo
getConfigStateFile filename = do
exists <- doesFileExist filename
unless exists $ throwIO ConfigStateFileMissing
-- Read the config file into a strict ByteString to avoid problems with
-- lazy I/O, then convert to lazy because the binary package needs that.
contents <- BS.readFile filename
let (header, body) = BLC8.span (/='\n') (BLC8.fromChunks [contents])
headerParseResult <- try $ evaluate $ parseHeader header
let (cabalId, compId) =
case headerParseResult of
Left (_ :: ErrorCall) -> throw ConfigStateFileBadHeader
Right x -> x
let getStoredValue = do
result <- decodeOrFailIO (BLC8.tail body)
case result of
Left _ -> throw ConfigStateFileNoParse
Right x -> return x
deferErrorIfBadVersion act
| cabalId /= currentCabalId = do
eResult <- try act
throw $ ConfigStateFileBadVersion cabalId compId eResult
| otherwise = act
deferErrorIfBadVersion getStoredValue
-- | Read the 'localBuildInfoFile', returning either an error or the local build
-- info.
tryGetConfigStateFile :: FilePath -- ^ The file path of the @setup-config@ file.
-> IO (Either ConfigStateFileError LocalBuildInfo)
tryGetConfigStateFile = try . getConfigStateFile
-- | Try to read the 'localBuildInfoFile'.
tryGetPersistBuildConfig :: FilePath -- ^ The @dist@ directory path.
-> IO (Either ConfigStateFileError LocalBuildInfo)
tryGetPersistBuildConfig = try . getPersistBuildConfig
-- | Read the 'localBuildInfoFile'. Throw an exception if the file is
-- missing, if the file cannot be read, or if the file was created by an older
-- version of Cabal.
getPersistBuildConfig :: FilePath -- ^ The @dist@ directory path.
-> IO LocalBuildInfo
getPersistBuildConfig = getConfigStateFile . localBuildInfoFile
-- | Try to read the 'localBuildInfoFile'.
maybeGetPersistBuildConfig :: FilePath -- ^ The @dist@ directory path.
-> IO (Maybe LocalBuildInfo)
maybeGetPersistBuildConfig =
liftM (either (const Nothing) Just) . tryGetPersistBuildConfig
-- | After running configure, output the 'LocalBuildInfo' to the
-- 'localBuildInfoFile'.
writePersistBuildConfig :: FilePath -- ^ The @dist@ directory path.
-> LocalBuildInfo -- ^ The 'LocalBuildInfo' to write.
-> IO ()
writePersistBuildConfig distPref lbi = do
createDirectoryIfMissing False distPref
writeFileAtomic (localBuildInfoFile distPref) $
BLC8.unlines [showHeader pkgId, encode lbi]
where
pkgId = packageId $ localPkgDescr lbi
-- | Identifier of the current Cabal package.
currentCabalId :: PackageIdentifier
currentCabalId = PackageIdentifier (PackageName "Cabal") cabalVersion
-- | Identifier of the current compiler package.
currentCompilerId :: PackageIdentifier
currentCompilerId = PackageIdentifier (PackageName System.Info.compilerName)
System.Info.compilerVersion
-- | Parse the @setup-config@ file header, returning the package identifiers
-- for Cabal and the compiler.
parseHeader :: ByteString -- ^ The file contents.
-> (PackageIdentifier, PackageIdentifier)
parseHeader header = case BLC8.words header of
["Saved", "package", "config", "for", pkgId, "written", "by", cabalId,
"using", compId] ->
fromMaybe (throw ConfigStateFileBadHeader) $ do
_ <- simpleParse (BLC8.unpack pkgId) :: Maybe PackageIdentifier
cabalId' <- simpleParse (BLC8.unpack cabalId)
compId' <- simpleParse (BLC8.unpack compId)
return (cabalId', compId')
_ -> throw ConfigStateFileNoHeader
-- | Generate the @setup-config@ file header.
showHeader :: PackageIdentifier -- ^ The processed package.
-> ByteString
showHeader pkgId = BLC8.unwords
[ "Saved", "package", "config", "for"
, BLC8.pack $ display pkgId
, "written", "by"
, BLC8.pack $ display currentCabalId
, "using"
, BLC8.pack $ display currentCompilerId
]
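-- An example of the header this produces (hypothetical package and tool
-- versions):
--
-- > Saved package config for foo-1.0 written by Cabal-1.24.2.0 using ghc-8.0.1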
-- | Check that localBuildInfoFile is up-to-date with respect to the
-- .cabal file.
checkPersistBuildConfigOutdated :: FilePath -> FilePath -> IO Bool
checkPersistBuildConfigOutdated distPref pkg_descr_file = do
pkg_descr_file `moreRecentFile` (localBuildInfoFile distPref)
-- | Get the path of @dist\/setup-config@.
localBuildInfoFile :: FilePath -- ^ The @dist@ directory path.
-> FilePath
localBuildInfoFile distPref = distPref </> "setup-config"
-- -----------------------------------------------------------------------------
-- * Configuration
-- -----------------------------------------------------------------------------
-- | Return the \"dist/\" prefix, or the default prefix. The prefix is taken
-- from (in order of highest to lowest preference) the override prefix, the
-- \"CABAL_BUILDDIR\" environment variable, or the default prefix.
findDistPref :: FilePath -- ^ default \"dist\" prefix
-> Setup.Flag FilePath -- ^ override \"dist\" prefix
-> IO FilePath
findDistPref defDistPref overrideDistPref = do
envDistPref <- liftM parseEnvDistPref (lookupEnv "CABAL_BUILDDIR")
return $ fromFlagOrDefault defDistPref (mappend envDistPref overrideDistPref)
where
parseEnvDistPref env =
case env of
Just distPref | not (null distPref) -> toFlag distPref
_ -> NoFlag
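-- For instance (illustrative): with @CABAL_BUILDDIR=dist-ci@ set in the
-- environment and no override flag given, the result is @dist-ci@; an
-- explicit override (e.g. via @--builddir@) always takes precedence over the
-- environment variable.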
-- | Return the \"dist/\" prefix, or the default prefix. The prefix is taken
-- from (in order of highest to lowest preference) the override prefix, the
-- \"CABAL_BUILDDIR\" environment variable, or 'defaultDistPref' is used. Call
-- this function to resolve a @*DistPref@ flag whenever it is not known to be
-- set. (The @*DistPref@ flags are always set to a definite value before
-- invoking 'UserHooks'.)
findDistPrefOrDefault :: Setup.Flag FilePath -- ^ override \"dist\" prefix
-> IO FilePath
findDistPrefOrDefault = findDistPref defaultDistPref
-- |Perform the \"@.\/setup configure@\" action.
-- Returns the @.setup-config@ file.
configure :: (GenericPackageDescription, HookedBuildInfo)
-> ConfigFlags -> IO LocalBuildInfo
configure (pkg_descr0', pbi) cfg = do
let pkg_descr0 =
-- Ignore '--allow-newer' when we're given '--exact-configuration'.
if fromFlagOrDefault False (configExactConfiguration cfg)
then pkg_descr0'
else relaxPackageDeps
(fromMaybe AllowNewerNone $ configAllowNewer cfg)
pkg_descr0'
setupMessage verbosity "Configuring" (packageId pkg_descr0)
checkDeprecatedFlags verbosity cfg
checkExactConfiguration pkg_descr0 cfg
-- Where to build the package
let distPref :: FilePath -- e.g. dist
distPref = fromFlag (configDistPref cfg)
buildDir :: FilePath -- e.g. dist/build
-- fromFlag OK due to Distribution.Simple calling
-- findDistPrefOrDefault to fill it in
buildDir = distPref </> "build"
createDirectoryIfMissingVerbose (lessVerbose verbosity) True buildDir
-- What package database(s) to use
let packageDbs :: PackageDBStack
packageDbs
= interpretPackageDbFlags
(fromFlag (configUserInstall cfg))
(configPackageDBs cfg)
-- comp: the compiler we're building with
-- compPlatform: the platform we're building for
-- programsConfig: location and args of all programs we're
-- building with
(comp :: Compiler,
compPlatform :: Platform,
programsConfig :: ProgramConfiguration)
<- configCompilerEx
(flagToMaybe (configHcFlavor cfg))
(flagToMaybe (configHcPath cfg))
(flagToMaybe (configHcPkg cfg))
(mkProgramsConfig cfg (configPrograms cfg))
(lessVerbose verbosity)
-- The InstalledPackageIndex of all installed packages
installedPackageSet :: InstalledPackageIndex
<- getInstalledPackages (lessVerbose verbosity) comp
packageDbs programsConfig
-- An approximate InstalledPackageIndex of all (possible) internal libraries.
-- This database is used to bootstrap the process before we know precisely
-- what these libraries are supposed to be.
let internalPackageSet :: InstalledPackageIndex
internalPackageSet = getInternalPackages pkg_descr0
-- allConstraints: The set of all 'Dependency's we have. Used ONLY
-- to 'configureFinalizedPackage'.
-- requiredDepsMap: A map from 'PackageName' to the specifically
-- required 'InstalledPackageInfo', due to --dependency
--
-- NB: These constraints are to be applied to ALL components of
-- a package. Thus, it's not an error if allConstraints contains
-- more constraints than is necessary for a component (another
-- component might need it.)
--
-- NB: The fact that we bundle all the constraints together means
-- that is not possible to configure a test-suite to use one
-- version of a dependency, and the executable to use another.
(allConstraints :: [Dependency],
requiredDepsMap :: Map PackageName InstalledPackageInfo)
<- either die return $
combinedConstraints (configConstraints cfg)
(configDependencies cfg)
installedPackageSet
-- pkg_descr: The resolved package description, that does not contain any
--                      conditionals, because we have an assignment for
-- every flag, either picking them ourselves using a
-- simple naive algorithm, or having them be passed to
-- us by 'configConfigurationsFlags')
-- flags: The 'FlagAssignment' that the conditionals were
-- resolved with.
--
-- NB: Why doesn't finalizing a package also tell us what the
-- dependencies are (e.g. when we run the naive algorithm,
-- we are checking if dependencies are satisfiable)? The
-- primary reason is that we may NOT have done any solving:
-- if the flags are all chosen for us, this step is a simple
-- matter of flattening according to that assignment. It's
-- cleaner to then configure the dependencies afterwards.
(pkg_descr :: PackageDescription,
flags :: FlagAssignment)
<- configureFinalizedPackage verbosity cfg
allConstraints
(dependencySatisfiable
(fromFlagOrDefault False (configExactConfiguration cfg))
installedPackageSet
internalPackageSet
requiredDepsMap)
comp
compPlatform
pkg_descr0
checkCompilerProblems comp pkg_descr
checkPackageProblems verbosity pkg_descr0
(updatePackageDescription pbi pkg_descr)
-- The list of 'InstalledPackageInfo' recording the selected
-- dependencies...
-- internalPkgDeps: ...on internal packages (these are fake!)
-- externalPkgDeps: ...on external packages
--
-- Invariant: For any package name, there is at most one package
-- in externalPackageDeps which has that name.
--
-- NB: The dependency selection is global over ALL components
-- in the package (similar to how allConstraints and
-- requiredDepsMap are global over all components). In particular,
-- if *any* component (post-flag resolution) has an unsatisfiable
-- dependency, we will fail. This can sometimes be undesirable
-- for users; see #1786 (benchmark conflicts with executable).
(internalPkgDeps :: [PackageId],
externalPkgDeps :: [InstalledPackageInfo])
<- configureDependencies
verbosity
internalPackageSet
installedPackageSet
requiredDepsMap
pkg_descr
-- The database of transitively reachable installed packages that the
-- external components the package (as a whole) depends on. This will be
-- used in several ways:
--
-- * We'll use it to do a consistency check so we're not depending
-- on multiple versions of the same package (TODO: someday relax
-- this for private dependencies.) See right below.
--
-- * We feed it in when configuring the components to resolve
-- module reexports. (TODO: axe this.)
--
-- * We'll pass it on in the LocalBuildInfo, where preprocessors
-- and other things will incorrectly use it to determine what
-- the include paths and everything should be.
--
packageDependsIndex :: InstalledPackageIndex <-
case PackageIndex.dependencyClosure installedPackageSet
(map Installed.installedUnitId externalPkgDeps) of
Left packageDependsIndex -> return packageDependsIndex
Right broken ->
die $ "The following installed packages are broken because other"
++ " packages they depend on are missing. These broken "
++ "packages must be rebuilt before they can be used.\n"
++ unlines [ "package "
++ display (packageId pkg)
++ " is broken due to missing package "
++ intercalate ", " (map display deps)
| (pkg, deps) <- broken ]
-- In this section, we'd like to look at the 'packageDependsIndex'
-- and see if we've picked multiple versions of the same
-- installed package (this is bad, because it means you might
-- get an error could not match foo-0.1:Type with foo-0.2:Type).
--
-- What is pseudoTopPkg for? I have no idea. It was used
-- in the very original commit which introduced checking for
-- inconsistencies 5115bb2be4e13841ea07dc9166b9d9afa5f0d012,
-- and then moved out of PackageIndex and put here later.
-- TODO: Try this code without it...
--
-- TODO: Move this into a helper function
let pseudoTopPkg :: InstalledPackageInfo
pseudoTopPkg = emptyInstalledPackageInfo {
Installed.installedUnitId =
mkLegacyUnitId (packageId pkg_descr),
Installed.sourcePackageId = packageId pkg_descr,
Installed.depends =
map Installed.installedUnitId externalPkgDeps
}
case PackageIndex.dependencyInconsistencies
. PackageIndex.insert pseudoTopPkg
$ packageDependsIndex of
[] -> return ()
inconsistencies ->
warn verbosity $
"This package indirectly depends on multiple versions of the same "
++ "package. This is highly likely to cause a compile failure.\n"
++ unlines [ "package " ++ display pkg ++ " requires "
++ display (PackageIdentifier name ver)
| (name, uses) <- inconsistencies
, (pkg, ver) <- uses ]
-- Compute installation directory templates, based on user
-- configuration.
--
-- TODO: Move this into a helper function.
defaultDirs :: InstallDirTemplates
<- defaultInstallDirs (compilerFlavor comp)
(fromFlag (configUserInstall cfg))
(hasLibs pkg_descr)
let installDirs :: InstallDirTemplates
installDirs = combineInstallDirs fromFlagOrDefault
defaultDirs (configInstallDirs cfg)
-- Check languages and extensions
-- TODO: Move this into a helper function.
let langlist = nub $ catMaybes $ map defaultLanguage
(allBuildInfo pkg_descr)
let langs = unsupportedLanguages comp langlist
when (not (null langs)) $
die $ "The package " ++ display (packageId pkg_descr0)
++ " requires the following languages which are not "
++ "supported by " ++ display (compilerId comp) ++ ": "
++ intercalate ", " (map display langs)
let extlist = nub $ concatMap allExtensions (allBuildInfo pkg_descr)
let exts = unsupportedExtensions comp extlist
when (not (null exts)) $
die $ "The package " ++ display (packageId pkg_descr0)
++ " requires the following language extensions which are not "
++ "supported by " ++ display (compilerId comp) ++ ": "
++ intercalate ", " (map display exts)
-- Configure known/required programs & external build tools.
-- Exclude build-tool deps on "internal" exes in the same package
--
-- TODO: Factor this into a helper package.
let requiredBuildTools =
[ buildTool
| let exeNames = map exeName (executables pkg_descr)
, bi <- allBuildInfo pkg_descr
, buildTool@(Dependency (PackageName toolName) reqVer)
<- buildTools bi
, let isInternal =
toolName `elem` exeNames
-- we assume all internal build-tools are
-- versioned with the package:
&& packageVersion pkg_descr `withinRange` reqVer
, not isInternal ]
programsConfig' <-
configureAllKnownPrograms (lessVerbose verbosity) programsConfig
>>= configureRequiredPrograms verbosity requiredBuildTools
(pkg_descr', programsConfig'') <-
configurePkgconfigPackages verbosity pkg_descr programsConfig'
-- Compute internal component graph
--
-- The general idea is that we take a look at all the source level
-- components (which may build-depends on each other) and form a graph.
-- From there, we build a ComponentLocalBuildInfo for each of the
-- components, which lets us actually build each component.
buildComponents <-
case mkComponentsGraph pkg_descr internalPkgDeps of
Left componentCycle -> reportComponentCycle componentCycle
Right comps ->
mkComponentsLocalBuildInfo cfg comp packageDependsIndex pkg_descr
internalPkgDeps externalPkgDeps
comps (configConfigurationsFlags cfg)
-- Decide if we're going to compile with split objects.
split_objs :: Bool <-
if not (fromFlag $ configSplitObjs cfg)
then return False
else case compilerFlavor comp of
GHC | compilerVersion comp >= Version [6,5] []
-> return True
GHCJS
-> return True
_ -> do warn verbosity
("this compiler does not support " ++
"--enable-split-objs; ignoring")
return False
let ghciLibByDefault =
case compilerId comp of
CompilerId GHC _ ->
-- If ghc is non-dynamic, then ghci needs object files,
-- so we build one by default.
--
-- Technically, archive files should be sufficient for ghci,
-- but because of GHC bug #8942, it has never been safe to
-- rely on them. By the time that bug was fixed, ghci had
-- been changed to read shared libraries instead of archive
-- files (see next code block).
not (GHC.isDynamic comp)
CompilerId GHCJS _ ->
not (GHCJS.isDynamic comp)
_ -> False
let sharedLibsByDefault
| fromFlag (configDynExe cfg) =
-- build a shared library if dynamically-linked
-- executables are requested
True
| otherwise = case compilerId comp of
CompilerId GHC _ ->
-- if ghc is dynamic, then ghci needs a shared
-- library, so we build one by default.
GHC.isDynamic comp
CompilerId GHCJS _ ->
GHCJS.isDynamic comp
_ -> False
withSharedLib_ =
-- build shared libraries if required by GHC or by the
-- executable linking mode, but allow the user to force
-- building only static library archives with
-- --disable-shared.
fromFlagOrDefault sharedLibsByDefault $ configSharedLib cfg
withDynExe_ = fromFlag $ configDynExe cfg
when (withDynExe_ && not withSharedLib_) $ warn verbosity $
"Executables will use dynamic linking, but a shared library "
++ "is not being built. Linking will fail if any executables "
++ "depend on the library."
-- The --profiling flag sets the default for both libs and exes,
-- but can be overridden by --library-profiling, or the old deprecated
-- --executable-profiling flag.
let profEnabledLibOnly = configProfLib cfg
profEnabledBoth = fromFlagOrDefault False (configProf cfg)
profEnabledLib = fromFlagOrDefault profEnabledBoth profEnabledLibOnly
profEnabledExe = fromFlagOrDefault profEnabledBoth (configProfExe cfg)
-- The --profiling-detail and --library-profiling-detail flags behave
-- similarly
profDetailLibOnly <- checkProfDetail (configProfLibDetail cfg)
profDetailBoth <- liftM (fromFlagOrDefault ProfDetailDefault)
(checkProfDetail (configProfDetail cfg))
let profDetailLib = fromFlagOrDefault profDetailBoth profDetailLibOnly
profDetailExe = profDetailBoth
when (profEnabledExe && not profEnabledLib) $
warn verbosity $
"Executables will be built with profiling, but library "
++ "profiling is disabled. Linking will fail if any executables "
++ "depend on the library."
let configCoverage_ =
mappend (configCoverage cfg) (configLibCoverage cfg)
cfg' = cfg { configCoverage = configCoverage_ }
reloc <-
if not (fromFlag $ configRelocatable cfg)
then return False
else return True
let lbi = LocalBuildInfo {
configFlags = cfg',
flagAssignment = flags,
extraConfigArgs = [], -- Currently configure does not
-- take extra args, but if it
-- did they would go here.
installDirTemplates = installDirs,
compiler = comp,
hostPlatform = compPlatform,
buildDir = buildDir,
componentsConfigs = buildComponents,
installedPkgs = packageDependsIndex,
pkgDescrFile = Nothing,
localPkgDescr = pkg_descr',
withPrograms = programsConfig'',
withVanillaLib = fromFlag $ configVanillaLib cfg,
withProfLib = profEnabledLib,
withSharedLib = withSharedLib_,
withDynExe = withDynExe_,
withProfExe = profEnabledExe,
withProfLibDetail = profDetailLib,
withProfExeDetail = profDetailExe,
withOptimization = fromFlag $ configOptimization cfg,
withDebugInfo = fromFlag $ configDebugInfo cfg,
withGHCiLib = fromFlagOrDefault ghciLibByDefault $
configGHCiLib cfg,
splitObjs = split_objs,
stripExes = fromFlag $ configStripExes cfg,
stripLibs = fromFlag $ configStripLibs cfg,
withPackageDB = packageDbs,
progPrefix = fromFlag $ configProgPrefix cfg,
progSuffix = fromFlag $ configProgSuffix cfg,
relocatable = reloc
}
-- Create the internal package database
_ <- createInternalPackageDB verbosity lbi distPref
when reloc (checkRelocatable verbosity pkg_descr lbi)
-- TODO: This is not entirely correct, because the dirs may vary
-- across libraries/executables
let dirs = absoluteInstallDirs pkg_descr lbi NoCopyDest
relative = prefixRelativeInstallDirs (packageId pkg_descr) lbi
unless (isAbsolute (prefix dirs)) $ die $
"expected an absolute directory name for --prefix: " ++ prefix dirs
info verbosity $ "Using " ++ display currentCabalId
++ " compiled by " ++ display currentCompilerId
info verbosity $ "Using compiler: " ++ showCompilerId comp
info verbosity $ "Using install prefix: " ++ prefix dirs
let dirinfo name dir isPrefixRelative =
info verbosity $ name ++ " installed in: " ++ dir ++ relNote
where relNote = case buildOS of
Windows | not (hasLibs pkg_descr)
&& isNothing isPrefixRelative
-> " (fixed location)"
_ -> ""
dirinfo "Binaries" (bindir dirs) (bindir relative)
dirinfo "Libraries" (libdir dirs) (libdir relative)
dirinfo "Private binaries" (libexecdir dirs) (libexecdir relative)
dirinfo "Data files" (datadir dirs) (datadir relative)
dirinfo "Documentation" (docdir dirs) (docdir relative)
dirinfo "Configuration files" (sysconfdir dirs) (sysconfdir relative)
sequence_ [ reportProgram verbosity prog configuredProg
| (prog, configuredProg) <- knownPrograms programsConfig'' ]
return lbi
where
verbosity = fromFlag (configVerbosity cfg)
checkProfDetail (Flag (ProfDetailOther other)) = do
warn verbosity $
"Unknown profiling detail level '" ++ other
++ "', using default.\n"
++ "The profiling detail levels are: " ++ intercalate ", "
[ name | (name, _, _) <- knownProfDetailLevels ]
return (Flag ProfDetailDefault)
checkProfDetail other = return other
mkProgramsConfig :: ConfigFlags -> ProgramConfiguration -> ProgramConfiguration
mkProgramsConfig cfg initialProgramsConfig = programsConfig
where
programsConfig = userSpecifyArgss (configProgramArgs cfg)
. userSpecifyPaths (configProgramPaths cfg)
. setProgramSearchPath searchpath
$ initialProgramsConfig
searchpath = getProgramSearchPath (initialProgramsConfig)
++ map ProgramSearchPathDir
(fromNubList $ configProgramPathExtra cfg)
-- -----------------------------------------------------------------------------
-- Helper functions for configure
-- | Check if the user used any deprecated flags.
checkDeprecatedFlags :: Verbosity -> ConfigFlags -> IO ()
checkDeprecatedFlags verbosity cfg = do
unless (configProfExe cfg == NoFlag) $ do
let enable | fromFlag (configProfExe cfg) = "enable"
| otherwise = "disable"
warn verbosity
("The flag --" ++ enable ++ "-executable-profiling is deprecated. "
++ "Please use --" ++ enable ++ "-profiling instead.")
unless (configLibCoverage cfg == NoFlag) $ do
let enable | fromFlag (configLibCoverage cfg) = "enable"
| otherwise = "disable"
warn verbosity
("The flag --" ++ enable ++ "-library-coverage is deprecated. "
++ "Please use --" ++ enable ++ "-coverage instead.")
-- | Sanity check: if '--exact-configuration' was given, ensure that the
-- complete flag assignment was specified on the command line.
checkExactConfiguration :: GenericPackageDescription -> ConfigFlags -> IO ()
checkExactConfiguration pkg_descr0 cfg = do
when (fromFlagOrDefault False (configExactConfiguration cfg)) $ do
let cmdlineFlags = map fst (configConfigurationsFlags cfg)
allFlags = map flagName . genPackageFlags $ pkg_descr0
diffFlags = allFlags \\ cmdlineFlags
when (not . null $ diffFlags) $
die $ "'--exact-configuration' was given, "
++ "but the following flags were not specified: "
++ intercalate ", " (map show diffFlags)
-- | Create a PackageIndex that makes *any libraries that might be*
-- defined internally to this package look like installed packages, in
-- case an executable should refer to any of them as dependencies.
--
-- It must be *any libraries that might be* defined rather than the
-- actual definitions, because these depend on conditionals in the .cabal
-- file, and we haven't resolved them yet. finalizePackageDescription
-- does the resolution of conditionals, and it takes internalPackageSet
-- as part of its input.
getInternalPackages :: GenericPackageDescription
-> InstalledPackageIndex
getInternalPackages pkg_descr0 =
let pkg_descr = flattenPackageDescription pkg_descr0
mkInternalPackage lib = emptyInstalledPackageInfo {
--TODO: should use a per-compiler method to map the source
-- package ID into an installed package id we can use
-- for the internal package set. What we do here
-- is skeevy, but we're highly unlikely to accidentally
-- shadow something legitimate.
Installed.installedUnitId = mkUnitId (libName lib),
-- NB: we TEMPORARILY set the package name to be the
-- library name. When we actually register, it won't
-- look like this; this is just so that internal
-- build-depends get resolved correctly.
Installed.sourcePackageId = PackageIdentifier (PackageName (libName lib))
(pkgVersion (package pkg_descr))
}
in PackageIndex.fromList (map mkInternalPackage (libraries pkg_descr))
-- | Returns true if a dependency is satisfiable. This is to be passed
-- to finalizePackageDescription.
dependencySatisfiable
:: Bool
-> InstalledPackageIndex -- ^ installed set
-> InstalledPackageIndex -- ^ internal set
-> Map PackageName InstalledPackageInfo -- ^ required dependencies
-> (Dependency -> Bool)
dependencySatisfiable
exact_config installedPackageSet internalPackageSet requiredDepsMap
d@(Dependency depName _)
| exact_config =
-- When we're given '--exact-configuration', we assume that all
-- dependencies and flags are exactly specified on the command
-- line. Thus we only consult the 'requiredDepsMap'. Note that
-- we're not doing the version range check, so if there's some
-- dependency that wasn't specified on the command line,
-- 'finalizePackageDescription' will fail.
--
-- TODO: mention '--exact-configuration' in the error message
-- when this fails?
--
-- (However, note that internal deps don't have to be
-- specified!)
(depName `Map.member` requiredDepsMap) || isInternalDep
| otherwise =
-- Normal operation: just look up dependency in the combined
-- package index.
not . null . PackageIndex.lookupDependency pkgs $ d
where
-- NB: Prefer the INTERNAL package set
pkgs = PackageIndex.merge installedPackageSet internalPackageSet
isInternalDep = not . null
$ PackageIndex.lookupDependency internalPackageSet d
-- | Relax the dependencies of this package if needed.
relaxPackageDeps :: AllowNewer -> GenericPackageDescription
-> GenericPackageDescription
relaxPackageDeps AllowNewerNone gpd = gpd
relaxPackageDeps AllowNewerAll gpd = transformAllBuildDepends relaxAll gpd
where
relaxAll = \(Dependency pkgName verRange) ->
Dependency pkgName (removeUpperBound verRange)
relaxPackageDeps (AllowNewerSome allowNewerDeps') gpd =
transformAllBuildDepends relaxSome gpd
where
thisPkgName = packageName gpd
allowNewerDeps = mapMaybe f allowNewerDeps'
f (Setup.AllowNewerDep p) = Just p
f (Setup.AllowNewerDepScoped scope p) | scope == thisPkgName = Just p
| otherwise = Nothing
relaxSome = \d@(Dependency depName verRange) ->
if depName `elem` allowNewerDeps
then Dependency depName (removeUpperBound verRange)
else d
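-- An illustrative effect (hypothetical dependency): under 'AllowNewerAll' a
-- declared constraint such as @bytestring >= 0.10 && < 0.11@ becomes
-- @bytestring >= 0.10@; only the upper bound is removed.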
-- | Finalize a generic package description. The workhorse is
-- 'finalizePackageDescription', but there's a bit of other bookkeeping
-- needed around it.
--
-- TODO: what exactly is the business with @flaggedTests@ and
-- @flaggedBenchmarks@?
configureFinalizedPackage
:: Verbosity
-> ConfigFlags
-> [Dependency]
-> (Dependency -> Bool) -- ^ tests if a dependency is satisfiable.
-- Might say it's satisfiable even when not.
-> Compiler
-> Platform
-> GenericPackageDescription
-> IO (PackageDescription, FlagAssignment)
configureFinalizedPackage verbosity cfg
allConstraints satisfies comp compPlatform pkg_descr0 = do
let enableTest t = t { testEnabled = fromFlag (configTests cfg) }
flaggedTests = map (\(n, t) -> (n, mapTreeData enableTest t))
(condTestSuites pkg_descr0)
enableBenchmark bm = bm { benchmarkEnabled =
fromFlag (configBenchmarks cfg) }
flaggedBenchmarks = map (\(n, bm) ->
(n, mapTreeData enableBenchmark bm))
(condBenchmarks pkg_descr0)
pkg_descr0'' = pkg_descr0 { condTestSuites = flaggedTests
, condBenchmarks = flaggedBenchmarks }
(pkg_descr0', flags) <-
case finalizePackageDescription
(configConfigurationsFlags cfg)
satisfies
compPlatform
(compilerInfo comp)
allConstraints
pkg_descr0''
of Right r -> return r
Left missing ->
die $ "Encountered missing dependencies:\n"
++ (render . nest 4 . sep . punctuate comma
. map (disp . simplifyDependency)
$ missing)
-- add extra include/lib dirs as specified in cfg
-- we do it here so that those get checked too
let pkg_descr = addExtraIncludeLibDirs pkg_descr0'
when (not (null flags)) $
info verbosity $ "Flags chosen: "
++ intercalate ", " [ name ++ "=" ++ display value
| (FlagName name, value) <- flags ]
return (pkg_descr, flags)
where
addExtraIncludeLibDirs pkg_descr =
let extraBi = mempty { extraLibDirs = configExtraLibDirs cfg
, extraFrameworkDirs = configExtraFrameworkDirs cfg
, PD.includeDirs = configExtraIncludeDirs cfg}
modifyLib l = l{ libBuildInfo = libBuildInfo l
`mappend` extraBi }
modifyExecutable e = e{ buildInfo = buildInfo e
`mappend` extraBi}
in pkg_descr{ libraries = modifyLib `map` libraries pkg_descr
, executables = modifyExecutable `map`
executables pkg_descr}
-- | Check for use of Cabal features which require compiler support
checkCompilerProblems :: Compiler -> PackageDescription -> IO ()
checkCompilerProblems comp pkg_descr = do
unless (renamingPackageFlagsSupported comp ||
and [ True
| bi <- allBuildInfo pkg_descr
, _ <- Map.elems (targetBuildRenaming bi)]) $
die $ "Your compiler does not support thinning and renaming on "
++ "package flags. To use this feature you probably must use "
++ "GHC 7.9 or later."
when (any (not.null.PD.reexportedModules) (PD.libraries pkg_descr)
&& not (reexportedModulesSupported comp)) $ do
die $ "Your compiler does not support module re-exports. To use "
++ "this feature you probably must use GHC 7.9 or later."
-- | Select dependencies for the package.
configureDependencies
:: Verbosity
-> InstalledPackageIndex -- ^ internal packages
-> InstalledPackageIndex -- ^ installed packages
-> Map PackageName InstalledPackageInfo -- ^ required deps
-> PackageDescription
-> IO ([PackageId], [InstalledPackageInfo])
configureDependencies verbosity
internalPackageSet installedPackageSet requiredDepsMap pkg_descr = do
let selectDependencies :: [Dependency] ->
([FailedDependency], [ResolvedDependency])
selectDependencies =
partitionEithers
. map (selectDependency internalPackageSet installedPackageSet
requiredDepsMap)
(failedDeps, allPkgDeps) =
selectDependencies (buildDepends pkg_descr)
internalPkgDeps = [ pkgid
| InternalDependency _ pkgid <- allPkgDeps ]
externalPkgDeps = [ pkg
| ExternalDependency _ pkg <- allPkgDeps ]
when (not (null internalPkgDeps)
&& not (newPackageDepsBehaviour pkg_descr)) $
die $ "The field 'build-depends: "
++ intercalate ", " (map (display . packageName) internalPkgDeps)
++ "' refers to a library which is defined within the same "
++ "package. To use this feature the package must specify at "
++ "least 'cabal-version: >= 1.8'."
reportFailedDependencies failedDeps
reportSelectedDependencies verbosity allPkgDeps
return (internalPkgDeps, externalPkgDeps)
-- -----------------------------------------------------------------------------
-- Configuring package dependencies
reportProgram :: Verbosity -> Program -> Maybe ConfiguredProgram -> IO ()
reportProgram verbosity prog Nothing
= info verbosity $ "No " ++ programName prog ++ " found"
reportProgram verbosity prog (Just configuredProg)
= info verbosity $ "Using " ++ programName prog ++ version ++ location
where location = case programLocation configuredProg of
FoundOnSystem p -> " found on system at: " ++ p
UserSpecified p -> " given by user at: " ++ p
version = case programVersion configuredProg of
Nothing -> ""
Just v -> " version " ++ display v
hackageUrl :: String
hackageUrl = "http://hackage.haskell.org/package/"
data ResolvedDependency = ExternalDependency Dependency InstalledPackageInfo
| InternalDependency Dependency PackageId -- should be a
-- lib name
data FailedDependency = DependencyNotExists PackageName
| DependencyNoVersion Dependency
-- | Test for a package dependency and record the version we have installed.
selectDependency :: InstalledPackageIndex -- ^ Internally defined packages
-> InstalledPackageIndex -- ^ Installed packages
-> Map PackageName InstalledPackageInfo
-- ^ Packages for which we have been given specific deps to
-- use
-> Dependency
-> Either FailedDependency ResolvedDependency
selectDependency internalIndex installedIndex requiredDepsMap
dep@(Dependency pkgname vr) =
-- If the dependency specification matches anything in the internal package
-- index, then we prefer that match to anything in the second.
-- For example:
--
-- Name: MyLibrary
-- Version: 0.1
-- Library
-- ..
-- Executable my-exec
-- build-depends: MyLibrary
--
-- We want "build-depends: MyLibrary" always to match the internal library
-- even if there is a newer installed library "MyLibrary-0.2".
-- However, "build-depends: MyLibrary >= 0.2" should match the installed one.
case PackageIndex.lookupPackageName internalIndex pkgname of
[(_,[pkg])] | packageVersion pkg `withinRange` vr
-> Right $ InternalDependency dep (packageId pkg)
_ -> case Map.lookup pkgname requiredDepsMap of
-- If we know the exact pkg to use, then use it.
Just pkginstance -> Right (ExternalDependency dep pkginstance)
-- Otherwise we just pick an arbitrary instance of the latest version.
Nothing -> case PackageIndex.lookupDependency installedIndex dep of
[] -> Left $ DependencyNotExists pkgname
pkgs -> Right $ ExternalDependency dep $
case last pkgs of
(_ver, pkginstances) -> head pkginstances
reportSelectedDependencies :: Verbosity
-> [ResolvedDependency] -> IO ()
reportSelectedDependencies verbosity deps =
info verbosity $ unlines
[ "Dependency " ++ display (simplifyDependency dep)
++ ": using " ++ display pkgid
| resolved <- deps
, let (dep, pkgid) = case resolved of
ExternalDependency dep' pkg' -> (dep', packageId pkg')
InternalDependency dep' pkgid' -> (dep', pkgid') ]
reportFailedDependencies :: [FailedDependency] -> IO ()
reportFailedDependencies [] = return ()
reportFailedDependencies failed =
die (intercalate "\n\n" (map reportFailedDependency failed))
where
reportFailedDependency (DependencyNotExists pkgname) =
"there is no version of " ++ display pkgname ++ " installed.\n"
++ "Perhaps you need to download and install it from\n"
++ hackageUrl ++ display pkgname ++ "?"
reportFailedDependency (DependencyNoVersion dep) =
"cannot satisfy dependency " ++ display (simplifyDependency dep) ++ "\n"
-- | List all installed packages in the given package databases.
getInstalledPackages :: Verbosity -> Compiler
-> PackageDBStack -- ^ The stack of package databases.
-> ProgramConfiguration
-> IO InstalledPackageIndex
getInstalledPackages verbosity comp packageDBs progconf = do
when (null packageDBs) $
die $ "No package databases have been specified. If you use "
++ "--package-db=clear, you must follow it with --package-db= "
++ "with 'global', 'user' or a specific file."
info verbosity "Reading installed packages..."
case compilerFlavor comp of
GHC -> GHC.getInstalledPackages verbosity comp packageDBs progconf
GHCJS -> GHCJS.getInstalledPackages verbosity packageDBs progconf
JHC -> JHC.getInstalledPackages verbosity packageDBs progconf
LHC -> LHC.getInstalledPackages verbosity packageDBs progconf
UHC -> UHC.getInstalledPackages verbosity comp packageDBs progconf
HaskellSuite {} ->
HaskellSuite.getInstalledPackages verbosity packageDBs progconf
flv -> die $ "don't know how to find the installed packages for "
++ display flv
-- | Like 'getInstalledPackages', but for a single package DB.
--
-- NB: Why isn't this always a fall through to 'getInstalledPackages'?
-- That is because 'getInstalledPackages' performs some sanity checks
-- on the package database stack in question. However, when sandboxes
-- are involved these sanity checks are not desirable.
getPackageDBContents :: Verbosity -> Compiler
-> PackageDB -> ProgramConfiguration
-> IO InstalledPackageIndex
getPackageDBContents verbosity comp packageDB progconf = do
info verbosity "Reading installed packages..."
case compilerFlavor comp of
GHC -> GHC.getPackageDBContents verbosity packageDB progconf
GHCJS -> GHCJS.getPackageDBContents verbosity packageDB progconf
-- For other compilers, try to fall back on 'getInstalledPackages'.
_ -> getInstalledPackages verbosity comp [packageDB] progconf
-- | A set of files (or directories) that can be monitored to detect when
-- there might have been a change in the installed packages.
--
getInstalledPackagesMonitorFiles :: Verbosity -> Compiler
-> PackageDBStack
-> ProgramConfiguration -> Platform
-> IO [FilePath]
getInstalledPackagesMonitorFiles verbosity comp packageDBs progconf platform =
case compilerFlavor comp of
GHC -> GHC.getInstalledPackagesMonitorFiles
verbosity platform progconf packageDBs
other -> do
warn verbosity $ "don't know how to find change monitoring files for "
++ "the installed package databases for " ++ display other
return []
-- | The user interface specifies the package dbs to use with a combination of
-- @--global@, @--user@ and @--package-db=global|user|clear|$file@.
-- This function combines the global/user flag and interprets the package-db
-- flag into a single package db stack.
--
interpretPackageDbFlags :: Bool -> [Maybe PackageDB] -> PackageDBStack
interpretPackageDbFlags userInstall specificDBs =
extra initialStack specificDBs
where
initialStack | userInstall = [GlobalPackageDB, UserPackageDB]
| otherwise = [GlobalPackageDB]
extra dbs' [] = dbs'
extra _ (Nothing:dbs) = extra [] dbs
extra dbs' (Just db:dbs) = extra (dbs' ++ [db]) dbs
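-- An illustrative example (not in the original module), where @db@ stands for
-- any specific package db.  A 'Nothing' entry, which comes from
-- @--package-db=clear@, resets the stack:
--
-- > interpretPackageDbFlags False []                 == [GlobalPackageDB]
-- > interpretPackageDbFlags True  []                 == [GlobalPackageDB, UserPackageDB]
-- > interpretPackageDbFlags True  [Nothing, Just db] == [db]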
newPackageDepsBehaviourMinVersion :: Version
newPackageDepsBehaviourMinVersion = Version [1,7,1] []
-- In older cabal versions, there was only one set of package dependencies for
-- the whole package. In this version, we can have separate dependencies per
-- target, but we only enable this behaviour if the minimum cabal version
-- specified is >= a certain minimum. Otherwise, for compatibility we use the
-- old behaviour.
newPackageDepsBehaviour :: PackageDescription -> Bool
newPackageDepsBehaviour pkg =
specVersion pkg >= newPackageDepsBehaviourMinVersion
-- We are given both --constraint="foo < 2.0" style constraints and also
-- specific packages to pick via --dependency="foo=foo-2.0-177d5cdf20962d0581".
--
-- When finalising the package we have to take into account the specific
-- installed deps we've been given, and the finalise function expects
-- constraints, so we have to translate these deps into version constraints.
--
-- But after finalising we then have to make sure we pick the right specific
-- deps in the end. So we still need to remember which installed packages to
-- pick.
combinedConstraints :: [Dependency] ->
[(PackageName, UnitId)] ->
InstalledPackageIndex ->
Either String ([Dependency],
Map PackageName InstalledPackageInfo)
combinedConstraints constraints dependencies installedPackages = do
when (not (null badUnitIds)) $
Left $ render $ text "The following package dependencies were requested"
$+$ nest 4 (dispDependencies badUnitIds)
$+$ text "however the given installed package instance does not exist."
when (not (null badNames)) $
Left $ render $ text "The following package dependencies were requested"
$+$ nest 4 (dispDependencies badNames)
$+$ text ("however the installed package's name does not match "
++ "the name given.")
--TODO: we don't check that all dependencies are used!
return (allConstraints, idConstraintMap)
where
allConstraints :: [Dependency]
allConstraints = constraints
++ [ thisPackageVersion (packageId pkg)
| (_, _, Just pkg) <- dependenciesPkgInfo ]
idConstraintMap :: Map PackageName InstalledPackageInfo
idConstraintMap = Map.fromList
[ (packageName pkg, pkg)
| (_, _, Just pkg) <- dependenciesPkgInfo ]
-- The dependencies along with the installed package info, if it exists
dependenciesPkgInfo :: [(PackageName, UnitId,
Maybe InstalledPackageInfo)]
dependenciesPkgInfo =
[ (pkgname, ipkgid, mpkg)
| (pkgname, ipkgid) <- dependencies
, let mpkg = PackageIndex.lookupUnitId
installedPackages ipkgid
]
-- If we looked up a package specified by an installed package id
-- (i.e. someone has written a hash) and didn't find it then it's
-- an error.
badUnitIds =
[ (pkgname, ipkgid)
| (pkgname, ipkgid, Nothing) <- dependenciesPkgInfo ]
-- If someone has written e.g.
-- --dependency="foo=MyOtherLib-1.0-07...5bf30" then they have
-- probably made a mistake.
badNames =
[ (requestedPkgName, ipkgid)
| (requestedPkgName, ipkgid, Just pkg) <- dependenciesPkgInfo
, let foundPkgName = packageName pkg
, requestedPkgName /= foundPkgName ]
dispDependencies deps =
hsep [ text "--dependency="
<> quotes (disp pkgname <> char '=' <> disp ipkgid)
| (pkgname, ipkgid) <- deps ]
-- -----------------------------------------------------------------------------
-- Configuring program dependencies
configureRequiredPrograms :: Verbosity -> [Dependency] -> ProgramConfiguration
-> IO ProgramConfiguration
configureRequiredPrograms verbosity deps conf =
foldM (configureRequiredProgram verbosity) conf deps
configureRequiredProgram :: Verbosity -> ProgramConfiguration -> Dependency
-> IO ProgramConfiguration
configureRequiredProgram verbosity conf
(Dependency (PackageName progName) verRange) =
case lookupKnownProgram progName conf of
Nothing -> die ("Unknown build tool " ++ progName)
Just prog
      -- requireProgramVersion always requires that the program have a version,
      -- but if the user says "build-depends: foo", i.e. no version constraint,
      -- then we should not fail if we cannot discover the program version.
| verRange == anyVersion -> do
(_, conf') <- requireProgram verbosity prog conf
return conf'
| otherwise -> do
(_, _, conf') <- requireProgramVersion verbosity prog verRange conf
return conf'
-- -----------------------------------------------------------------------------
-- Configuring pkg-config package dependencies
configurePkgconfigPackages :: Verbosity -> PackageDescription
-> ProgramConfiguration
-> IO (PackageDescription, ProgramConfiguration)
configurePkgconfigPackages verbosity pkg_descr conf
| null allpkgs = return (pkg_descr, conf)
| otherwise = do
(_, _, conf') <- requireProgramVersion
(lessVerbose verbosity) pkgConfigProgram
(orLaterVersion $ Version [0,9,0] []) conf
mapM_ requirePkg allpkgs
libs' <- mapM addPkgConfigBILib (libraries pkg_descr)
exes' <- mapM addPkgConfigBIExe (executables pkg_descr)
tests' <- mapM addPkgConfigBITest (testSuites pkg_descr)
benches' <- mapM addPkgConfigBIBench (benchmarks pkg_descr)
let pkg_descr' = pkg_descr { libraries = libs', executables = exes',
testSuites = tests', benchmarks = benches' }
return (pkg_descr', conf')
where
allpkgs = concatMap pkgconfigDepends (allBuildInfo pkg_descr)
pkgconfig = rawSystemProgramStdoutConf (lessVerbose verbosity)
pkgConfigProgram conf
requirePkg dep@(Dependency (PackageName pkg) range) = do
version <- pkgconfig ["--modversion", pkg]
`catchIO` (\_ -> die notFound)
`catchExit` (\_ -> die notFound)
case simpleParse version of
Nothing -> die "parsing output of pkg-config --modversion failed"
Just v | not (withinRange v range) -> die (badVersion v)
| otherwise -> info verbosity (depSatisfied v)
where
notFound = "The pkg-config package '" ++ pkg ++ "'"
++ versionRequirement
++ " is required but it could not be found."
badVersion v = "The pkg-config package '" ++ pkg ++ "'"
++ versionRequirement
++ " is required but the version installed on the"
++ " system is version " ++ display v
depSatisfied v = "Dependency " ++ display dep
++ ": using version " ++ display v
versionRequirement
| isAnyVersion range = ""
| otherwise = " version " ++ display range
-- Adds pkgconfig dependencies to the build info for a component
addPkgConfigBI compBI setCompBI comp = do
bi <- pkgconfigBuildInfo (pkgconfigDepends (compBI comp))
return $ setCompBI comp (compBI comp `mappend` bi)
-- Adds pkgconfig dependencies to the build info for a library
addPkgConfigBILib = addPkgConfigBI libBuildInfo $
\lib bi -> lib { libBuildInfo = bi }
-- Adds pkgconfig dependencies to the build info for an executable
addPkgConfigBIExe = addPkgConfigBI buildInfo $
\exe bi -> exe { buildInfo = bi }
-- Adds pkgconfig dependencies to the build info for a test suite
addPkgConfigBITest = addPkgConfigBI testBuildInfo $
\test bi -> test { testBuildInfo = bi }
-- Adds pkgconfig dependencies to the build info for a benchmark
addPkgConfigBIBench = addPkgConfigBI benchmarkBuildInfo $
\bench bi -> bench { benchmarkBuildInfo = bi }
pkgconfigBuildInfo :: [Dependency] -> IO BuildInfo
pkgconfigBuildInfo [] = return Mon.mempty
pkgconfigBuildInfo pkgdeps = do
let pkgs = nub [ display pkg | Dependency pkg _ <- pkgdeps ]
ccflags <- pkgconfig ("--cflags" : pkgs)
ldflags <- pkgconfig ("--libs" : pkgs)
return (ccLdOptionsBuildInfo (words ccflags) (words ldflags))
-- | Makes a 'BuildInfo' from C compiler and linker flags.
--
-- This can be used with the output from configuration programs like pkg-config
-- and similar package-specific programs like mysql-config, freealut-config etc.
-- For example:
--
-- > ccflags <- rawSystemProgramStdoutConf verbosity prog conf ["--cflags"]
-- > ldflags <- rawSystemProgramStdoutConf verbosity prog conf ["--libs"]
-- > return (ccldOptionsBuildInfo (words ccflags) (words ldflags))
--
ccLdOptionsBuildInfo :: [String] -> [String] -> BuildInfo
ccLdOptionsBuildInfo cflags ldflags =
let (includeDirs', cflags') = partition ("-I" `isPrefixOf`) cflags
(extraLibs', ldflags') = partition ("-l" `isPrefixOf`) ldflags
(extraLibDirs', ldflags'') = partition ("-L" `isPrefixOf`) ldflags'
in mempty {
PD.includeDirs = map (drop 2) includeDirs',
PD.extraLibs = map (drop 2) extraLibs',
PD.extraLibDirs = map (drop 2) extraLibDirs',
PD.ccOptions = cflags',
PD.ldOptions = ldflags''
}
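-- An illustrative example (not in the original module), splitting typical
-- pkg-config style output into the respective 'BuildInfo' fields:
--
-- > ccLdOptionsBuildInfo ["-I/usr/include/foo", "-O2"]
-- >                      ["-L/usr/lib/foo", "-lfoo", "-pthread"]
--
-- yields a 'BuildInfo' with @includeDirs = ["/usr/include/foo"]@,
-- @ccOptions = ["-O2"]@, @extraLibDirs = ["/usr/lib/foo"]@,
-- @extraLibs = ["foo"]@ and @ldOptions = ["-pthread"]@.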
-- -----------------------------------------------------------------------------
-- Determining the compiler details
configCompilerAuxEx :: ConfigFlags
-> IO (Compiler, Platform, ProgramConfiguration)
configCompilerAuxEx cfg = configCompilerEx (flagToMaybe $ configHcFlavor cfg)
(flagToMaybe $ configHcPath cfg)
(flagToMaybe $ configHcPkg cfg)
programsConfig
(fromFlag (configVerbosity cfg))
where
programsConfig = mkProgramsConfig cfg defaultProgramConfiguration
configCompilerEx :: Maybe CompilerFlavor -> Maybe FilePath -> Maybe FilePath
-> ProgramConfiguration -> Verbosity
-> IO (Compiler, Platform, ProgramConfiguration)
configCompilerEx Nothing _ _ _ _ = die "Unknown compiler"
configCompilerEx (Just hcFlavor) hcPath hcPkg conf verbosity = do
(comp, maybePlatform, programsConfig) <- case hcFlavor of
GHC -> GHC.configure verbosity hcPath hcPkg conf
GHCJS -> GHCJS.configure verbosity hcPath hcPkg conf
JHC -> JHC.configure verbosity hcPath hcPkg conf
LHC -> do (_, _, ghcConf) <- GHC.configure verbosity Nothing hcPkg conf
LHC.configure verbosity hcPath Nothing ghcConf
UHC -> UHC.configure verbosity hcPath hcPkg conf
HaskellSuite {} -> HaskellSuite.configure verbosity hcPath hcPkg conf
_ -> die "Unknown compiler"
return (comp, fromMaybe buildPlatform maybePlatform, programsConfig)
-- Ideally we would like to not have separate configCompiler* and
-- configCompiler*Ex sets of functions, but there are many custom setup scripts
-- in the wild that are using them, so the versions with old types are kept for
-- backwards compatibility. Platform was added to the return triple in 1.18.
{-# DEPRECATED configCompiler
"'configCompiler' is deprecated. Use 'configCompilerEx' instead." #-}
configCompiler :: Maybe CompilerFlavor -> Maybe FilePath -> Maybe FilePath
-> ProgramConfiguration -> Verbosity
-> IO (Compiler, ProgramConfiguration)
configCompiler mFlavor hcPath hcPkg conf verbosity =
fmap (\(a,_,b) -> (a,b)) $ configCompilerEx mFlavor hcPath hcPkg conf verbosity
{-# DEPRECATED configCompilerAux
"configCompilerAux is deprecated. Use 'configCompilerAuxEx' instead." #-}
configCompilerAux :: ConfigFlags
-> IO (Compiler, ProgramConfiguration)
configCompilerAux = fmap (\(a,_,b) -> (a,b)) . configCompilerAuxEx
-- -----------------------------------------------------------------------------
-- Making the internal component graph
-- | Given the package description and the set of package names which
-- are considered internal (the current package name and any internal
-- libraries are considered internal), create a graph of dependencies
-- between the components. This is NOT necessarily the build order
-- (although it is in the absence of Backpack.)
--
-- TODO: tighten up the type of 'internalPkgDeps'
mkComponentsGraph :: PackageDescription
-> [PackageId]
-> Either [ComponentName]
[(Component, [ComponentName])]
mkComponentsGraph pkg_descr internalPkgDeps =
let graph = [ (c, componentName c, componentDeps c)
| c <- pkgEnabledComponents pkg_descr ]
in case checkComponentsCyclic graph of
Just ccycle -> Left [ cname | (_,cname,_) <- ccycle ]
Nothing -> Right [ (c, cdeps) | (c, _, cdeps) <- topSortFromEdges graph ]
where
-- The dependencies for the given component
componentDeps component =
[ CExeName toolname | Dependency (PackageName toolname) _
<- buildTools bi
, toolname `elem` map exeName
(executables pkg_descr) ]
++ [ CLibName toolname | Dependency pkgname@(PackageName toolname) _
<- targetBuildDepends bi
, pkgname `elem` map packageName internalPkgDeps ]
where
bi = componentBuildInfo component
reportComponentCycle :: [ComponentName] -> IO a
reportComponentCycle cnames =
die $ "Components in the package depend on each other in a cyclic way:\n "
++ intercalate " depends on "
[ "'" ++ showComponentName cname ++ "'"
| cname <- cnames ++ [head cnames] ]
-- | This method computes a default, "good enough" 'ComponentId'
-- for a package. The intent is that cabal-install (or the user) will
-- specify a more detailed IPID via the @--ipid@ flag if necessary.
computeComponentId
:: Flag String
-> PackageIdentifier
-> ComponentName
-- TODO: careful here!
-> [ComponentId] -- IPIDs of the component dependencies
-> FlagAssignment
-> ComponentId
computeComponentId mb_explicit pid cname dep_ipids flagAssignment = do
  -- show is found to be faster than intercalate followed by replacement of
  -- the special character used for intercalating. We cannot simply hash the
  -- doubly concatenated list, as that just flattens out the nested list, so
  -- different sources could produce the same hash.
let hash = hashToBase62 $
-- For safety, include the package + version here
-- for GHC 7.10, where just the hash is used as
-- the package key
display pid
++ show dep_ipids
++ show flagAssignment
generated_base = display pid ++ "-" ++ hash
explicit_base cid0 = fromPathTemplate (InstallDirs.substPathTemplate env
(toPathTemplate cid0))
-- Hack to reuse install dirs machinery
-- NB: no real IPID available at this point
where env = packageTemplateEnv pid (mkUnitId "")
actual_base = case mb_explicit of
Flag cid0 -> explicit_base cid0
NoFlag -> generated_base
ComponentId $ actual_base
++ (case componentNameString (pkgName pid) cname of
Nothing -> ""
Just s -> "-" ++ s)
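-- An illustrative sketch of the result shape (not in the original module; the
-- hash below is made up).  With no explicit @--ipid@ flag the id is
-- @<pkg>-<version>-<base-62 fingerprint>@, and for components other than the
-- main library (assuming 'componentNameString' yields @Just@ their name) the
-- component name is appended:
--
-- > foo-1.0-4mMK66eEk3Tc3wSJ8zV7gM          -- the package's public library
-- > foo-1.0-4mMK66eEk3Tc3wSJ8zV7gM-my-exe   -- an executable called "my-exe"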
hashToBase62 :: String -> String
hashToBase62 s = showFingerprint $ fingerprintString s
where
showIntAtBase62 x = showIntAtBase 62 representBase62 x ""
representBase62 x
| x < 10 = chr (48 + x)
| x < 36 = chr (65 + x - 10)
| x < 62 = chr (97 + x - 36)
| otherwise = '@'
showFingerprint (Fingerprint a b) = showIntAtBase62 a ++ showIntAtBase62 b
-- | Computes the package name for a library. If this is the public
-- library, it will just be the original package name; otherwise,
-- it will be a munged package name recording the original package
-- name as well as the name of the internal library.
--
-- A lot of tooling in the Haskell ecosystem assumes that if something
-- is installed to the package database with the package name 'foo',
-- then it actually is an entry for the (only public) library in package
-- 'foo'. With internal packages, this is not necessarily true:
-- a public library as well as arbitrarily many internal libraries may
-- come from the same package. To prevent tools from getting confused
-- in this case, the package name of these internal libraries is munged
-- so that they do not conflict with the public library proper.
--
-- We munge into a reserved namespace, "z-", and encode both the
-- component name and the package name of an internal library using the
-- following format:
--
-- compat-pkg-name ::= "z-" package-name "-z-" library-name
--
-- where package-name and library-name have "-" ( "z" + ) "-"
-- segments encoded by adding an extra "z".
--
-- When we have the public library, the compat-pkg-name is just the
-- package-name, no surprises there!
--
computeCompatPackageName :: PackageName -> ComponentName -> PackageName
computeCompatPackageName pkg_name cname
| Just cname_str <- componentNameString pkg_name cname
= let zdashcode s = go s (Nothing :: Maybe Int) []
where go [] _ r = reverse r
go ('-':z) (Just n) r | n > 0 = go z (Just 0) ('-':'z':r)
go ('-':z) _ r = go z (Just 0) ('-':r)
go ('z':z) (Just n) r = go z (Just (n+1)) ('z':r)
go (c:z) _ r = go z Nothing (c:r)
in PackageName $ "z-" ++ zdashcode (display pkg_name)
++ "-z-" ++ zdashcode cname_str
| otherwise
= pkg_name
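-- An illustrative example (not in the original module; it assumes that
-- 'componentNameString' yields @Nothing@ for the public library and @Just@
-- the library name for an internal one):
--
-- > computeCompatPackageName (PackageName "my-pkg") (CLibName "my-pkg")
-- >   == PackageName "my-pkg"
-- > computeCompatPackageName (PackageName "my-pkg") (CLibName "utils")
-- >   == PackageName "z-my-pkg-z-utils"
--
-- The zdashcode escaping only kicks in when a name already contains a
-- @-z...z-@ segment, e.g. the package name "my-z-pkg" becomes "my-zz-pkg".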
-- | In GHC 8.0, the string we pass to GHC to use for symbol
-- names for a package can be an arbitrary, IPID-compatible string.
-- However, prior to GHC 8.0 there are some restrictions on what
-- format this string can be (due to how ghc-pkg parsed the key):
--
-- 1. In GHC 7.10, the string had to be either of the form
--    foo_ABCD, where foo is a non-semantic alphanumeric/hyphenated
--    prefix and ABCD is two base-62 encoded 64-bit integers,
-- or a GHC 7.8 style identifier.
--
-- 2. In GHC 7.8, the string had to be a valid package identifier
-- like foo-0.1.
--
-- So, the problem is that Cabal, in general, has a general IPID,
-- but needs to figure out a package key / package ID that the
-- old ghc-pkg will actually accept.  But there's an EVEN WORSE
-- problem: ghc-pkg may decide to parse an identifier foo-0.1-xxx
-- as if it were a package identifier, which means it will SILENTLY
-- DROP the "xxx" (because it's a tag, and Cabal does not allow tags).
-- So we must CONNIVE to ensure that we don't pick something that
-- looks like this.
--
-- So this function attempts to define a mapping into the old formats.
--
-- The mapping for GHC 7.8 and before:
--
-- * We use the *compatibility* package name and version. For
-- public libraries this is just the package identifier; for
-- internal libraries, it's something like "z-pkgname-z-libname-0.1".
-- See 'computeCompatPackageName' for more details.
--
-- The mapping for GHC 7.10:
--
-- * For CLibName:
-- If the IPID is of the form foo-0.1-ABCDEF where foo_ABCDEF would
-- validly parse as a package key, we pass "ABCDEF". (NB: not
-- all hashes parse this way, because GHC 7.10 mandated that
-- these hashes be two base-62 encoded 64 bit integers),
-- but hashes that Cabal generated using 'computeComponentId'
-- are guaranteed to have this form.
--
-- If it is not of this form, we rehash the IPID into the
-- correct form and pass that.
--
-- * For sub-components, we rehash the IPID into the correct format
-- and pass that.
--
computeCompatPackageKey
:: Compiler
-> PackageName
-> Version
-> UnitId
-> String
computeCompatPackageKey comp pkg_name pkg_version (SimpleUnitId (ComponentId str))
| not (packageKeySupported comp) =
display pkg_name ++ "-" ++ display pkg_version
| not (unifiedIPIDRequired comp) =
let mb_verbatim_key
= case simpleParse str :: Maybe PackageId of
-- Something like 'foo-0.1', use it verbatim.
-- (NB: hash tags look like tags, so they are parsed,
-- so the extra equality check tests if a tag was dropped.)
Just pid0 | display pid0 == str -> Just str
_ -> Nothing
mb_truncated_key
= let cand = reverse (takeWhile isAlphaNum (reverse str))
in if length cand == 22 && all isAlphaNum cand
then Just cand
else Nothing
rehashed_key = hashToBase62 str
in fromMaybe rehashed_key (mb_verbatim_key `mplus` mb_truncated_key)
| otherwise = str
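-- Illustrative behaviour (not in the original module):
--
-- * if the compiler has no package-key support at all, the result is
--   @display pkg_name ++ "-" ++ display pkg_version@, e.g. @"foo-0.1"@;
--
-- * if package keys are supported but a unified IPID is not (the GHC 7.10
--   situation), an IPID such as @"foo-0.1-<22 alphanumeric chars>"@ is
--   truncated to the trailing hash, a plain @"foo-0.1"@ is passed through
--   verbatim, and anything else is rehashed with 'hashToBase62';
--
-- * otherwise the IPID string is used unchanged.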
topSortFromEdges :: Ord key => [(node, key, [key])]
-> [(node, key, [key])]
topSortFromEdges es =
let (graph, vertexToNode, _) = graphFromEdges es
in reverse (map vertexToNode (topSort graph))
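-- A small illustration (not in the original module): dependencies come out
-- before the nodes that depend on them, which is what lets the fold in
-- 'mkComponentsLocalBuildInfo' below find already-processed components in its
-- accumulator:
--
-- > topSortFromEdges [("a", 1, [2]), ("b", 2, [])]
-- >   == [("b", 2, []), ("a", 1, [2])]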
mkComponentsLocalBuildInfo :: ConfigFlags
-> Compiler
-> InstalledPackageIndex
-> PackageDescription
-> [PackageId] -- internal package deps
-> [InstalledPackageInfo] -- external package deps
-> [(Component, [ComponentName])]
-> FlagAssignment
-> IO [(ComponentLocalBuildInfo,
[UnitId])]
mkComponentsLocalBuildInfo cfg comp installedPackages pkg_descr
internalPkgDeps externalPkgDeps
graph flagAssignment =
foldM go [] graph
where
go z (component, dep_cnames) = do
clbi <- componentLocalBuildInfo z component
-- NB: We want to preserve cdeps because it contains extra
-- information like build-tools ordering
let dep_uids = [ componentUnitId dep_clbi
| cname <- dep_cnames
-- Being in z relies on topsort!
, (dep_clbi, _) <- z
, componentLocalName dep_clbi == cname ]
return ((clbi, dep_uids):z)
-- The allPkgDeps contains all the package deps for the whole package
-- but we need to select the subset for this specific component.
    -- We just take the subset for the package names this component
    -- needs. Note: this only works because we cannot yet depend on two
-- versions of the same package.
componentLocalBuildInfo :: [(ComponentLocalBuildInfo, [UnitId])]
-> Component -> IO ComponentLocalBuildInfo
componentLocalBuildInfo internalComps component =
case component of
CLib lib -> do
let exports = map (\n -> Installed.ExposedModule n Nothing)
(PD.exposedModules lib)
mb_reexports = resolveModuleReexports installedPackages
(packageId pkg_descr)
uid
externalPkgDeps lib
reexports <- case mb_reexports of
Left problems -> reportModuleReexportProblems problems
Right r -> return r
return LibComponentLocalBuildInfo {
componentPackageDeps = cpds,
componentUnitId = uid,
componentLocalName = componentName component,
componentIsPublic = libName lib == display (packageName (package pkg_descr)),
componentCompatPackageKey = compat_key,
componentCompatPackageName = compat_name,
componentIncludes = includes,
componentExposedModules = exports ++ reexports
}
CExe _ ->
return ExeComponentLocalBuildInfo {
componentUnitId = uid,
componentLocalName = componentName component,
componentPackageDeps = cpds,
componentIncludes = includes
}
CTest _ ->
return TestComponentLocalBuildInfo {
componentUnitId = uid,
componentLocalName = componentName component,
componentPackageDeps = cpds,
componentIncludes = includes
}
CBench _ ->
return BenchComponentLocalBuildInfo {
componentUnitId = uid,
componentLocalName = componentName component,
componentPackageDeps = cpds,
componentIncludes = includes
}
where
-- TODO configIPID should have name changed
cid = computeComponentId (configIPID cfg) (package pkg_descr)
(componentName component)
(getDeps (componentName component))
flagAssignment
uid = SimpleUnitId cid
PackageIdentifier pkg_name pkg_ver = package pkg_descr
compat_name = computeCompatPackageName pkg_name (componentName component)
compat_key = computeCompatPackageKey comp compat_name pkg_ver uid
bi = componentBuildInfo component
lookupInternalPkg :: PackageId -> UnitId
lookupInternalPkg pkgid = do
let matcher (clbi, _)
| CLibName str <- componentLocalName clbi
, str == display (pkgName pkgid)
= Just (componentUnitId clbi)
matcher _ = Nothing
case catMaybes (map matcher internalComps) of
[x] -> x
_ -> error "lookupInternalPkg"
cpds = if newPackageDepsBehaviour pkg_descr
then dedup $
[ (Installed.installedUnitId pkg, packageId pkg)
| pkg <- selectSubset bi externalPkgDeps ]
++ [ (lookupInternalPkg pkgid, pkgid)
| pkgid <- selectSubset bi internalPkgDeps ]
else [ (Installed.installedUnitId pkg, packageId pkg)
| pkg <- externalPkgDeps ]
includes = map (\(i,p) -> (i,lookupRenaming p cprns)) cpds
cprns = if newPackageDepsBehaviour pkg_descr
then targetBuildRenaming bi
else Map.empty
dedup = Map.toList . Map.fromList
-- TODO: this should include internal deps too
getDeps :: ComponentName -> [ComponentId]
getDeps cname =
let externalPkgs
= maybe [] (\lib -> selectSubset (componentBuildInfo lib)
externalPkgDeps)
(lookupComponent pkg_descr cname)
in map Installed.installedComponentId externalPkgs
selectSubset :: Package pkg => BuildInfo -> [pkg] -> [pkg]
selectSubset bi pkgs =
[ pkg | pkg <- pkgs, packageName pkg `elem` names bi ]
names :: BuildInfo -> [PackageName]
names bi = [ name | Dependency name _ <- targetBuildDepends bi ]
-- | Given the author-specified re-export declarations from the .cabal file,
-- resolve them to the form that we need for the package database.
--
-- An invariant of the package database is that we always link the re-export
-- directly to its original defining location (rather than indirectly via a
-- chain of re-exporting packages).
--
resolveModuleReexports :: InstalledPackageIndex
-> PackageId
-> UnitId
-> [InstalledPackageInfo]
-> Library
-> Either [(ModuleReexport, String)] -- errors
[Installed.ExposedModule] -- ok
resolveModuleReexports installedPackages srcpkgid key externalPkgDeps lib =
case partitionEithers
(map resolveModuleReexport (PD.reexportedModules lib)) of
([], ok) -> Right ok
(errs, _) -> Left errs
where
-- A mapping from visible module names to their original defining
-- module name. We also record the package name of the package which
    -- *immediately* provided the module (not the original) to handle the case
    -- where the user explicitly says which build-depends they want to reexport from.
visibleModules :: Map ModuleName [(PackageName, Installed.ExposedModule)]
visibleModules =
Map.fromListWith (++) $
[ (Installed.exposedName exposedModule, [(exportingPackageName,
exposedModule)])
-- The package index here contains all the indirect deps of the
-- package we're configuring, but we want just the direct deps
| let directDeps = Set.fromList
(map Installed.installedUnitId externalPkgDeps)
, pkg <- PackageIndex.allPackages installedPackages
, Installed.installedUnitId pkg `Set.member` directDeps
, let exportingPackageName = packageName pkg
, exposedModule <- visibleModuleDetails pkg
]
++ [ (visibleModuleName, [(exportingPackageName, exposedModule)])
| visibleModuleName <- PD.exposedModules lib
++ otherModules (libBuildInfo lib)
, let exportingPackageName = packageName srcpkgid
definingModuleName = visibleModuleName
definingPackageId = key
originalModule = Module definingPackageId
definingModuleName
exposedModule = Installed.ExposedModule visibleModuleName
(Just originalModule)
]
-- All the modules exported from this package and their defining name and
-- package (either defined here in this package or re-exported from some
-- other package). Return an ExposedModule because we want to hold onto
-- signature information.
visibleModuleDetails :: InstalledPackageInfo -> [Installed.ExposedModule]
visibleModuleDetails pkg = do
exposedModule <- Installed.exposedModules pkg
case Installed.exposedReexport exposedModule of
-- The first case is the modules actually defined in this package.
-- In this case the reexport will point to this package.
Nothing -> return exposedModule {
Installed.exposedReexport =
Just (Module
(Installed.installedUnitId pkg)
(Installed.exposedName exposedModule)) }
-- On the other hand, a visible module might actually be itself
-- a re-export! In this case, the re-export info for the package
-- doing the re-export will point us to the original defining
-- module name and package, so we can reuse the entry.
Just _ -> return exposedModule
resolveModuleReexport reexport@ModuleReexport {
moduleReexportOriginalPackage = moriginalPackageName,
moduleReexportOriginalName = originalName,
moduleReexportName = newName
} =
let filterForSpecificPackage =
case moriginalPackageName of
Nothing -> id
Just originalPackageName ->
filter (\(pkgname, _) -> pkgname == originalPackageName)
matches = filterForSpecificPackage
(Map.findWithDefault [] originalName visibleModules)
in
case (matches, moriginalPackageName) of
((_, exposedModule):rest, _)
-- TODO: Refine this check for signatures
| all (\(_, exposedModule') ->
Installed.exposedReexport exposedModule
== Installed.exposedReexport exposedModule') rest
-> Right exposedModule { Installed.exposedName = newName }
([], Just originalPackageName)
-> Left $ (,) reexport
$ "The package " ++ display originalPackageName
++ " does not export a module " ++ display originalName
([], Nothing)
-> Left $ (,) reexport
$ "The module " ++ display originalName
++ " is not exported by any suitable package (this package "
++ "itself nor any of its 'build-depends' dependencies)."
(ms, _)
-> Left $ (,) reexport
$ "The module " ++ display originalName ++ " is exported "
++ "by more than one package ("
++ intercalate ", " [ display pkgname | (pkgname,_) <- ms ]
++ ") and so the re-export is ambiguous. The ambiguity can "
++ "be resolved by qualifying by the package name. The "
++ "syntax is 'packagename:moduleName [as newname]'."
-- Note: if in future Cabal allows directly depending on multiple
-- instances of the same package (e.g. backpack) then an additional
-- ambiguity case is possible here: (_, Just originalPackageName)
-- with the module being ambiguous despite being qualified by a
-- package name. Presumably by that time we'll have a mechanism to
-- qualify the instance we're referring to.
reportModuleReexportProblems :: [(ModuleReexport, String)] -> IO a
reportModuleReexportProblems reexportProblems =
die $ unlines
[ "Problem with the module re-export '" ++ display reexport ++ "': " ++ msg
| (reexport, msg) <- reexportProblems ]
-- -----------------------------------------------------------------------------
-- Testing C lib and header dependencies
-- Try to build a test C program which includes every header and links every
-- lib. If that fails, try to narrow it down by preprocessing (only) and linking
-- with individual headers and libs. If none is the obvious culprit then give a
-- generic error message.
-- TODO: produce a log file from the compiler errors, if any.
checkForeignDeps :: PackageDescription -> LocalBuildInfo -> Verbosity -> IO ()
checkForeignDeps pkg lbi verbosity = do
ifBuildsWith allHeaders (commonCcArgs ++ makeLdArgs allLibs) -- I'm feeling
-- lucky
(return ())
(do missingLibs <- findMissingLibs
missingHdr <- findOffendingHdr
explainErrors missingHdr missingLibs)
where
allHeaders = collectField PD.includes
allLibs = collectField PD.extraLibs
ifBuildsWith headers args success failure = do
ok <- builds (makeProgram headers) args
if ok then success else failure
findOffendingHdr =
ifBuildsWith allHeaders ccArgs
(return Nothing)
(go . tail . inits $ allHeaders)
where
go [] = return Nothing -- cannot happen
go (hdrs:hdrsInits) =
-- Try just preprocessing first
ifBuildsWith hdrs cppArgs
-- If that works, try compiling too
(ifBuildsWith hdrs ccArgs
(go hdrsInits)
(return . Just . Right . last $ hdrs))
(return . Just . Left . last $ hdrs)
cppArgs = "-E":commonCppArgs -- preprocess only
ccArgs = "-c":commonCcArgs -- don't try to link
findMissingLibs = ifBuildsWith [] (makeLdArgs allLibs)
(return [])
(filterM (fmap not . libExists) allLibs)
libExists lib = builds (makeProgram []) (makeLdArgs [lib])
commonCppArgs = platformDefines lbi
-- TODO: This is a massive hack, to work around the
-- fact that the test performed here should be
-- PER-component (c.f. the "I'm Feeling Lucky"; we
-- should NOT be glomming everything together.)
++ [ "-I" ++ buildDir lbi </> "autogen" ]
++ [ "-I" ++ dir | dir <- collectField PD.includeDirs ]
++ ["-I."]
++ collectField PD.cppOptions
++ collectField PD.ccOptions
++ [ "-I" ++ dir
| dep <- deps
, dir <- Installed.includeDirs dep ]
++ [ opt
| dep <- deps
, opt <- Installed.ccOptions dep ]
commonCcArgs = commonCppArgs
++ collectField PD.ccOptions
++ [ opt
| dep <- deps
, opt <- Installed.ccOptions dep ]
commonLdArgs = [ "-L" ++ dir | dir <- collectField PD.extraLibDirs ]
++ collectField PD.ldOptions
++ [ "-L" ++ dir
| dep <- deps
, dir <- Installed.libraryDirs dep ]
--TODO: do we also need dependent packages' ld options?
makeLdArgs libs = [ "-l"++lib | lib <- libs ] ++ commonLdArgs
makeProgram hdrs = unlines $
[ "#include \"" ++ hdr ++ "\"" | hdr <- hdrs ] ++
["int main(int argc, char** argv) { return 0; }"]
collectField f = concatMap f allBi
allBi = allBuildInfo pkg
deps = PackageIndex.topologicalOrder (installedPkgs lbi)
builds program args = do
tempDir <- getTemporaryDirectory
withTempFile tempDir ".c" $ \cName cHnd ->
withTempFile tempDir "" $ \oNname oHnd -> do
hPutStrLn cHnd program
hClose cHnd
hClose oHnd
_ <- rawSystemProgramStdoutConf verbosity
gccProgram (withPrograms lbi) (cName:"-o":oNname:args)
return True
`catchIO` (\_ -> return False)
`catchExit` (\_ -> return False)
explainErrors Nothing [] = return () -- should be impossible!
explainErrors _ _
| isNothing . lookupProgram gccProgram . withPrograms $ lbi
= die $ unlines $
[ "No working gcc",
"This package depends on foreign library but we cannot "
++ "find a working C compiler. If you have it in a "
++ "non-standard location you can use the --with-gcc "
++ "flag to specify it." ]
explainErrors hdr libs = die $ unlines $
[ if plural
then "Missing dependencies on foreign libraries:"
else "Missing dependency on a foreign library:"
| missing ]
++ case hdr of
Just (Left h) -> ["* Missing (or bad) header file: " ++ h ]
_ -> []
++ case libs of
[] -> []
[lib] -> ["* Missing C library: " ++ lib]
_ -> ["* Missing C libraries: " ++ intercalate ", " libs]
++ [if plural then messagePlural else messageSingular | missing]
++ case hdr of
Just (Left _) -> [ headerCppMessage ]
Just (Right h) -> [ (if missing then "* " else "")
++ "Bad header file: " ++ h
, headerCcMessage ]
_ -> []
where
plural = length libs >= 2
-- Is there something missing? (as opposed to broken)
missing = not (null libs)
|| case hdr of Just (Left _) -> True; _ -> False
messageSingular =
"This problem can usually be solved by installing the system "
++ "package that provides this library (you may need the "
++ "\"-dev\" version). If the library is already installed "
++ "but in a non-standard location then you can use the flags "
++ "--extra-include-dirs= and --extra-lib-dirs= to specify "
++ "where it is."
messagePlural =
"This problem can usually be solved by installing the system "
++ "packages that provide these libraries (you may need the "
++ "\"-dev\" versions). If the libraries are already installed "
++ "but in a non-standard location then you can use the flags "
++ "--extra-include-dirs= and --extra-lib-dirs= to specify "
++ "where they are."
headerCppMessage =
"If the header file does exist, it may contain errors that "
++ "are caught by the C compiler at the preprocessing stage. "
++ "In this case you can re-run configure with the verbosity "
++ "flag -v3 to see the error messages."
headerCcMessage =
"The header file contains a compile error. "
++ "You can re-run configure with the verbosity flag "
++ "-v3 to see the error messages from the C compiler."
-- | Output package check warnings and errors. Exit if any errors.
checkPackageProblems :: Verbosity
-> GenericPackageDescription
-> PackageDescription
-> IO ()
checkPackageProblems verbosity gpkg pkg = do
ioChecks <- checkPackageFiles pkg "."
let pureChecks = checkPackage gpkg (Just pkg)
errors = [ e | PackageBuildImpossible e <- pureChecks ++ ioChecks ]
warnings = [ w | PackageBuildWarning w <- pureChecks ++ ioChecks ]
if null errors
then mapM_ (warn verbosity) warnings
else die (intercalate "\n\n" errors)
-- | Perform checks to see whether a relocatable build is allowed
checkRelocatable :: Verbosity
-> PackageDescription
-> LocalBuildInfo
-> IO ()
checkRelocatable verbosity pkg lbi
= sequence_ [ checkOS
, checkCompiler
, packagePrefixRelative
, depsPrefixRelative
]
where
    -- Check if the OS supports relocatable builds.
    --
    -- If you add new OSes to this list, and your OS supports dynamic libraries
    -- and RPATH, make sure you add your OS to the RPATH-support list of:
-- Distribution.Simple.GHC.getRPaths
checkOS
= unless (os `elem` [ OSX, Linux ])
$ die $ "Operating system: " ++ display os ++
", does not support relocatable builds"
where
(Platform _ os) = hostPlatform lbi
    -- Check if the compiler supports relocatable builds
checkCompiler
= unless (compilerFlavor comp `elem` [ GHC ])
$ die $ "Compiler: " ++ show comp ++
", does not support relocatable builds"
where
comp = compiler lbi
-- Check if all the install dirs are relative to same prefix
packagePrefixRelative
= unless (relativeInstallDirs installDirs)
$ die $ "Installation directories are not prefix_relative:\n" ++
show installDirs
where
-- NB: should be good enough to check this against the default
-- component ID, but if we wanted to be strictly correct we'd
-- check for each ComponentId.
installDirs = absoluteInstallDirs pkg lbi NoCopyDest
p = prefix installDirs
relativeInstallDirs (InstallDirs {..}) =
all isJust
(fmap (stripPrefix p)
[ bindir, libdir, dynlibdir, libexecdir, includedir, datadir
, docdir, mandir, htmldir, haddockdir, sysconfdir] )
-- Check if the library dirs of the dependencies that are in the package
-- database to which the package is installed are relative to the
-- prefix of the package
depsPrefixRelative = do
pkgr <- GHC.pkgRoot verbosity lbi (last (withPackageDB lbi))
mapM_ (doCheck pkgr) ipkgs
where
doCheck pkgr ipkg
| maybe False (== pkgr) (Installed.pkgRoot ipkg)
= mapM_ (\l -> when (isNothing $ stripPrefix p l) (die (msg l)))
(Installed.libraryDirs ipkg)
| otherwise
= return ()
-- NB: should be good enough to check this against the default
-- component ID, but if we wanted to be strictly correct we'd
-- check for each ComponentId.
installDirs = absoluteInstallDirs pkg lbi NoCopyDest
p = prefix installDirs
ipkgs = PackageIndex.allPackages (installedPkgs lbi)
msg l = "Library directory of a dependency: " ++ show l ++
"\nis not relative to the installation prefix:\n" ++
show p
|
headprogrammingczar/cabal
|
Cabal/Distribution/Simple/Configure.hs
|
bsd-3-clause
| 98,539 | 1 | 22 | 29,672 | 15,976 | 8,378 | 7,598 | 1,394 | 15 |
module Distribution.Server.Pages.Util
( hackageNotFound
, hackageError
, makeInput
, makeCheckbox
, packageType
) where
import Data.List (intercalate)
import Distribution.Server.Pages.Template (hackagePage)
import Text.XHtml.Strict
hackageNotFound :: HTML a => a -> Html
hackageNotFound contents
= hackagePage "Not Found" [toHtml contents]
hackageError :: HTML a => a -> Html
hackageError contents
= hackagePage "Error" [toHtml contents]
makeInput :: [HtmlAttr] -> String -> String -> [Html]
makeInput attrs fname labelName = [label ! [thefor fname] << labelName,
input ! (attrs ++ [name fname, identifier fname])]
makeCheckbox :: Bool -> String -> String -> String -> [Html]
makeCheckbox isChecked fname fvalue labelName = [input ! ([thetype "checkbox", name fname, identifier fname, value fvalue]
++ if isChecked then [checked] else []),
toHtml " ",
label ! [thefor fname] << labelName]
packageType :: Bool
-> Int
-> Int
-> Int
-> String
packageType hasLibrary numExes numTests numBenches =
commaSeparate $ map format components
where
components = (if hasLibrary then [Library] else [])
++ (if numExes > 0 then [Programs numExes] else [])
++ (if numTests > 0 then [Tests numTests] else [])
++ (if numBenches > 0 then [Benchmarks numBenches] else [])
format Library = "library"
format (Programs n) = "program" ++ suffix n
format (Tests n) = "test" ++ suffix n
format (Benchmarks n) = "benchmark" ++ suffix n
suffix n = if n > 1 then "s" else ""
commaSeparate [] = []
commaSeparate [x] = x
commaSeparate xs = let (init', [last']) = splitAt (length xs - 1) xs
in intercalate ", " init' ++ " and " ++ last'
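-- Some illustrative outputs (not in the original module):
--
-- > packageType True  2 1 0 == "library, programs and test"
-- > packageType False 1 0 0 == "program"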
data Component = Library
| Programs !Int
| Tests !Int
| Benchmarks !Int
|
agrafix/hackage-server
|
Distribution/Server/Pages/Util.hs
|
bsd-3-clause
| 2,154 | 0 | 14 | 746 | 657 | 350 | 307 | 53 | 11 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="ko-KR">
<title>Retest Add-On</title>
<maps>
<homeID>retest</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
kingthorin/zap-extensions
|
addOns/retest/src/main/javahelp/org/zaproxy/addon/retest/resources/help_ko_KR/helpset_ko_KR.hs
|
apache-2.0
| 961 | 77 | 67 | 156 | 411 | 208 | 203 | -1 | -1 |
module TestTerms where
import qualified Src as S
import Core
import Language.Java.Syntax as J (Op(..))
import Prelude hiding (const)
-- mu loop. \x -> loop x
loopStr = "fix loop. \\(x : Int). loop x : Int -> Int"
loop = fix (\loop x -> App (var loop) (var x)) (JClass "java.lang.Integer") (JClass "java.lang.Integer")
factStr = "fix fact. \\(n : Int). if n == 0 then 1 else n * fact (n-1) : Int"
fact = fix (\fact n ->
If (PrimOp (var n) (S.Compare J.Equal) (Lit (S.Int 0)))
(Lit (S.Int 1))
(PrimOp (var n) (S.Arith J.Mult) (App (var fact) (PrimOp (var n) (S.Arith J.Sub) (Lit (S.Int 1))))))
(JClass "java.lang.Integer") (JClass "java.lang.Integer")
tfact = fix (\fact n -> lam (JClass "java.lang.Integer") (\acc ->
If (PrimOp (var n) (S.Compare J.Equal) (Lit (S.Int 0)))
(var acc)
(App (App (var fact) (PrimOp (var n) (S.Arith J.Sub) (Lit (S.Int 1)))) (PrimOp (var n) (S.Arith J.Mult) (var acc)))))
(JClass "java.lang.Integer") (Fun (JClass "java.lang.Integer") (JClass "java.lang.Integer"))
fiboStr = "fix fibo. \\(n : Int). if0 n then 1 else (fibo (n-1)) + (fibo (n-2)) : Int"
fibo = fix (\fibo n ->
If (PrimOp (var n) (S.Compare J.Equal) (Lit (S.Int 2)))
(Lit (S.Int 1))
(If (PrimOp (var n) (S.Compare J.Equal) (Lit (S.Int 1)))
(Lit (S.Int 1))
(PrimOp (App (var fibo) (PrimOp (var n) (S.Arith J.Sub) (Lit (S.Int 1)))) (S.Arith J.Add) (App (var fibo) (PrimOp (var n) (S.Arith J.Sub) (Lit (S.Int 2)))))))
(JClass "java.lang.Integer") (JClass "java.lang.Integer")
factApp = App fact (Lit (S.Int 10))
fiboApp = App fibo (Lit (S.Int 10))
-- /\A. \(x:A) . x
idF1Str = "/\\A. \\(x:A). x"
idF = bLam (\a -> lam (tVar a) (\x -> var x))
-- /\A . (\(f : A -> A) . \(x : A) . f x) (idF A)
idF2Str = "/\\A. (\\(f : A -> A). \\(x : A). f x) (idF A)"
idF2 = bLam (\a -> App (lam (Fun (tVar a) (tVar a)) (\f -> lam (tVar a) (\x -> App (var f) (var x)))) (TApp idF (tVar a)))
-- /\A . \(x:A) . (idF A) x
idF3Str = "/\\A . \\(x:A) . (idF A) x"
idF3 = bLam (\a -> lam (tVar a) (\x -> App (TApp idF (tVar a)) (var x) ))
notailStr = "/\\A. \\(f : A -> (A -> A)). \\(g : A -> A). \\(x : A). (f x) (g x)"
notail =
bLam (\a ->
lam (Fun (tVar a) (Fun (tVar a) (tVar a))) (\f ->
lam (Fun (tVar a) (tVar a)) (\g ->
lam (tVar a) (\x ->
App (App (var f) (var x)) (App (var g) (var x)) ))))
constStr = "/\\A . \\(x : A) . \\(y : A) . x"
const =
bLam (\a ->
lam (tVar a) (\x ->
lam (tVar a) (\y ->
var x
)
)
)
-- /\A . \(x : A) . (/\A . \(f : A -> A -> A) . \(g : A -> A) . \(x : A) . f x (g x)) A (const A) (idF A) x
-- /\A . \(x : A) . notail A (const A) (idF A) x
program1 =
bLam (\a ->
lam (tVar a) (\x ->
App (App (App (TApp notail (tVar a)) (TApp const (tVar a))) (TApp idF (tVar a))) (var x)
)
)
program1Num = App (TApp program1 (JClass "java.lang.Integer")) (Lit (S.Int 5))
-- should infer (forall (x0 : int) . int)
intapp = TApp idF (JClass "java.lang.Integer")
notail2Str = "/\\A. \\(f : A -> (A -> A)). \\(x : A). \\(y : A). (f x) ((f y) y)"
notail2 =
bLam (\a ->
lam (Fun (tVar a) (Fun (tVar a) (tVar a))) (\f ->
lam (tVar a) (\x ->
lam (tVar a) (\y ->
App (App (var f) (var x)) (App (App (var f) (var y)) (var y)) ))))
program2 = App (App (App (TApp notail2 (JClass "java.lang.Integer")) (TApp const (JClass "java.lang.Integer"))) (Lit (S.Int 5))) (Lit (S.Int 6))
idfNum = App (TApp idF (JClass "java.lang.Integer")) (Lit (S.Int 10))
constNum = App (App (TApp const (JClass "java.lang.Integer")) (Lit (S.Int 10))) (Lit (S.Int 20))
notail3Str = "/\\A. \\(f : A -> (A -> A)). \\(g : A -> (A -> A)). \\(x : A). \\(y : A). (f x) ((g y) y)"
notail3 =
bLam (\a ->
lam (Fun (tVar a) (Fun (tVar a) (tVar a))) (\f ->
lam (Fun (tVar a) (Fun (tVar a) (tVar a))) (\g ->
lam (tVar a) (\x ->
lam (tVar a) (\y ->
App (App (var f) (var x)) (App (App (var g) (var y)) (var y)) )))))
program3 = App (App (App (App (TApp notail3 (JClass "java.lang.Integer")) (TApp const (JClass "java.lang.Integer"))) (TApp const (JClass "java.lang.Integer"))) (Lit (S.Int 5))) (Lit (S.Int 6))
notail4Str = "/\\A. \\(g : ((A -> A) -> (A -> A)) -> A). \\(f : A -> (A -> A)). \\(x : A). \\(y : A). (g (f x)) (f y)"
notail4 =
bLam (\a ->
lam ( Fun (Fun (tVar a) (tVar a)) (Fun (Fun (tVar a) (tVar a)) (tVar a))) (\g ->
lam (Fun (tVar a) (Fun (tVar a) (tVar a))) (\f ->
lam (tVar a) (\x ->
lam (tVar a) (\y ->
App (App (var g) (App (var f) (var x))) (App (var f) (var y)))))))
summaStr= "\\(x : Int -> Int). \\(y : Int -> Int). (x 0) + (y 0)"
summa =
lam (Fun (JClass "java.lang.Integer") (JClass "java.lang.Integer")) (\x ->
lam (Fun (JClass "java.lang.Integer") (JClass "java.lang.Integer")) (\y ->
PrimOp (App (var x) (Lit (S.Int 0))) (S.Arith J.Add) (App (var y) (Lit (S.Int 0)))
)
)
program4 = App (App (App (App (TApp notail4 (JClass "java.lang.Integer")) summa) (TApp const (JClass "java.lang.Integer"))) (Lit (S.Int 5))) (Lit (S.Int 6))
evenOdd :: String
evenOdd = "let rec even = \\n -> n == 0 || odd (n-1) and odd = \\n -> if n == 0 then 0 else even (n-1) in odd"
|
bixuanzju/fcore
|
testsuite/TestTerms.hs
|
bsd-2-clause
| 5,281 | 0 | 26 | 1,334 | 2,591 | 1,333 | 1,258 | 88 | 1 |
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE TypeFamilies #-}
module T17566b where
class C f where
type T1 (f :: k1)
type T2 (f :: k2)
|
sdiehl/ghc
|
testsuite/tests/typecheck/should_fail/T17566b.hs
|
bsd-3-clause
| 135 | 0 | 7 | 30 | 35 | 23 | 12 | 6 | 0 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="de-DE">
<title>Import/Export</title>
<maps>
<homeID>exim</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
thc202/zap-extensions
|
addOns/exim/src/main/javahelp/help_de_DE/helpset_de_DE.hs
|
apache-2.0
| 959 | 77 | 67 | 155 | 408 | 207 | 201 | -1 | -1 |
{-# LANGUAGE KindSignatures, GADTs, TypeFamilies, MultiParamTypeClasses, FlexibleContexts, ScopedTypeVariables, TypeSynonymInstances, FlexibleInstances #-}
module T5303( showContextSeries ) where
import Control.Monad.State.Strict( StateT )
import Control.Monad.Trans ( lift )
import Data.Kind (Type)
data Tree (m :: Type -> Type) = Tree {}
data FL (a :: Type -> Type -> Type) x z where
(:>:) :: a x y -> FL a y z -> FL a x z
NilFL :: FL a x x
class (Functor m, Monad m) => ApplyMonad m (state :: (Type -> Type) -> Type)
class Apply (p :: Type -> Type -> Type) where
type ApplyState p :: (Type -> Type) -> Type
apply :: ApplyMonad m (ApplyState p) => p x y -> m ()
class (Functor m, Monad m, ApplyMonad (ApplyMonadOver m state) state)
=> ApplyMonadTrans m (state :: (Type -> Type) -> Type) where
type ApplyMonadOver m state :: Type -> Type
runApplyMonad :: (ApplyMonadOver m state) x -> state m -> m (x, state m)
instance (Functor m, Monad m) => ApplyMonadTrans m Tree where
type ApplyMonadOver m Tree = TreeMonad m
runApplyMonad = virtualTreeMonad
instance (Functor m, Monad m) => ApplyMonad (TreeMonad m) Tree
-- | Internal state of the 'TreeIO' monad. Keeps track of the current Tree
-- content, unsync'd changes and a current working directory (of the monad).
data TreeState m = TreeState { tree :: !(Tree m) }
type TreeMonad m = StateT (TreeState m) m
type TreeIO = TreeMonad IO
virtualTreeMonad :: (Functor m, Monad m) => TreeMonad m a -> Tree m -> m (a, Tree m)
virtualTreeMonad action t = undefined
applyToState :: forall p m x y. (Apply p, ApplyMonadTrans m (ApplyState p))
=> p x y -> (ApplyState p) m -> m ((ApplyState p) m)
applyToState _ _ = snd `fmap` runApplyMonad undefined undefined
showContextSeries :: (Apply p, ApplyState p ~ Tree) => FL p x y -> TreeIO ()
showContextSeries (p:>:_) = (undefined >>= lift . applyToState p) >> return ()
|
sdiehl/ghc
|
testsuite/tests/simplCore/should_compile/T5303.hs
|
bsd-3-clause
| 1,927 | 5 | 13 | 399 | 717 | 382 | 335 | -1 | -1 |
-- GSoC 2015 - Haskell bindings for OpenCog.
{-# LANGUAGE ForeignFunctionInterface #-}
{-# LANGUAGE DataKinds #-}
-- | This Module defines the main functions to interact with the Pattern Matcher.
module OpenCog.AtomSpace.Query where
import Control.Monad.IO.Class (liftIO)
import Foreign (Ptr)
import Foreign.C.Types (CULong(..),CInt(..),CDouble(..))
import Foreign.C.String (CString,withCString,peekCString)
import OpenCog.AtomSpace.Api (getByHandle,getWithHandle)
import OpenCog.AtomSpace.Types (TruthVal,Atom(..))
import OpenCog.AtomSpace.Env (AtomSpaceRef(..),AtomSpace,getAtomSpace)
import OpenCog.AtomSpace.CUtils
import OpenCog.AtomSpace.Internal (Handle)
--------------------------------------------------------------------------------
foreign import ccall "PatternMatcher_BindLink"
c_pmatcher_bindlink :: AtomSpaceRef
-> Handle
-> IO Handle
-- | 'cogBind' calls the pattern matcher with the given bindLink.
-- (you should insert the bindlink into the atomspace before using this function).
cogBind :: Atom -> AtomSpace (Maybe Atom)
cogBind at = do
m <- getWithHandle at
case m of
Just (_,handle) -> do
asRef <- getAtomSpace
handleRes <- liftIO $ c_pmatcher_bindlink asRef handle
matom <- getByHandle handleRes
return $ matom
Nothing -> return Nothing
foreign import ccall "PatternMatcher_SatisfactionLink"
c_pmatcher_satisfactionlink :: AtomSpaceRef
-> Handle
-> Ptr CString
-> Ptr CDouble
-> IO CInt
cogSatisfy :: Atom -> AtomSpace (Maybe TruthVal)
cogSatisfy at = do
m <- getWithHandle at
case m of
Just (_,handle) -> do
asRef <- getAtomSpace
res <- liftIO $ getTVfromC $ c_pmatcher_satisfactionlink asRef handle
return res
Nothing -> return Nothing
|
inflector/atomspace
|
opencog/haskell/OpenCog/AtomSpace/Query.hs
|
agpl-3.0
| 2,011 | 0 | 14 | 550 | 417 | 227 | 190 | 41 | 2 |
-- | contains a prettyprinter for the
-- Template Haskell datatypes
module Language.Haskell.TH.Ppr where
-- All of the exports from this module should
-- be "public" functions. The main module TH
-- re-exports them all.
import Text.PrettyPrint (render)
import Language.Haskell.TH.PprLib
import Language.Haskell.TH.Syntax
import Data.Word ( Word8 )
import Data.Char ( toLower, chr, ord, isSymbol )
import GHC.Show ( showMultiLineString )
import Data.Ratio ( numerator, denominator )
nestDepth :: Int
nestDepth = 4
type Precedence = Int
appPrec, unopPrec, opPrec, noPrec :: Precedence
appPrec = 3 -- Argument of a function application
opPrec = 2 -- Argument of an infix operator
unopPrec = 1 -- Argument of an unresolved infix operator
noPrec = 0 -- Others
parensIf :: Bool -> Doc -> Doc
parensIf True d = parens d
parensIf False d = d
------------------------------
pprint :: Ppr a => a -> String
pprint x = render $ to_HPJ_Doc $ ppr x
class Ppr a where
ppr :: a -> Doc
ppr_list :: [a] -> Doc
ppr_list = vcat . map ppr
instance Ppr a => Ppr [a] where
ppr x = ppr_list x
------------------------------
instance Ppr Name where
ppr v = pprName v
------------------------------
instance Ppr Info where
ppr (TyConI d) = ppr d
ppr (ClassI d is) = ppr d $$ vcat (map ppr is)
ppr (FamilyI d is) = ppr d $$ vcat (map ppr is)
ppr (PrimTyConI name arity is_unlifted)
= text "Primitive"
<+> (if is_unlifted then text "unlifted" else empty)
<+> text "type constructor" <+> quotes (ppr name)
<+> parens (text "arity" <+> int arity)
ppr (ClassOpI v ty cls fix)
= text "Class op from" <+> ppr cls <> colon <+>
vcat [ppr_sig v ty, pprFixity v fix]
ppr (DataConI v ty tc fix)
= text "Constructor from" <+> ppr tc <> colon <+>
vcat [ppr_sig v ty, pprFixity v fix]
ppr (TyVarI v ty)
= text "Type variable" <+> ppr v <+> equals <+> ppr ty
ppr (VarI v ty mb_d fix)
= vcat [ppr_sig v ty, pprFixity v fix,
case mb_d of { Nothing -> empty; Just d -> ppr d }]
ppr_sig :: Name -> Type -> Doc
ppr_sig v ty = ppr v <+> text "::" <+> ppr ty
pprFixity :: Name -> Fixity -> Doc
pprFixity _ f | f == defaultFixity = empty
pprFixity v (Fixity i d) = ppr_fix d <+> int i <+> ppr v
where ppr_fix InfixR = text "infixr"
ppr_fix InfixL = text "infixl"
ppr_fix InfixN = text "infix"
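-- An illustrative example (not in the original module): a non-default fixity
-- such as @Fixity 6 InfixL@ for an operator named @<+>@ renders as
-- @infixl 6 <+>@, while a 'defaultFixity' renders as the empty document.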
------------------------------
instance Ppr Module where
ppr (Module pkg m) = text (pkgString pkg) <+> text (modString m)
instance Ppr ModuleInfo where
ppr (ModuleInfo imps) = text "Module" <+> vcat (map ppr imps)
------------------------------
instance Ppr Exp where
ppr = pprExp noPrec
pprPrefixOcc :: Name -> Doc
-- Print operators with parens around them
pprPrefixOcc n = parensIf (isSymOcc n) (ppr n)
isSymOcc :: Name -> Bool
isSymOcc n
= case nameBase n of
[] -> True -- Empty name; weird
(c:_) -> isSymbolASCII c || (ord c > 0x7f && isSymbol c)
-- c.f. OccName.startsVarSym in GHC itself
isSymbolASCII :: Char -> Bool
isSymbolASCII c = c `elem` "!#$%&*+./<=>?@\\^|~-"
pprInfixExp :: Exp -> Doc
pprInfixExp (VarE v) = pprName' Infix v
pprInfixExp (ConE v) = pprName' Infix v
pprInfixExp _ = text "<<Non-variable/constructor in infix context>>"
pprExp :: Precedence -> Exp -> Doc
pprExp _ (VarE v) = pprName' Applied v
pprExp _ (ConE c) = pprName' Applied c
pprExp i (LitE l) = pprLit i l
pprExp i (AppE e1 e2) = parensIf (i >= appPrec) $ pprExp opPrec e1
<+> pprExp appPrec e2
pprExp _ (ParensE e) = parens (pprExp noPrec e)
pprExp i (UInfixE e1 op e2)
= parensIf (i > unopPrec) $ pprExp unopPrec e1
<+> pprInfixExp op
<+> pprExp unopPrec e2
pprExp i (InfixE (Just e1) op (Just e2))
= parensIf (i >= opPrec) $ pprExp opPrec e1
<+> pprInfixExp op
<+> pprExp opPrec e2
pprExp _ (InfixE me1 op me2) = parens $ pprMaybeExp noPrec me1
<+> pprInfixExp op
<+> pprMaybeExp noPrec me2
pprExp i (LamE ps e) = parensIf (i > noPrec) $ char '\\' <> hsep (map (pprPat appPrec) ps)
<+> text "->" <+> ppr e
pprExp i (LamCaseE ms) = parensIf (i > noPrec)
$ text "\\case" $$ nest nestDepth (ppr ms)
pprExp _ (TupE es) = parens $ sep $ punctuate comma $ map ppr es
pprExp _ (UnboxedTupE es) = hashParens $ sep $ punctuate comma $ map ppr es
-- Nesting in Cond is to avoid potential problems in do statements
pprExp i (CondE guard true false)
= parensIf (i > noPrec) $ sep [text "if" <+> ppr guard,
nest 1 $ text "then" <+> ppr true,
nest 1 $ text "else" <+> ppr false]
pprExp i (MultiIfE alts)
= parensIf (i > noPrec) $ vcat $
case alts of
[] -> [text "if {}"]
(alt : alts') -> text "if" <+> pprGuarded arrow alt
: map (nest 3 . pprGuarded arrow) alts'
pprExp i (LetE ds_ e) = parensIf (i > noPrec) $ text "let" <+> pprDecs ds_
$$ text " in" <+> ppr e
where
pprDecs [] = empty
pprDecs [d] = ppr d
pprDecs ds = braces $ sep $ punctuate semi $ map ppr ds
pprExp i (CaseE e ms)
= parensIf (i > noPrec) $ text "case" <+> ppr e <+> text "of"
$$ nest nestDepth (ppr ms)
pprExp i (DoE ss_) = parensIf (i > noPrec) $ text "do" <+> pprStms ss_
where
pprStms [] = empty
pprStms [s] = ppr s
pprStms ss = braces $ sep $ punctuate semi $ map ppr ss
pprExp _ (CompE []) = text "<<Empty CompExp>>"
-- This will probably break with fixity declarations - would need a ';'
pprExp _ (CompE ss) = text "[" <> ppr s
<+> text "|"
<+> (sep $ punctuate comma $ map ppr ss')
<> text "]"
where s = last ss
ss' = init ss
pprExp _ (ArithSeqE d) = ppr d
pprExp _ (ListE es) = brackets $ sep $ punctuate comma $ map ppr es
pprExp i (SigE e t) = parensIf (i > noPrec) $ ppr e <+> text "::" <+> ppr t
pprExp _ (RecConE nm fs) = ppr nm <> braces (pprFields fs)
pprExp _ (RecUpdE e fs) = pprExp appPrec e <> braces (pprFields fs)
pprFields :: [(Name,Exp)] -> Doc
pprFields = sep . punctuate comma . map (\(s,e) -> ppr s <+> equals <+> ppr e)
pprMaybeExp :: Precedence -> Maybe Exp -> Doc
pprMaybeExp _ Nothing = empty
pprMaybeExp i (Just e) = pprExp i e
------------------------------
instance Ppr Stmt where
ppr (BindS p e) = ppr p <+> text "<-" <+> ppr e
ppr (LetS ds) = text "let" <+> ppr ds
ppr (NoBindS e) = ppr e
ppr (ParS sss) = sep $ punctuate (text "|")
$ map (sep . punctuate comma . map ppr) sss
------------------------------
instance Ppr Match where
ppr (Match p rhs ds) = ppr p <+> pprBody False rhs
$$ where_clause ds
------------------------------
pprGuarded :: Doc -> (Guard, Exp) -> Doc
pprGuarded eqDoc (guard, expr) = case guard of
NormalG guardExpr -> char '|' <+> ppr guardExpr <+> eqDoc <+> ppr expr
PatG stmts -> char '|' <+> vcat (punctuate comma $ map ppr stmts) $$
nest nestDepth (eqDoc <+> ppr expr)
------------------------------
pprBody :: Bool -> Body -> Doc
pprBody eq body = case body of
GuardedB xs -> nest nestDepth $ vcat $ map (pprGuarded eqDoc) xs
NormalB e -> eqDoc <+> ppr e
where eqDoc | eq = equals
| otherwise = arrow
------------------------------
pprLit :: Precedence -> Lit -> Doc
pprLit i (IntPrimL x) = parensIf (i > noPrec && x < 0)
(integer x <> char '#')
pprLit _ (WordPrimL x) = integer x <> text "##"
pprLit i (FloatPrimL x) = parensIf (i > noPrec && x < 0)
(float (fromRational x) <> char '#')
pprLit i (DoublePrimL x) = parensIf (i > noPrec && x < 0)
(double (fromRational x) <> text "##")
pprLit i (IntegerL x) = parensIf (i > noPrec && x < 0) (integer x)
pprLit _ (CharL c) = text (show c)
pprLit _ (StringL s) = pprString s
pprLit _ (StringPrimL s) = pprString (bytesToString s) <> char '#'
pprLit i (RationalL rat) = parensIf (i > noPrec) $
integer (numerator rat) <+> char '/'
<+> integer (denominator rat)
bytesToString :: [Word8] -> String
bytesToString = map (chr . fromIntegral)
pprString :: String -> Doc
-- Print newlines as newlines with Haskell string escape notation,
-- not as '\n'. For other non-printables use regular escape notation.
pprString s = vcat (map text (showMultiLineString s))
------------------------------
instance Ppr Pat where
ppr = pprPat noPrec
pprPat :: Precedence -> Pat -> Doc
pprPat i (LitP l) = pprLit i l
pprPat _ (VarP v) = pprName' Applied v
pprPat _ (TupP ps) = parens $ sep $ punctuate comma $ map ppr ps
pprPat _ (UnboxedTupP ps) = hashParens $ sep $ punctuate comma $ map ppr ps
pprPat i (ConP s ps) = parensIf (i >= appPrec) $ pprName' Applied s
<+> sep (map (pprPat appPrec) ps)
pprPat _ (ParensP p) = parens $ pprPat noPrec p
pprPat i (UInfixP p1 n p2)
= parensIf (i > unopPrec) (pprPat unopPrec p1 <+>
pprName' Infix n <+>
pprPat unopPrec p2)
pprPat i (InfixP p1 n p2)
= parensIf (i >= opPrec) (pprPat opPrec p1 <+>
pprName' Infix n <+>
pprPat opPrec p2)
pprPat i (TildeP p) = parensIf (i > noPrec) $ char '~' <> pprPat appPrec p
pprPat i (BangP p) = parensIf (i > noPrec) $ char '!' <> pprPat appPrec p
pprPat i (AsP v p) = parensIf (i > noPrec) $ ppr v <> text "@"
<> pprPat appPrec p
pprPat _ WildP = text "_"
pprPat _ (RecP nm fs)
= parens $ ppr nm
<+> braces (sep $ punctuate comma $
map (\(s,p) -> ppr s <+> equals <+> ppr p) fs)
pprPat _ (ListP ps) = brackets $ sep $ punctuate comma $ map ppr ps
pprPat i (SigP p t) = parensIf (i > noPrec) $ ppr p <+> text "::" <+> ppr t
pprPat _ (ViewP e p) = parens $ pprExp noPrec e <+> text "->" <+> pprPat noPrec p
------------------------------
instance Ppr Dec where
ppr = ppr_dec True
ppr_dec :: Bool -- declaration on the toplevel?
-> Dec
-> Doc
ppr_dec _ (FunD f cs) = vcat $ map (\c -> pprPrefixOcc f <+> ppr c) cs
ppr_dec _ (ValD p r ds) = ppr p <+> pprBody True r
$$ where_clause ds
ppr_dec _ (TySynD t xs rhs)
= ppr_tySyn empty t (hsep (map ppr xs)) rhs
ppr_dec _ (DataD ctxt t xs cs decs)
= ppr_data empty ctxt t (hsep (map ppr xs)) cs decs
ppr_dec _ (NewtypeD ctxt t xs c decs)
= ppr_newtype empty ctxt t (sep (map ppr xs)) c decs
ppr_dec _ (ClassD ctxt c xs fds ds)
= text "class" <+> pprCxt ctxt <+> ppr c <+> hsep (map ppr xs) <+> ppr fds
$$ where_clause ds
ppr_dec _ (InstanceD ctxt i ds) = text "instance" <+> pprCxt ctxt <+> ppr i
$$ where_clause ds
ppr_dec _ (SigD f t) = pprPrefixOcc f <+> text "::" <+> ppr t
ppr_dec _ (ForeignD f) = ppr f
ppr_dec _ (InfixD fx n) = pprFixity n fx
ppr_dec _ (PragmaD p) = ppr p
ppr_dec isTop (FamilyD flav tc tvs k)
= ppr flav <+> maybeFamily <+> ppr tc <+> hsep (map ppr tvs) <+> maybeKind
where
maybeFamily | isTop = text "family"
| otherwise = empty
maybeKind | (Just k') <- k = text "::" <+> ppr k'
| otherwise = empty
ppr_dec isTop (DataInstD ctxt tc tys cs decs)
= ppr_data maybeInst ctxt tc (sep (map pprParendType tys)) cs decs
where
maybeInst | isTop = text "instance"
| otherwise = empty
ppr_dec isTop (NewtypeInstD ctxt tc tys c decs)
= ppr_newtype maybeInst ctxt tc (sep (map pprParendType tys)) c decs
where
maybeInst | isTop = text "instance"
| otherwise = empty
ppr_dec isTop (TySynInstD tc (TySynEqn tys rhs))
= ppr_tySyn maybeInst tc (sep (map pprParendType tys)) rhs
where
maybeInst | isTop = text "instance"
| otherwise = empty
ppr_dec _ (ClosedTypeFamilyD tc tvs mkind eqns)
= hang (hsep [ text "type family", ppr tc, hsep (map ppr tvs), maybeKind
, text "where" ])
nestDepth (vcat (map ppr_eqn eqns))
where
maybeKind | (Just k') <- mkind = text "::" <+> ppr k'
| otherwise = empty
ppr_eqn (TySynEqn lhs rhs)
= ppr tc <+> sep (map pprParendType lhs) <+> text "=" <+> ppr rhs
ppr_dec _ (RoleAnnotD name roles)
= hsep [ text "type role", ppr name ] <+> hsep (map ppr roles)
ppr_data :: Doc -> Cxt -> Name -> Doc -> [Con] -> [Name] -> Doc
ppr_data maybeInst ctxt t argsDoc cs decs
= sep [text "data" <+> maybeInst
<+> pprCxt ctxt
<+> ppr t <+> argsDoc,
nest nestDepth (sep (pref $ map ppr cs)),
if null decs
then empty
else nest nestDepth
$ text "deriving"
<+> parens (hsep $ punctuate comma $ map ppr decs)]
where
pref :: [Doc] -> [Doc]
pref [] = [] -- No constructors; can't happen in H98
pref (d:ds) = (char '=' <+> d):map (char '|' <+>) ds
ppr_newtype :: Doc -> Cxt -> Name -> Doc -> Con -> [Name] -> Doc
ppr_newtype maybeInst ctxt t argsDoc c decs
= sep [text "newtype" <+> maybeInst
<+> pprCxt ctxt
<+> ppr t <+> argsDoc,
nest 2 (char '=' <+> ppr c),
if null decs
then empty
else nest nestDepth
$ text "deriving"
<+> parens (hsep $ punctuate comma $ map ppr decs)]
ppr_tySyn :: Doc -> Name -> Doc -> Type -> Doc
ppr_tySyn maybeInst t argsDoc rhs
= text "type" <+> maybeInst <+> ppr t <+> argsDoc <+> text "=" <+> ppr rhs
------------------------------
instance Ppr FunDep where
ppr (FunDep xs ys) = hsep (map ppr xs) <+> text "->" <+> hsep (map ppr ys)
ppr_list [] = empty
ppr_list xs = char '|' <+> sep (punctuate (text ", ") (map ppr xs))
------------------------------
instance Ppr FamFlavour where
ppr DataFam = text "data"
ppr TypeFam = text "type"
------------------------------
instance Ppr Foreign where
ppr (ImportF callconv safety impent as typ)
= text "foreign import"
<+> showtextl callconv
<+> showtextl safety
<+> text (show impent)
<+> ppr as
<+> text "::" <+> ppr typ
ppr (ExportF callconv expent as typ)
= text "foreign export"
<+> showtextl callconv
<+> text (show expent)
<+> ppr as
<+> text "::" <+> ppr typ
------------------------------
instance Ppr Pragma where
ppr (InlineP n inline rm phases)
= text "{-#"
<+> ppr inline
<+> ppr rm
<+> ppr phases
<+> ppr n
<+> text "#-}"
ppr (SpecialiseP n ty inline phases)
= text "{-# SPECIALISE"
<+> maybe empty ppr inline
<+> ppr phases
<+> sep [ ppr n <+> text "::"
, nest 2 $ ppr ty ]
<+> text "#-}"
ppr (SpecialiseInstP inst)
= text "{-# SPECIALISE instance" <+> ppr inst <+> text "#-}"
ppr (RuleP n bndrs lhs rhs phases)
= sep [ text "{-# RULES" <+> pprString n <+> ppr phases
, nest 4 $ ppr_forall <+> ppr lhs
, nest 4 $ char '=' <+> ppr rhs <+> text "#-}" ]
where ppr_forall | null bndrs = empty
| otherwise = text "forall"
<+> fsep (map ppr bndrs)
<+> char '.'
ppr (AnnP tgt expr)
= text "{-# ANN" <+> target1 tgt <+> ppr expr <+> text "#-}"
where target1 ModuleAnnotation = text "module"
target1 (TypeAnnotation t) = text "type" <+> ppr t
target1 (ValueAnnotation v) = ppr v
------------------------------
instance Ppr Inline where
ppr NoInline = text "NOINLINE"
ppr Inline = text "INLINE"
ppr Inlinable = text "INLINABLE"
------------------------------
instance Ppr RuleMatch where
ppr ConLike = text "CONLIKE"
ppr FunLike = empty
------------------------------
instance Ppr Phases where
ppr AllPhases = empty
ppr (FromPhase i) = brackets $ int i
ppr (BeforePhase i) = brackets $ char '~' <> int i
------------------------------
instance Ppr RuleBndr where
ppr (RuleVar n) = ppr n
ppr (TypedRuleVar n ty) = parens $ ppr n <+> text "::" <+> ppr ty
------------------------------
instance Ppr Clause where
ppr (Clause ps rhs ds) = hsep (map (pprPat appPrec) ps) <+> pprBody True rhs
$$ where_clause ds
------------------------------
instance Ppr Con where
ppr (NormalC c sts) = ppr c <+> sep (map pprStrictType sts)
ppr (RecC c vsts)
= ppr c <+> braces (sep (punctuate comma $ map pprVarStrictType vsts))
ppr (InfixC st1 c st2) = pprStrictType st1
<+> pprName' Infix c
<+> pprStrictType st2
ppr (ForallC ns ctxt con) = text "forall" <+> hsep (map ppr ns)
<+> char '.' <+> sep [pprCxt ctxt, ppr con]
------------------------------
pprVarStrictType :: (Name, Strict, Type) -> Doc
-- Slight infelicity: will print a non-atomic type with parens
pprVarStrictType (v, str, t) = ppr v <+> text "::" <+> pprStrictType (str, t)
------------------------------
pprStrictType :: (Strict, Type) -> Doc
-- Prints with parens if not already atomic
pprStrictType (IsStrict, t) = char '!' <> pprParendType t
pprStrictType (NotStrict, t) = pprParendType t
pprStrictType (Unpacked, t) = text "{-# UNPACK #-} !" <> pprParendType t
------------------------------
pprParendType :: Type -> Doc
pprParendType (VarT v) = ppr v
pprParendType (ConT c) = ppr c
pprParendType (TupleT 0) = text "()"
pprParendType (TupleT n) = parens (hcat (replicate (n-1) comma))
pprParendType (UnboxedTupleT n) = hashParens $ hcat $ replicate (n-1) comma
pprParendType ArrowT = parens (text "->")
pprParendType ListT = text "[]"
pprParendType (LitT l) = pprTyLit l
pprParendType (PromotedT c) = text "'" <> ppr c
pprParendType (PromotedTupleT 0) = text "'()"
pprParendType (PromotedTupleT n) = quoteParens (hcat (replicate (n-1) comma))
pprParendType PromotedNilT = text "'[]"
pprParendType PromotedConsT = text "(':)"
pprParendType StarT = char '*'
pprParendType ConstraintT = text "Constraint"
pprParendType other = parens (ppr other)
instance Ppr Type where
ppr (ForallT tvars ctxt ty)
= text "forall" <+> hsep (map ppr tvars) <+> text "."
<+> sep [pprCxt ctxt, ppr ty]
ppr (SigT ty k) = ppr ty <+> text "::" <+> ppr k
ppr ty = pprTyApp (split ty)
pprTyApp :: (Type, [Type]) -> Doc
pprTyApp (ArrowT, [arg1,arg2]) = sep [pprFunArgType arg1 <+> text "->", ppr arg2]
pprTyApp (EqualityT, [arg1, arg2]) =
sep [pprFunArgType arg1 <+> text "~", ppr arg2]
pprTyApp (ListT, [arg]) = brackets (ppr arg)
pprTyApp (TupleT n, args)
| length args == n = parens (sep (punctuate comma (map ppr args)))
pprTyApp (PromotedTupleT n, args)
| length args == n = quoteParens (sep (punctuate comma (map ppr args)))
pprTyApp (fun, args) = pprParendType fun <+> sep (map pprParendType args)
pprFunArgType :: Type -> Doc -- Should really use a precedence argument
-- Everything except forall and (->) binds more tightly than (->)
pprFunArgType ty@(ForallT {}) = parens (ppr ty)
pprFunArgType ty@((ArrowT `AppT` _) `AppT` _) = parens (ppr ty)
pprFunArgType ty@(SigT _ _) = parens (ppr ty)
pprFunArgType ty = ppr ty
split :: Type -> (Type, [Type]) -- Split into function and args
split t = go t []
where go (AppT t1 t2) args = go t1 (t2:args)
go ty args = (ty, args)
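-- For instance (illustrative): the application @Either Int Bool@ arrives as
-- @AppT (AppT (ConT ''Either) (ConT ''Int)) (ConT ''Bool)@ and is split into
-- @(ConT ''Either, [ConT ''Int, ConT ''Bool])@, i.e. the head of the
-- application plus its argument types in order.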
pprTyLit :: TyLit -> Doc
pprTyLit (NumTyLit n) = integer n
pprTyLit (StrTyLit s) = text (show s)
instance Ppr TyLit where
ppr = pprTyLit
------------------------------
instance Ppr TyVarBndr where
ppr (PlainTV nm) = ppr nm
ppr (KindedTV nm k) = parens (ppr nm <+> text "::" <+> ppr k)
instance Ppr Role where
ppr NominalR = text "nominal"
ppr RepresentationalR = text "representational"
ppr PhantomR = text "phantom"
ppr InferR = text "_"
------------------------------
pprCxt :: Cxt -> Doc
pprCxt [] = empty
pprCxt [t] = ppr t <+> text "=>"
pprCxt ts = parens (sep $ punctuate comma $ map ppr ts) <+> text "=>"
------------------------------
instance Ppr Range where
ppr = brackets . pprRange
where pprRange :: Range -> Doc
pprRange (FromR e) = ppr e <> text ".."
pprRange (FromThenR e1 e2) = ppr e1 <> text ","
<> ppr e2 <> text ".."
pprRange (FromToR e1 e2) = ppr e1 <> text ".." <> ppr e2
pprRange (FromThenToR e1 e2 e3) = ppr e1 <> text ","
<> ppr e2 <> text ".."
<> ppr e3
------------------------------
where_clause :: [Dec] -> Doc
where_clause [] = empty
where_clause ds = nest nestDepth $ text "where" <+> vcat (map (ppr_dec False) ds)
showtextl :: Show a => a -> Doc
showtextl = text . map toLower . show
hashParens :: Doc -> Doc
hashParens d = text "(# " <> d <> text " #)"
quoteParens :: Doc -> Doc
quoteParens d = text "'(" <> d <> text ")"
|
holzensp/ghc
|
libraries/template-haskell/Language/Haskell/TH/Ppr.hs
|
bsd-3-clause
| 21,874 | 25 | 14 | 6,675 | 8,088 | 3,965 | 4,123 | 448 | 6 |
{-# LANGUAGE OverloadedStrings, FlexibleContexts #-}
module Network.Wai.Handler.Warp.Header where
import Data.Array
import Data.Array.ST
import Network.HTTP.Types
import Network.Wai.Handler.Warp.Types
----------------------------------------------------------------
-- | Array for a set of HTTP headers.
type IndexedHeader = Array Int (Maybe HeaderValue)
----------------------------------------------------------------
indexRequestHeader :: RequestHeaders -> IndexedHeader
indexRequestHeader hdr = traverseHeader hdr requestMaxIndex requestKeyIndex
idxContentLength,idxTransferEncoding,idxExpect :: Int
idxConnection,idxRange,idxHost :: Int
idxContentLength = 0
idxTransferEncoding = 1
idxExpect = 2
idxConnection = 3
idxRange = 4
idxHost = 5
-- | The size for 'IndexedHeader' for HTTP Request.
-- From 0 to this corresponds to \"Content-Length\", \"Transfer-Encoding\",
-- \"Expect\", \"Connection\", \"Range\", and \"Host\".
requestMaxIndex :: Int
requestMaxIndex = 5
requestKeyIndex :: HeaderName -> Int
requestKeyIndex "content-length" = idxContentLength
requestKeyIndex "transfer-encoding" = idxTransferEncoding
requestKeyIndex "expect" = idxExpect
requestKeyIndex "connection" = idxConnection
requestKeyIndex "range" = idxRange
requestKeyIndex "host" = idxHost
requestKeyIndex _ = -1
defaultIndexRequestHeader :: IndexedHeader
defaultIndexRequestHeader = array (0,requestMaxIndex) [(i,Nothing)|i<-[0..requestMaxIndex]]
----------------------------------------------------------------
indexResponseHeader :: ResponseHeaders -> IndexedHeader
indexResponseHeader hdr = traverseHeader hdr responseMaxIndex responseKeyIndex
idxServer, idxDate :: Int
--idxContentLength = 0
idxServer = 1
idxDate = 2
-- | The size for 'IndexedHeader' for HTTP Response.
responseMaxIndex :: Int
responseMaxIndex = 2
responseKeyIndex :: HeaderName -> Int
responseKeyIndex "content-length" = idxContentLength
responseKeyIndex "server" = idxServer
responseKeyIndex "date" = idxDate
responseKeyIndex _ = -1
----------------------------------------------------------------
traverseHeader :: [Header] -> Int -> (HeaderName -> Int) -> IndexedHeader
traverseHeader hdr maxidx getIndex = runSTArray $ do
arr <- newArray (0,maxidx) Nothing
mapM_ (insert arr) hdr
return arr
where
insert arr (key,val)
| idx == -1 = return ()
| otherwise = writeArray arr idx (Just val)
where
idx = getIndex key
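-- A small usage sketch (the header list below is an assumption, not part of
-- this module): index a request's headers, then read individual slots back
-- out of the array.
--
-- >>> let ih = indexRequestHeader [("Host", "example.com"), ("Range", "bytes=0-99")]
-- >>> ih ! idxHost
-- Just "example.com"
-- >>> ih ! idxContentLength
-- Nothing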
|
dylex/wai
|
warp/Network/Wai/Handler/Warp/Header.hs
|
mit
| 2,591 | 0 | 11 | 482 | 496 | 276 | 220 | 50 | 1 |
-- To test with GHC before liftA2 was added to the Applicative
-- class, remove the definition of liftA2 here, and import
-- liftA2 separately from Control.Applicative.
{-# LANGUAGE DeriveTraversable, GADTs, DataKinds,
DeriveFunctor, StandaloneDeriving #-}
module Main where
import Control.Applicative (Applicative (..))
import Data.Monoid (Sum (..))
import qualified Data.Array as A
data Tree a = Leaf a a | Node (Tree a) (Tree a)
deriving (Functor, Foldable, Traversable)
buildTree :: Int -> a -> Tree a
buildTree 0 a = Leaf a a
buildTree n a =
let subtree = buildTree (n - 1) a
in Node subtree subtree
data Nat = Z | S Nat
data Vec n a where
Nil :: Vec 'Z a
Cons :: a -> !(Vec n a) -> Vec ('S n) a
deriving instance Functor (Vec n)
deriving instance Foldable (Vec n)
deriving instance Show a => Show (Vec n a)
class Pure n where
pure' :: a -> Vec n a
instance Pure 'Z where
pure' _ = Nil
instance Pure n => Pure ('S n) where
pure' a = Cons a (pure' a)
instance Pure n => Applicative (Vec n) where
pure = pure'
(<*>) = apVec
liftA2 = liftA2Vec
apVec :: Vec n (a -> b) -> Vec n a -> Vec n b
apVec Nil Nil = Nil
apVec (Cons f fs) (Cons x xs) = f x `Cons` apVec fs xs
liftA2Vec :: (a -> b -> c) -> Vec n a -> Vec n b -> Vec n c
liftA2Vec _ Nil Nil = Nil
liftA2Vec f (Cons x xs) (Cons y ys) = f x y `Cons` liftA2Vec f xs ys
data SomeVec a where
SomeVec :: Pure n => Vec n a -> SomeVec a
replicateVec :: Int -> a -> SomeVec a
replicateVec 0 _ = SomeVec Nil
replicateVec n a =
case replicateVec (n - 1) a of
SomeVec v -> SomeVec (a `Cons` v)
ones :: SomeVec Int
ones = replicateVec 6000 (1 :: Int)
theTree :: Tree ()
theTree = buildTree 7 ()
blah :: SomeVec (Tree Int)
blah = case ones of
SomeVec v -> SomeVec $ traverse (const v) theTree
main = case blah of
SomeVec v -> print $ getSum $ foldMap (foldMap Sum) v
|
ezyang/ghc
|
libraries/base/tests/T13191.hs
|
bsd-3-clause
| 1,877 | 0 | 11 | 444 | 817 | 420 | 397 | 54 | 1 |
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE MagicHash, UnliftedFFITypes #-}
{-# LANGUAGE ForeignFunctionInterface #-}
module Test10313 where
import "b\x61se" Data.List
{-# WARNING Logic
, solverCheckAndGetModel
"New Z3 API support is still incomplete and fragile: \
\you may experience segmentation faults!"
#-}
{-# Deprecated Logic
, solverCheckAndGetModel
"Deprecation: \
\you may experience segmentation faults!"
#-}
data {-# Ctype "foo\x63" "b\x61r" #-} Logic = Logic
-- Should warn
foo1 x = x
{-# RULES "foo1\x67" [ 1] forall x. foo1 x = x #-}
foreign import prim unsafe "a\x62" a :: IO Int
{-# INLINE strictStream #-}
strictStream (Bitstream l v)
= {-# CORE "Strict Bitstream stre\x61m" #-}
S.concatMap stream (GV.stream v)
`S.sized`
Exact l
b = {-# SCC "foo\x64" #-} 006
c = {-# GENERATED "foob\x61r" 1 : 2 - 3 : 4 #-} 0.00
|
siddhanathan/ghc
|
testsuite/tests/ghc-api/annotations/Test10313.hs
|
bsd-3-clause
| 934 | 0 | 9 | 235 | 106 | 63 | 43 | -1 | -1 |
module B where
import A
data Y = Y Int Int
thing :: X -> a
thing (X (Y a b)) = thing (X (Y a b))
|
urbanslug/ghc
|
testsuite/tests/driver/T8184/B.hs
|
bsd-3-clause
| 100 | 0 | 9 | 30 | 66 | 35 | 31 | 5 | 1 |
{-
------- Forwarded Message
Date: Wed, 30 Nov 1994 16:34:18 +0100
From: John Hughes <[email protected]>
To: [email protected], [email protected]
Subject: Nice little program
Lennart, Simon,
You might like to look at the fun little program below.
THUMBS DOWN to hbc for compiling it (it prints [72, 101, 108, 108, 111])
THUMBS UP to ghc for rejecting it --- but what an error message!
nhc and gofer both reject it with the right error message.
I haven't tried Yale Haskell.
Enjoy!
- ----------------------------
-}
class HasX a where
setX :: x->a->a
data X x = X x
instance HasX (X x) where
setX x (X _) = X x
changetype x = case setX x (X (error "change type!")) of X y->y
main = print (changetype "Hello" :: [Int])
{-
------- End of Forwarded Message
-}
|
siddhanathan/ghc
|
testsuite/tests/typecheck/should_fail/tcfail065.hs
|
bsd-3-clause
| 795 | 1 | 10 | 161 | 130 | 64 | 66 | 7 | 1 |
{-# OPTIONS_HADDOCK show-extensions #-}
{-# LANGUAGE Arrows #-}
{-# LANGUAGE RankNTypes #-}
{-|
Module : Data.Utils.Arrow
Description : arrow utilities
Copyright : (c) Lars Brünjes, 2016
License : MIT
Maintainer : [email protected]
Stability : experimental
Portability : portable
This module defines utility functions for /arrows/.
-}
module Data.Utils.Arrow
( ArrowConvolve(..)
, fmapArr
, pureArr
, apArr
, dimapArr
) where
import Control.Arrow
-- | Arrows implementing 'ArrowConvolve' can be mapped over containers.
-- This means that every functor (@f :: Hask -> Hask@) lifts to a functor (@a -> a@).
--
-- Instances should satisfy the following laws:
--
-- * @convolve id = id@
--
-- * @convolve (g . h) = convolve g . convolve h@
--
-- * @convolve . arr = arr . fmap@
--
class Arrow a => ArrowConvolve a where
convolve :: forall f b c. Functor f => a b c -> a (f b) (f c)
-- | A function suitable to define the canonical 'Functor' instance for arrows.
--
fmapArr :: Arrow a => (c -> d) -> a b c -> a b d
fmapArr f a = a >>^ f
-- | A function to define 'pure' for arrows.
-- Combining this with 'apArr', the canonical 'Applicative' instance for arrows can easily be defined.
--
pureArr :: Arrow a => c -> a b c
pureArr = arr . const
-- | A function to define @('<*>')@ for arrows.
-- Combining this with 'pureArr', the canonical 'Applicative' instance for arrows can easily be defined.
--
apArr :: Arrow a => a b (c -> d) -> a b c -> a b d
apArr a b = proc x -> do
f <- a -< x
y <- b -< x
returnA -< f y
-- | A function suitable to define the canonical 'Data.Profunctor.Profunctor' instance for arrows.
--
dimapArr :: Arrow a => (b -> c) -> (d -> e) -> a c d -> a b e
dimapArr f g a = f ^>> a >>^ g
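-- A brief sketch of the helpers above at the plain function arrow @(->)@
-- (inputs chosen only for illustration): 'apArr' behaves like the reader
-- Applicative, 'pureArr' ignores its input, and 'dimapArr' pre- and
-- post-composes.
--
-- >>> apArr (+) (* 2) 5
-- 15
-- >>> pureArr 'x' 42
-- 'x'
-- >>> dimapArr (+ 1) show (* 10) 3
-- "40"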
|
brunjlar/neural
|
src/Data/Utils/Arrow.hs
|
mit
| 1,781 | 1 | 12 | 415 | 359 | 193 | 166 | 23 | 1 |
{-# OPTIONS -XDeriveDataTypeable #-}
module Database where
import Data.Time.Calendar hiding (Day)
import System.Time hiding (Day, Month)
import Data.Generics
import Data.Map (Map)
import qualified Data.Map as Map
import Types
import ReservationUtils
type WV v = WebView Database v
users :: Map String (String, String)
users = Map.fromList [("martijn", ("p", "Martijn"))
,("", ("", "Anonymous"))
]
-- TODO: maybe this can be (a special) part of db?
newtype ReservationId = ReservationId {unReservationId :: Int} deriving (Show, Read, Eq, Ord)
-- must be Typeable and Data, because update functions in views (which must be Data etc.) are Database->Database
data Database = Database { allReservations :: Map ReservationId Reservation
} deriving (Eq, Show, Read, Typeable)
type Hours = Int
type Minutes = Int
type Day = Int
type Month = Int
type Year = Int
type Time = (Hours, Minutes)
type Date = (Day, Month, Year)
data Reservation =
Reservation { reservationId :: ReservationId
, date :: Date
, time :: Time
, name :: String
, nrOfPeople :: Int
, comment :: String
} deriving (Eq, Show, Read)
-- not safe, requires that id in reservation is respected by f
updateReservation :: ReservationId -> (Reservation -> Reservation) -> Database -> Database
updateReservation i f db =
let reservation = unsafeLookup (allReservations db) i
in db { allReservations = Map.insert i (f reservation) (allReservations db)
}
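-- A small sketch of safe usage (the helper 'rename' is illustrative and not
-- part of the original module): the update function passed in must leave the
-- stored id untouched and only change other fields.
rename :: ReservationId -> String -> Database -> Database
rename i newName = updateReservation i (\r -> r { name = newName })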
-- add error
removeReservation :: ReservationId -> Database -> Database
removeReservation i db = db { allReservations = Map.delete i (allReservations db) }
newReservation :: Database -> (Reservation, Database)
newReservation db =
let ids = [ i | ReservationId i <- map fst (Map.toList $ allReservations db) ]
newId = ReservationId $ if null ids then 0 else (maximum ids + 1)
newReservation = Reservation newId (0,0,0) (0,0) "" 0 ""
in (newReservation, db { allReservations = Map.insert newId newReservation (allReservations db) } )
unsafeLookup map key =
Map.findWithDefault
(error $ "element "++ show key ++ " not found in " ++ show map) key map
-- do we want extra params such as pig nrs in sub views?
-- error handling database access
mkInitialDatabase :: IO (Database)
mkInitialDatabase =
do { clockTime <- getClockTime
; ct <- toCalendarTime clockTime
; let (_, currentMonth, currentYear) = (ctDay ct, 1+fromEnum (ctMonth ct), ctYear ct)
months = take 12 $ iterate increaseMonth (currentMonth, currentYear)
monthsWithNrOfDays = [ (m,y,gregorianMonthLength (fromIntegral y) m) | (m,y) <- months ]
datedReservations = concat $
[ addDates' m y $ take nrOfDays lotOfReservations | (m,y,nrOfDays) <- monthsWithNrOfDays ]
; return $ Database $ Map.fromList $ addIds datedReservations
}
where addIds ress = [ (ReservationId i, Reservation (ReservationId i) dt tm nm nr c)
| (i,(dt, tm, nm, nr, c)) <- zip [0..] ress
]
addDates' m y resss = [ ((d,m,y),tm,nm,nr,c)
| (d,ress) <- zip [1..] resss
, (tm,nm,nr,c) <- ress
]
lotOfReservations = concat . repeat $
[ [ ((20,00), "Nathan", 2, "Nathan says hi")
, ((20,00), "Tommy", 3, "")
, ((20,00), "Paul", 2, "")
, ((20,30), "Bridget", 3, "")
, ((20,30), "Nicole", 4, "")
, ((22,00), "Ann", 8, "")
]
, [ ((21,00), "Charlie", 8, "Dinner at nine")
]
, [ ((18,00), "Sam", 3, "Would like the special menu")
]
, []
]
|
Oblosys/webviews
|
src/exec/Reservations/Database.hs
|
mit
| 4,042 | 0 | 15 | 1,313 | 1,221 | 706 | 515 | 71 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Main where
import HlLog
import HlAdif
import HlOptions
import Options.Applicative
import Prelude hiding (readFile, putStr)
import System.IO hiding (readFile, putStr)
import qualified Data.ByteString.Char8 as B
import Data.ByteString.Char8 (ByteString)
import qualified Data.ByteString.Search as S
import Data.ByteString.Lazy (toStrict)
import Data.Semigroup ((<>))
data Options = Options
{ getInputHandle :: IO Handle
}
getOptionsParserInfo :: IO (ParserInfo Options)
getOptionsParserInfo = do
return $ info (helper <*> (
Options
<$> inputHandleArgument
)) (
fullDesc
<> progDesc "Present the contents of an ADIF file as a Microsoft Office 2003 XML spreadsheet."
)
xmlRow :: [ByteString] -> ByteString
xmlRow bss = " <Row>\n" <> B.concat (map xmlCell bss) <> " </Row>\n"
xmlCell :: ByteString -> ByteString
xmlCell s = " <Cell><Data ss:Type=\"String\">" <> escaped <> "</Data></Cell>\n"
where
    escaped = foldl escape s [("&", "&amp;"), ("\"", "&quot;"), ("'", "&apos;"), ("<", "&lt;"), (">", "&gt;")]
escape :: ByteString -> (ByteString, ByteString) -> ByteString
escape bs (toS, toR) = toStrict $ S.replace toS toR bs
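-- For example (illustrative): xmlCell "a<b&c" yields a <Cell> element whose
-- text reads a&lt;b&amp;c, so user-supplied field values cannot break the
-- generated XML.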
main :: IO ()
main = getOptionsParserInfo >>= execParser >>= \opt -> do
h <- getInputHandle opt
parseResult <- adifLogParser <$> B.hGetContents h
case parseResult of
Left errorMsg -> putStrLn errorMsg
Right l -> do
B.putStr $
"<?xml version=\"1.0\"?>\n" <>
"<?mso-application progid=\"Excel.Sheet\"?>\n" <>
"<Workbook\n" <>
" xmlns=\"urn:schemas-microsoft-com:office:spreadsheet\"\n" <>
" xmlns:o=\"urn:schemas-microsoft-com:office:office\"\n" <>
" xmlns:x=\"urn:schemas-microsoft-com:office:excel\"\n" <>
" xmlns:ss=\"urn:schemas-microsoft-com:office:spreadsheet\"\n" <>
" xmlns:html=\"http://www.w3.org/TR/REC-html40\">\n" <>
" <DocumentProperties xmlns=\"urn:schemas-microsoft-com:office:office\">\n" <>
" <Author>HamLogHS</Author>\n" <>
" <LastAuthor>Self</LastAuthor>\n" <>
" <Created>2012-03-15T23:04:04Z</Created>\n" <> -- TODO: we could use the real date
" <Company>HamLogHS</Company>\n" <> -- TODO: is this necessary?
" <Version>1.0</Version>\n" <> -- TODO: and this?
" </DocumentProperties>\n" <>
" <ExcelWorkbook xmlns=\"urn:schemas-microsoft-com:office:excel\">\n" <> -- TODO: and this?
" <WindowHeight>6795</WindowHeight>\n" <>
" <WindowWidth>8460</WindowWidth>\n" <>
" <WindowTopX>120</WindowTopX>\n" <>
" <WindowTopY>15</WindowTopY>\n" <>
" <ProtectStructure>False</ProtectStructure>\n" <>
" <ProtectWindows>False</ProtectWindows>\n" <>
" </ExcelWorkbook>\n" <>
" <Styles>\n" <>
" <Style ss:ID=\"Default\" ss:Name=\"Normal\">\n" <>
" <Alignment ss:Vertical=\"Bottom\" />\n" <>
" <Borders />\n" <>
" <Font />\n" <>
" <Interior />\n" <>
" <NumberFormat />\n" <>
" <Protection />\n" <>
" </Style>\n" <>
" <Style ss:ID=\"s21\">\n" <>
" <Font x:Family=\"Swiss\" ss:Bold=\"1\" />\n" <>
" </Style>\n" <>
" </Styles>\n" <>
" <Worksheet ss:Name=\"QSOs\">\n" <>
" <Table ss:ExpandedColumnCount=\"2\" ss:ExpandedRowCount=\"5\"\n" <>
" x:FullColumns=\"1\" x:FullRows=\"1\">\n"
mapM_ B.putStr $ map xmlRow $ qsoTable l
B.putStr $
" </Table>\n" <>
" <WorksheetOptions xmlns=\"urn:schemas-microsoft-com:office:excel\">\n" <>
" <Print>\n" <>
" <ValidPrinterInfo />\n" <>
" <HorizontalResolution>600</HorizontalResolution>\n" <>
" <VerticalResolution>600</VerticalResolution>\n" <>
" </Print>\n" <>
" <Selected />\n" <>
" <Panes>\n" <>
" <Pane>\n" <>
" <Number>3</Number>\n" <>
" <ActiveRow>5</ActiveRow>\n" <>
" <ActiveCol>1</ActiveCol>\n" <>
" </Pane>\n" <>
" </Panes>\n" <>
" <Table>\n" <>
" </Table>\n" <>
" <ProtectObjects>False</ProtectObjects>\n" <>
" <ProtectScenarios>False</ProtectScenarios>\n" <>
" </WorksheetOptions>\n" <>
" </Worksheet>\n" <>
"</Workbook>"
|
netom/hamloghs
|
app/hl-to-msoxml.hs
|
mit
| 5,103 | 0 | 53 | 1,802 | 705 | 378 | 327 | 101 | 2 |
{-# LANGUAGE OverloadedStrings, RecordWildCards #-}
module MySafeStore
(
initDB
, insertDB
, preparedInsertDB
, readDB
, closeDB
, sanitize
, DataStore
, UserInput
, readInput
) where
import Prelude hiding (takeWhile)
import Database.SQLite3
import Data.Monoid ((<>))
import Data.Text (Text, takeWhile)
newtype DataStore = DataStore {
conn :: Database
}
data Clean
data Dirty
newtype UserInput a = UserInput {
input :: Text
}
readInput :: Text -> UserInput Dirty
readInput = UserInput
initDB :: IO DataStore
initDB = do
conn <- open ":memory:"
exec conn "CREATE TABLE IF NOT EXISTS person (id INTEGER PRIMARY KEY, name TEXT)"
exec conn "INSERT INTO person (name) VALUES ('Tim')"
exec conn "INSERT INTO person (name) VALUES ('Mark')"
exec conn "INSERT INTO person (name) VALUES ('Sarah')"
return $ DataStore conn
insertDB :: DataStore -> UserInput Clean -> IO ()
insertDB DataStore{..} UserInput{..} =
exec conn $ "INSERT INTO person (name) VALUES ('" <> input <> "')"
-- The way you should *actually* build up this insert. :)
preparedInsertDB :: DataStore -> Text -> IO ()
preparedInsertDB DataStore{..} name = do
statement <- prepare conn "INSERT INTO person (name) VALUES (?)"
bindSQLData statement 1 (SQLText name)
_ <- step statement
return ()
readDB :: DataStore -> IO ()
readDB DataStore{..} = execPrint conn "select * from person"
closeDB :: DataStore -> IO ()
closeDB = close . conn
-- Please don't use this sanitization function in real life!
sanitize :: UserInput Dirty -> UserInput Clean
sanitize =
UserInput . takeWhile (`notElem` [';','\'','"']) . input
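-- A brief usage sketch ('demo' is an illustrative name, not part of the
-- module's API): the phantom tag forces every value that reaches 'insertDB'
-- through 'sanitize'; handing over the dirty input directly would not type
-- check.
demo :: IO ()
demo = do
  db <- initDB
  let dirty = readInput "Robert'); DROP TABLE person;--"
  insertDB db (sanitize dirty)   -- accepted: UserInput Clean
  -- insertDB db dirty           -- rejected: expected UserInput Clean
  readDB db
  closeDB db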
|
haskell-web-intro/secdev
|
examples/common/src/MySafeStore.hs
|
mit
| 1,636 | 0 | 9 | 317 | 427 | 225 | 202 | -1 | -1 |
{-# LANGUAGE ScopedTypeVariables #-} -- Needed to catch exceptions properly
-- | Author: Thorsten Rangwich
-- | See file LICENSE for details on using this code.
-- | This module contains a TCP/IP socket server that executes functions
-- | over a network socket. A particular protocol is used to send a one
-- | line request with a function name and parameters. The function is
-- | executed and the result sent back to the caller. Results are provided
-- | in a table like form, where each row may even contain a different number
-- | of columns.
-- | A particular function interface is specified. Users of the socket server
-- | implement these non-network functions with this protocol and the server provides
-- | them over the network. Every function is either a pure function or a pair
-- | of a pure function and an IO function. The pure function returns a possibly
-- | modified state dictionary, but is not allowed to do IO.
-- | In the case of a pair of functions, the pure function returns the modified
-- | state dictionary *and* a number of arguments the non pure function can
-- | read as commands. The non pure function is not allowed to return a modified
-- | state; its effects are side effects only.
-- | There is a special case of a shutdown function (pure function), that returns
-- | a flag, if the server shall shut down.
-- | Once started, the server waits for network calls and calls the registered
-- | network functions until receiving a shutdown call.
-- | By default, a shutdown is initiated if the server receives a function call
-- | named shutdown. You can override by defining a function named shutdown.
-- |
module SocketServer
(
-- * Public API functions
serveSocketUntilShutdown,
pushHandler,
     -- * Type declarations for public API
HandlerFunc,
SyncFunc,
-- * API interface
-- ** Initialisation arguments
ConnectionArguments(..),
-- **Default arguments
connectionDefault,
-- * Implementation API (you will not need that)
-- ** Argument type declarations
SocketFunction(..),
FunctionRegistry,
-- ** Default separator
separator,
     -- ** Character marking really empty lines
nullCharacter,
-- * Test functions
test
)
where
import qualified Network.Socket as Socket
import qualified System.IO as SysIO
import qualified Data.Map as Map
import qualified BaseTools
import qualified Control.Exception as Exception
-- | Separator used by network functions. Only a native client will need that.
-- Better use an integrated client from here (not yet implemented).
separator :: Char
separator = '|'
-- | Character sent within a line, if the line originally was empty. This is to distinguish empty lines
-- | from lines containing really no fields.
nullCharacter :: Char
nullCharacter = '\x1f'
-- | Prototype of pure network function.
--
-- * State dictionary
--
-- * Privileged access
--
-- * List of command arguments
--
-- * Return: Updated state dictionary, result table, update info
type HandlerFunc = BaseTools.Dictionary -> Bool -> [String] -> (BaseTools.Dictionary,[[String]], [[String]])
-- | Prototype of a network function that may shut down the server.
-- | This prototype is very similar to the pure network function, but returns an additional value signalling
-- | a shutdown after processing. You do not need one, but should define one named "shutdown" if you don't want
-- | the server to shut down if it receives a function call with a function not in the registry and named
-- | "shutdown". Arguments:
-- |
-- | * State dictionary
-- |
-- | * Privileged flag
-- |
-- | * Return: List of result (empty, if not necessary), Information for synchronisation function, shutdown flag
type ShutdownFunc = BaseTools.Dictionary -> Bool -> [String] -> ([[String]], [[String]], Bool)
-- | Prototype of network function doing file synchronisations if necessary. Takes these arguments:
-- |
-- | * Update commands
-- |
-- | * State dictionary
type SyncFunc = [[String]] -> BaseTools.Dictionary -> IO ()
-- | Socket function type. Either a pure function or a pair of a pure function and a non pure function.
data SocketFunction = SyncLess HandlerFunc -- ^ Only pass a pure function.
| Syncing HandlerFunc SyncFunc -- ^ Pass pure function and synchronisation function
| SyncLessShutdown ShutdownFunc -- ^ Pure function may signal shutdown, no synchronisation
| SyncingShutdown ShutdownFunc SyncFunc -- ^ Pure function may signal shutdown, synchronise eventually
-- | Map by name to a socket function.
type FunctionRegistry = Map.Map String SocketFunction
-- | Argument type for serveSocketUntilShutdown function
data ConnectionArguments = ConnectionArguments {
serverName :: String, -- ^ Server name or IP address
portName :: String, -- ^ Port as string or service name
privileged :: [String] -- ^ List of privileged addresses
} deriving Show
-- | Internal type to return a function for synchronisation and the data that function will need.
-- | The data is in a format the pure function would return. The function is packaged together with
-- | the pure function given on service initialisation.
data SyncingPair = SyncingPair SyncFunc [[String]]
| NoSync
-- | Extract contents from syncing pair. One could define defaults for NoSync, but
-- | this really is not needed.
syncData :: SyncingPair -- ^ Package of function and data
-> [[String]] -- ^ The data
syncData (SyncingPair _ d) = d
-- | Extract function from the syncing pair. One could as well define a default here for NoSync,
-- | but we really do not need it currently.
syncFunc :: SyncingPair -- ^ Package of function and data
-> SyncFunc -- ^ The function
syncFunc (SyncingPair f _) = f
-- | Default arguments. Use record syntax to update with your settings.
connectionDefault :: ConnectionArguments
connectionDefault = ConnectionArguments {
serverName = "localhost"
, portName = "1970"
, privileged = []
}
-- | Add pair of handler functions to registry
pushHandler :: FunctionRegistry -- ^ Current function registry
-> String -- ^ Name of this function to register
-> SocketFunction -- ^ SocketFunction to map
-> FunctionRegistry -- ^ Updated function registry
pushHandler r n f = case Map.lookup n r of
Nothing -> Map.insert n f r
_ -> error "Override handler function not supported. Most probably implementation error"
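-- A short sketch (the names 'echoHandler' and 'exampleRegistry' are
-- illustrative, not part of this module's API): a pure handler that echoes
-- its arguments back as a single result row, requests no synchronisation and
-- is registered under the command name "echo".
echoHandler :: HandlerFunc
echoHandler st _privileged args = (st, [args], [])

exampleRegistry :: FunctionRegistry
exampleRegistry = pushHandler Map.empty "echo" (SyncLess echoHandler)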
-- | Accept a registry of functions, a state dictionary, a server name and port/service name and
-- a list of privileged addresses and execute the functions as requested via the TCP socket.
serveSocketUntilShutdown :: FunctionRegistry -- ^ Registry of functions to execute via the socket.
-> BaseTools.Dictionary -- ^ State dictionary passed through all calls.
-> ConnectionArguments -- ^ Connection arguments
-> IO ()
serveSocketUntilShutdown registry storage server = do
addrs <- Socket.getAddrInfo
(Just (Socket.defaultHints { Socket.addrFlags = [Socket.AI_CANONNAME],
Socket.addrFamily = Socket.AF_INET,
Socket.addrSocketType = Socket.Stream }))
(Just $ serverName server)
(Just $ portName server)
let addr = head addrs
sock <- Socket.socket (Socket.addrFamily addr) (Socket.addrSocketType addr) (Socket.addrProtocol addr)
Socket.bindSocket sock (Socket.addrAddress addr)
Socket.listen sock 5
privList <- mapM Socket.inet_addr $ privileged server
print "Waiting for clients..."
_serveSocketRequest sock registry storage privList
-- | Extract address part from socket address. There should be a Network.Socket function for that ?!
_getAddressPart :: Socket.SockAddr -> Socket.HostAddress
_getAddressPart s = case s of
Socket.SockAddrInet port address -> address
_ -> error "Only IPV4 connections supported"
-- | Process one request via socket. This is called blocking.
_serveSocketRequest :: Socket.Socket -- ^ Bound socket
-> FunctionRegistry -- ^ Function registry
-> BaseTools.Dictionary -- ^ Current state.
-> [Socket.HostAddress] -- ^ List of privileged addresses
-> IO ()
_serveSocketRequest sock registry storage privilegedAddresses =
    -- catch (using catches, but this may be what Prelude.catch does):
-- IOException
-- NoMethodError (if using Dynamic?)
-- PatternMatchFail
-- ErrorCall
let handleError e = print ("Error in socket function:" ++ show e) >> return (storage, False)
handleRequest = do
(conn, addr) <- Socket.accept sock
let addrPart = _getAddressPart addr
let privileged = addrPart `elem` privilegedAddresses
print $ "Incoming connection:" ++ show addr ++ ", privileged:" ++ show privileged
hdl <- Socket.socketToHandle conn SysIO.ReadWriteMode
SysIO.hSetBuffering hdl SysIO.LineBuffering
line <- SysIO.hGetLine hdl
let strippedLine = BaseTools.splitBy separator $ take (length line - 1 ) line
let command = head strippedLine
let arguments = drop 1 strippedLine
print $ "Got command:" ++ command ++ ", args:" ++ show arguments
let (newMem, dbg, res, sync, sdown) = case Map.lookup command registry of
Nothing -> (storage
, Just $ "Function " ++ command ++ " not mapped, ignored"
, []
, NoSync
, command=="shutdown")
Just func -> _servePureCall func arguments storage privileged
case dbg of
Just message -> print message
Nothing -> return ()
case sync of
NoSync -> return ()
_ -> syncFunc sync (syncData sync) newMem
_writeOutput hdl res
SysIO.hClose hdl
return (newMem, sdown)
in do
-- [Handler (\ (e :: ErrorCall) -> print e)]
(mem, shutdown) <- Exception.catches handleRequest [
Exception.Handler (\ (e :: Exception.ErrorCall) -> handleError e),
Exception.Handler (\ (e :: Exception.IOException) -> handleError e)]
if shutdown then print "Shutdown request received." else _serveSocketRequest sock registry mem privilegedAddresses
-- | Internal function. Process one pure network function request
_servePureCall :: SocketFunction -- ^ Socket function to execute
-> [String] -- ^ Arguments for socket function
-> BaseTools.Dictionary -- ^ State dictionary
-> Bool -- ^ Privileged flag
-> (BaseTools.Dictionary, Maybe String, [[String]], SyncingPair, Bool)
_servePureCall (SyncLess handler) args storage priv =
let (s, res, _) = handler storage priv args
in (s, Nothing, res, NoSync, False)
_servePureCall (Syncing handler sync) args storage priv =
let (s, res, sy) = handler storage priv args
in (s, Nothing, res, SyncingPair sync sy, False)
_servePureCall (SyncLessShutdown handler) args storage priv =
let (res, _, sd) = handler storage priv args
in (storage, Just "Shutdown", res, NoSync, sd)
_servePureCall (SyncingShutdown handler sync) args storage priv =
let (res, sy, sd) = handler storage priv args
in (storage, Nothing, res, SyncingPair sync sy, sd)
-- | Write result list via socket to client. Every call is terminated with an empty line terminated by \r\n.
-- | The implementation simply does a fold from the right printing any row and terminating the result.
_writeOutput :: SysIO.Handle -- ^ Socket handle
-> [[String]] -- ^ Result table (possibly not rectangular)
-> IO ()
_writeOutput fd =
foldr
(\x ->
(>>)
(if null x then SysIO.hPutStr fd (nullCharacter:"\r\n")
else SysIO.hPutStr fd $ BaseTools.mergeWith separator x ++ "\r\n")
)
(SysIO.hPutStr fd "\r\n")
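-- For illustration (assuming BaseTools.mergeWith joins the fields of a row
-- with the given separator): a result table [["a","b"], []] is written to the
-- client as
--
--   a|b\r\n
--   \x1f\r\n
--   \r\n
--
-- i.e. one line per row, an otherwise empty row marked with 'nullCharacter',
-- and a final empty line terminating the reply.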
test :: IO ()
test = serveSocketUntilShutdown Map.empty BaseTools.emptyConfig connectionDefault
|
tnrangwi/CompareProgrammingUsingHolidayManager
|
haskell/SocketServer.hs
|
mit
| 12,605 | 0 | 20 | 3,296 | 1,899 | 1,054 | 845 | 147 | 5 |
module Main where
import Test.Tasty
import Test.Tasty.HUnit
import qualified Centrinel as C
import qualified Centrinel.Report as C
import Centrinel.System.RunLikeCC (runLikeCC, ParsedCC(..))
import Language.C.System.GCC (newGCC)
import qualified Centrinel.Util.Datafiles as CData
main :: IO ()
main = defaultMain smokeTests
smokeTests :: TestTree
smokeTests = testGroup "Smoke Tests"
[ testGroup "Examples run"
[ assertRunsTestCase "c-examples/incl.c"
, assertRunsTestCase "c-examples/c99.c"
]
]
assertRunsTestCase :: FilePath -> TestTree
assertRunsTestCase fp = testCase (fp ++ " runs") cmd
where
cmd = do
case runLikeCC gcc [fp] of
ParsedCC args [] -> do
ec <- C.report C.defaultOutputMethod fp $ C.runCentrinel datafiles gcc args
assertEqual "exit code" (Just ()) (const () <$> ec) -- throw away analysis results
NoInputFilesCC -> assertFailure $ "expected input files in smoketest " ++ fp
ErrorParsingCC err -> assertFailure $ "unexpected parse error \"" ++ err ++ "\" in smoketest " ++ fp
      ParsedCC _args ignoredArgs -> assertFailure $ "unexpected ignored args " ++ show ignoredArgs ++ " in smoketest " ++ fp
gcc = newGCC "cc"
datafiles = CData.Datafiles "include/centrinel.h"
|
lambdageek/use-c
|
tests/smoke.hs
|
mit
| 1,278 | 0 | 18 | 264 | 332 | 177 | 155 | 27 | 4 |
import Data.Char (isLetter, toLower)
import Data.List (groupBy, sortBy)
isVowel :: Char -> Bool
isVowel = flip elem ['a', 'e', 'i', 'o', 'u', 'y', 'å', 'ä', 'ö'] . toLower
vowelGroups :: String -> [String]
vowelGroups "" = []
vowelGroups s@(c:cs) | isVowel c = fst sp : vowelGroups (snd sp)
where sp = span isVowel s
vowelGroups s = vowelGroups $ dropWhile (not . isVowel) s
vowelGroupSizes :: String -> [Int]
vowelGroupSizes = map length . vowelGroups
vowelGroupScore :: Int -> Int
vowelGroupScore i = i * (2 ^ i)
funniness :: String -> Int
funniness = sum . map vowelGroupScore . vowelGroupSizes
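-- A worked example (word chosen only for illustration): "hauska" has the vowel
-- groups "au" and "a", so its score is 2*2^2 + 1*2^1 = 10.
--
-- >>> funniness "hauska"
-- 10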
compareByFunninessDesc :: String -> String -> Ordering
compareByFunninessDesc a b = compare (funniness b) (funniness a)
main = do
getContents >>= mapM_ putStrLn . map (filter isLetter) . head . groupBy (\a b -> funniness a == funniness b) . sortBy compareByFunninessDesc . words
|
wunderdogsw/funniest-finnish-words
|
haskell-idiomatic-but-slow/HassutSanat.hs
|
mit
| 961 | 1 | 14 | 227 | 374 | 191 | 183 | 19 | 1 |