{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE UndecidableInstances #-}
#if __GLASGOW_HASKELL__ >= 800
-- a) THQ works on cross-compilers and unregisterised GHCs
-- b) may make compilation faster as no dynamic loading is ever needed (not sure about this)
-- c) removes one hindrance to have code inferred as SafeHaskell safe
{-# LANGUAGE TemplateHaskellQuotes #-}
#else
{-# LANGUAGE TemplateHaskell #-}
#endif
#include "incoherent-compat.h"
#include "overlapping-compat.h"
{-|
Module: Data.Aeson.TH
Copyright: (c) 2011-2016 Bryan O'Sullivan
(c) 2011 MailRank, Inc.
License: BSD3
Stability: experimental
Portability: portable
Functions to mechanically derive 'ToJSON' and 'FromJSON' instances. Note that
you need to enable the @TemplateHaskell@ language extension in order to use this
module.
An example shows how instances are generated for arbitrary data types. First we
define a data type:
@
data D a = Nullary
         | Unary Int
         | Product String Char a
         | Record { testOne   :: Double
                  , testTwo   :: Bool
                  , testThree :: D a
                  } deriving Eq
@
Next we derive the necessary instances. Note that we make use of the
feature to change record field names. In this case we drop the first 4
characters of every field name. We also modify constructor names by
lower-casing them:
@
$('deriveJSON' 'defaultOptions'{'fieldLabelModifier' = 'drop' 4, 'constructorTagModifier' = map toLower} ''D)
@
Now we can use the newly created instances.
@
d :: D 'Int'
d = Record { testOne = 3.14159
           , testTwo = 'True'
           , testThree = Product \"test\" \'A\' 123
           }
@
>>> fromJSON (toJSON d) == Success d
> True
This also works for data family instances, but instead of passing in the data
family name (with double quotes), we pass in a data family instance
constructor (with a single quote):
@
data family DF a
data instance DF Int = DF1 Int
                     | DF2 Int Int
                     deriving Eq
$('deriveJSON' 'defaultOptions' 'DF1)
-- Alternatively, one could pass 'DF2 instead
@
Please note that you can derive instances for tuples using the following syntax:
@
-- FromJSON and ToJSON instances for 4-tuples.
$('deriveJSON' 'defaultOptions' ''(,,,))
@
-}
module Data.Aeson.TH
(
-- * Encoding configuration
Options(..)
, SumEncoding(..)
, defaultOptions
, defaultTaggedObject
-- * FromJSON and ToJSON derivation
, deriveJSON
, deriveJSON1
, deriveJSON2
, deriveToJSON
, deriveToJSON1
, deriveToJSON2
, deriveFromJSON
, deriveFromJSON1
, deriveFromJSON2
, mkToJSON
, mkLiftToJSON
, mkLiftToJSON2
, mkToEncoding
, mkLiftToEncoding
, mkLiftToEncoding2
, mkParseJSON
, mkLiftParseJSON
, mkLiftParseJSON2
) where
import Prelude ()
import Prelude.Compat hiding (exp)
import Control.Applicative ((<|>))
import Data.Aeson (Object, (.=), (.:), FromJSON(..), FromJSON1(..), FromJSON2(..), ToJSON(..), ToJSON1(..), ToJSON2(..))
import Data.Aeson.Types (Options(..), Parser, SumEncoding(..), Value(..), defaultOptions, defaultTaggedObject)
import Data.Aeson.Types.Internal ((<?>), Pair, JSONPathElement(Key))
import Data.Aeson.Types.FromJSON (parseOptionalFieldWith)
import Control.Monad (liftM2, unless, when)
import Data.Foldable (foldr')
#if MIN_VERSION_template_haskell(2,8,0) && !MIN_VERSION_template_haskell(2,10,0)
import Data.List (nub)
#endif
import Data.List (find, foldl', genericLength , intercalate , intersperse, partition, union)
import Data.List.NonEmpty ((<|), NonEmpty((:|)))
import Data.Map (Map)
import Data.Maybe (catMaybes, fromMaybe, mapMaybe)
import Data.Set (Set)
#if MIN_VERSION_template_haskell(2,8,0)
import Language.Haskell.TH hiding (Arity)
#else
import Language.Haskell.TH
#endif
import Language.Haskell.TH.Syntax (VarStrictType)
#if MIN_VERSION_template_haskell(2,7,0) && !(MIN_VERSION_template_haskell(2,8,0))
import Language.Haskell.TH.Lib (starK)
#endif
#if MIN_VERSION_template_haskell(2,8,0) && !(MIN_VERSION_template_haskell(2,10,0))
import Language.Haskell.TH.Syntax (mkNameG_tc)
#endif
import Text.Printf (printf)
import qualified Data.Aeson as A
import qualified Data.Aeson.Encoding.Internal as E
import qualified Data.Foldable as F (all)
import qualified Data.HashMap.Strict as H (lookup, toList)
import qualified Data.List.NonEmpty as NE (drop, length, reverse, splitAt)
import qualified Data.Map as M (fromList, findWithDefault, keys, lookup , singleton, size)
import qualified Data.Set as Set (empty, insert, member)
import qualified Data.Text as T (Text, pack, unpack)
import qualified Data.Vector as V (unsafeIndex, null, length, create, fromList)
import qualified Data.Vector.Mutable as VM (unsafeNew, unsafeWrite)
--------------------------------------------------------------------------------
-- Convenience
--------------------------------------------------------------------------------
-- | Generates both 'ToJSON' and 'FromJSON' instance declarations for the given
-- data type or data family instance constructor.
--
-- This is a convenience function which is equivalent to calling both
-- 'deriveToJSON' and 'deriveFromJSON'.
deriveJSON :: Options
-- ^ Encoding options.
-> Name
-- ^ Name of the type for which to generate 'ToJSON' and 'FromJSON'
-- instances.
-> Q [Dec]
deriveJSON = deriveJSONBoth deriveToJSON deriveFromJSON
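-- For the 'D' type from the module documentation, @$(deriveJSON defaultOptions ''D)@
-- expands to roughly the following (the exact instance context is computed by
-- 'buildTypeInstance'; this is an illustrative sketch only):
--
-- > instance ToJSON a => ToJSON (D a) where
-- >     toJSON     = ...
-- >     toEncoding = ...
-- > instance FromJSON a => FromJSON (D a) where
-- >     parseJSON  = ...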
-- | Generates both 'ToJSON1' and 'FromJSON1' instance declarations for the given
-- data type or data family instance constructor.
--
-- This is a convenience function which is equivalent to calling both
-- 'deriveToJSON1' and 'deriveFromJSON1'.
deriveJSON1 :: Options
-- ^ Encoding options.
-> Name
-- ^ Name of the type for which to generate 'ToJSON1' and 'FromJSON1'
-- instances.
-> Q [Dec]
deriveJSON1 = deriveJSONBoth deriveToJSON1 deriveFromJSON1
-- | Generates both 'ToJSON2' and 'FromJSON2' instance declarations for the given
-- data type or data family instance constructor.
--
-- This is a convenience function which is equivalent to calling both
-- 'deriveToJSON2' and 'deriveFromJSON2'.
deriveJSON2 :: Options
-- ^ Encoding options.
-> Name
-- ^ Name of the type for which to generate 'ToJSON2' and 'FromJSON2'
-- instances.
-> Q [Dec]
deriveJSON2 = deriveJSONBoth deriveToJSON2 deriveFromJSON2
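-- A minimal usage sketch for the higher-arity variants, reusing the 'D' type from
-- the module documentation and a hypothetical two-parameter type @Pair@:
--
-- > $(deriveJSON1 defaultOptions ''D)     -- ToJSON1/FromJSON1 instances for D
-- >
-- > data Pair a b = Pair a b
-- > $(deriveJSON2 defaultOptions ''Pair)  -- ToJSON2/FromJSON2 instances for Pair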
--------------------------------------------------------------------------------
-- ToJSON
--------------------------------------------------------------------------------
{-
TODO: Don't constrain phantom type variables.
data Foo a = Foo Int
instance (ToJSON a) ⇒ ToJSON Foo where ...
The above (ToJSON a) constraint is not necessary and perhaps undesirable.
-}
-- | Generates a 'ToJSON' instance declaration for the given data type or
-- data family instance constructor.
deriveToJSON :: Options
-- ^ Encoding options.
-> Name
-- ^ Name of the type for which to generate a 'ToJSON' instance
-- declaration.
-> Q [Dec]
deriveToJSON = deriveToJSONCommon toJSONClass
-- | Generates a 'ToJSON1' instance declaration for the given data type or
-- data family instance constructor.
deriveToJSON1 :: Options
-- ^ Encoding options.
-> Name
-- ^ Name of the type for which to generate a 'ToJSON1' instance
-- declaration.
-> Q [Dec]
deriveToJSON1 = deriveToJSONCommon toJSON1Class
-- | Generates a 'ToJSON2' instance declaration for the given data type or
-- data family instance constructor.
deriveToJSON2 :: Options
-- ^ Encoding options.
-> Name
-- ^ Name of the type for which to generate a 'ToJSON2' instance
-- declaration.
-> Q [Dec]
deriveToJSON2 = deriveToJSONCommon toJSON2Class
deriveToJSONCommon :: JSONClass
-- ^ The ToJSON variant being derived.
-> Options
-- ^ Encoding options.
-> Name
-- ^ Name of the type for which to generate an instance.
-> Q [Dec]
deriveToJSONCommon = deriveJSONClass [ (ToJSON, \jc _ -> consToValue jc)
, (ToEncoding, \jc _ -> consToEncoding jc)
]
-- | Generates a lambda expression which encodes the given data type or
-- data family instance constructor as a 'Value'.
mkToJSON :: Options -- ^ Encoding options.
-> Name -- ^ Name of the type to encode.
-> Q Exp
mkToJSON = mkToJSONCommon toJSONClass
-- | Generates a lambda expression which encodes the given data type or
-- data family instance constructor as a 'Value' by using the given encoding
-- function on occurrences of the last type parameter.
mkLiftToJSON :: Options -- ^ Encoding options.
-> Name -- ^ Name of the type to encode.
-> Q Exp
mkLiftToJSON = mkToJSONCommon toJSON1Class
-- | Generates a lambda expression which encodes the given data type or
-- data family instance constructor as a 'Value' by using the given encoding
-- functions on occurrences of the last two type parameters.
mkLiftToJSON2 :: Options -- ^ Encoding options.
-> Name -- ^ Name of the type to encode.
-> Q Exp
mkLiftToJSON2 = mkToJSONCommon toJSON2Class
mkToJSONCommon :: JSONClass -- ^ Which class's method is being derived.
-> Options -- ^ Encoding options.
-> Name -- ^ Name of the encoded type.
-> Q Exp
mkToJSONCommon = mkFunCommon (\jc _ -> consToValue jc)
-- | Generates a lambda expression which encodes the given data type or
-- data family instance constructor as a JSON string.
mkToEncoding :: Options -- ^ Encoding options.
-> Name -- ^ Name of the type to encode.
-> Q Exp
mkToEncoding = mkToEncodingCommon toJSONClass
-- | Generates a lambda expression which encodes the given data type or
-- data family instance constructor as a JSON string by using the given encoding
-- function on occurrences of the last type parameter.
mkLiftToEncoding :: Options -- ^ Encoding options.
-> Name -- ^ Name of the type to encode.
-> Q Exp
mkLiftToEncoding = mkToEncodingCommon toJSON1Class
-- | Generates a lambda expression which encodes the given data type or
-- data family instance constructor as a JSON string by using the given encoding
-- functions on occurrences of the last two type parameters.
mkLiftToEncoding2 :: Options -- ^ Encoding options.
-> Name -- ^ Name of the type to encode.
-> Q Exp
mkLiftToEncoding2 = mkToEncodingCommon toJSON2Class
mkToEncodingCommon :: JSONClass -- ^ Which class's method is being derived.
-> Options -- ^ Encoding options.
-> Name -- ^ Name of the encoded type.
-> Q Exp
mkToEncodingCommon = mkFunCommon (\jc _ -> consToEncoding jc)
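-- A minimal sketch of how the @mk*@ functions are typically used: unlike the
-- @derive*@ functions they produce plain expressions, which can be spliced into a
-- hand-written instance. @Coord@ is a hypothetical example type:
--
-- > data Coord = Coord { x :: Double, y :: Double }
-- >
-- > instance ToJSON Coord where
-- >     toJSON     = $(mkToJSON defaultOptions ''Coord)
-- >     toEncoding = $(mkToEncoding defaultOptions ''Coord)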
-- | Helper function used by both 'deriveToJSON' and 'mkToJSON'. Generates
-- code to generate a 'Value' of a number of constructors. All constructors
-- must be from the same type.
consToValue :: JSONClass
-- ^ The ToJSON variant being derived.
-> Options
-- ^ Encoding options.
-> [Con]
-- ^ Constructors for which to generate JSON generating code.
-> Q Exp
consToValue _ _ [] = error $ "Data.Aeson.TH.consToValue: "
++ "Not a single constructor given!"
consToValue jc opts cons = do
value <- newName "value"
tjs <- newNameList "_tj" $ arityInt jc
tjls <- newNameList "_tjl" $ arityInt jc
let zippedTJs = zip tjs tjls
interleavedTJs = interleave tjs tjls
lamE (map varP $ interleavedTJs ++ [value]) $
caseE (varE value) (matches zippedTJs)
where
matches tjs = case cons of
-- A single constructor is directly encoded. The constructor itself may be
-- forgotten.
[con] -> [argsToValue jc tjs opts False con]
_ | allNullaryToStringTag opts && all isNullary cons ->
[ match (conP conName []) (normalB $ conStr opts conName) []
| con <- cons
, let conName = getConName con
]
| otherwise -> [argsToValue jc tjs opts True con | con <- cons]
conStr :: Options -> Name -> Q Exp
conStr opts = appE [|String|] . conTxt opts
conTxt :: Options -> Name -> Q Exp
conTxt opts = appE [|T.pack|] . conStringE opts
conStringE :: Options -> Name -> Q Exp
conStringE opts = stringE . constructorTagModifier opts . nameBase
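-- An illustrative sketch of the tag-string encoding produced when all constructors
-- are nullary and 'allNullaryToStringTag' is enabled, assuming a hypothetical enum
-- type and @constructorTagModifier = map toLower@:
--
-- > data Color = Red | Green | Blue
-- > -- toJSON Red ~ String "red"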
-- | Helper function used by both 'deriveToJSON' and 'mkToEncoding'. Generates
-- code to write out a value for a number of constructors. All constructors
-- must be from the same type.
consToEncoding :: JSONClass
-- ^ The ToJSON variant being derived.
-> Options
-- ^ Encoding options.
-> [Con]
-- ^ Constructors for which to generate JSON generating code.
-> Q Exp
consToEncoding _ _ [] = error $ "Data.Aeson.TH.consToEncoding: "
++ "Not a single constructor given!"
consToEncoding jc opts cons = do
value <- newName "value"
tes <- newNameList "_te" $ arityInt jc
tels <- newNameList "_tel" $ arityInt jc
let zippedTEs = zip tes tels
interleavedTEs = interleave tes tels
lamE (map varP $ interleavedTEs ++ [value]) $
caseE (varE value) (matches zippedTEs)
where
matches tes = case cons of
-- A single constructor is directly encoded. The constructor itself may be
-- forgotten.
[con] -> [argsToEncoding jc tes opts False con]
-- Encode just the name of the constructor of a sum type iff all the
-- constructors are nullary.
_ | allNullaryToStringTag opts && all isNullary cons ->
[ match (conP conName [])
(normalB $ encStr opts conName) []
| con <- cons
, let conName = getConName con
]
| otherwise -> [argsToEncoding jc tes opts True con | con <- cons]
encStr :: Options -> Name -> Q Exp
encStr opts = appE [|E.text|] . conTxt opts
-- | If constructor is nullary.
isNullary :: Con -> Bool
isNullary (NormalC _ []) = True
isNullary _ = False
sumToValue :: Options -> Bool -> Name -> Q Exp -> Q Exp
sumToValue opts multiCons conName exp
| multiCons =
case sumEncoding opts of
TwoElemArray ->
[|Array|] `appE` ([|V.fromList|] `appE` listE [conStr opts conName, exp])
TaggedObject{tagFieldName, contentsFieldName} ->
[|A.object|] `appE` listE
[ infixApp [|T.pack tagFieldName|] [|(.=)|] (conStr opts conName)
, infixApp [|T.pack contentsFieldName|] [|(.=)|] exp
]
ObjectWithSingleField ->
[|A.object|] `appE` listE
[ infixApp (conTxt opts conName) [|(.=)|] exp
]
UntaggedValue -> exp
| otherwise = exp
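-- For reference, a sketch of what each 'SumEncoding' produces for a hypothetical
-- constructor application @Unary 42@ in a multi-constructor type (assuming the
-- default @tagFieldName = "tag"@ and @contentsFieldName = "contents"@):
--
-- > TwoElemArray          ~ ["Unary",42]
-- > TaggedObject          ~ {"tag":"Unary","contents":42}
-- > ObjectWithSingleField ~ {"Unary":42}
-- > UntaggedValue         ~ 42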
nullarySumToValue :: Options -> Bool -> Name -> Q Exp
nullarySumToValue opts multiCons conName =
case sumEncoding opts of
TaggedObject{tagFieldName} ->
[|A.object|] `appE` listE
[ infixApp [|T.pack tagFieldName|] [|(.=)|] (conStr opts conName)
]
UntaggedValue -> conStr opts conName
_ -> sumToValue opts multiCons conName [e|toJSON ([] :: [()])|]
-- | Generates code to generate the JSON encoding of a single constructor.
argsToValue :: JSONClass -> [(Name, Name)] -> Options -> Bool -> Con -> Q Match
-- Nullary constructors. Generates code that explicitly matches against the
-- constructor even though it doesn't contain data. This is useful to prevent
-- type errors.
argsToValue jc tjs opts multiCons (NormalC conName []) = do
([], _) <- reifyConTys jc tjs conName
match (conP conName [])
(normalB (nullarySumToValue opts multiCons conName))
[]
-- Polyadic constructors with special case for unary constructors.
argsToValue jc tjs opts multiCons (NormalC conName ts) = do
(argTys, tvMap) <- reifyConTys jc tjs conName
let len = length ts
args <- newNameList "arg" len
js <- case [ dispatchToJSON jc conName tvMap argTy
`appE` varE arg
| (arg, argTy) <- zip args argTys
] of
-- Single argument is directly converted.
[e] -> return e
-- Multiple arguments are converted to a JSON array.
es -> do
mv <- newName "mv"
let newMV = bindS (varP mv)
([|VM.unsafeNew|] `appE`
litE (integerL $ fromIntegral len))
stmts = [ noBindS $
[|VM.unsafeWrite|] `appE`
varE mv `appE`
litE (integerL ix) `appE`
e
| (ix, e) <- zip [(0::Integer)..] es
]
ret = noBindS $ [|return|] `appE` varE mv
return $ [|Array|] `appE`
(varE 'V.create `appE`
doE (newMV:stmts++[ret]))
match (conP conName $ map varP args)
(normalB $ sumToValue opts multiCons conName js)
[]
-- Records.
argsToValue jc tjs opts multiCons (RecC conName ts) = case (unwrapUnaryRecords opts, not multiCons, ts) of
(True,True,[(_,st,ty)]) -> argsToValue jc tjs opts multiCons (NormalC conName [(st,ty)])
_ -> do
(argTys, tvMap) <- reifyConTys jc tjs conName
args <- newNameList "arg" $ length ts
let exp = [|A.object|] `appE` pairs
pairs | omitNothingFields opts = infixApp maybeFields
[|(++)|]
restFields
| otherwise = listE $ map toPair argCons
argCons = zip3 args argTys ts
maybeFields = [|catMaybes|] `appE` listE (map maybeToPair maybes)
restFields = listE $ map toPair rest
(maybes, rest) = partition isMaybe argCons
maybeToPair (arg, argTy, (field, _, _)) =
infixApp ([|keyValuePairWith|]
`appE` dispatchToJSON jc conName tvMap argTy
`appE` toFieldName field)
[|(<$>)|]
(varE arg)
toPair (arg, argTy, (field, _, _)) =
[|keyValuePairWith|]
`appE` dispatchToJSON jc conName tvMap argTy
`appE` toFieldName field
`appE` varE arg
toFieldName field = [|T.pack|] `appE` fieldLabelExp opts field
match (conP conName $ map varP args)
( normalB
$ if multiCons
then case sumEncoding opts of
TwoElemArray -> [|toJSON|] `appE` tupE [conStr opts conName, exp]
TaggedObject{tagFieldName} ->
[|A.object|] `appE`
-- TODO: Maybe throw an error in case
-- tagFieldName overwrites a field in pairs.
infixApp (infixApp [|T.pack tagFieldName|]
[|(.=)|]
(conStr opts conName))
[|(:)|]
pairs
ObjectWithSingleField ->
[|A.object|] `appE` listE
[ infixApp (conTxt opts conName) [|(.=)|] exp ]
UntaggedValue -> exp
else exp
) []
-- Infix constructors.
argsToValue jc tjs opts multiCons (InfixC _ conName _) = do
([alTy, arTy], tvMap) <- reifyConTys jc tjs conName
al <- newName "argL"
ar <- newName "argR"
match (infixP (varP al) conName (varP ar))
( normalB
$ sumToValue opts multiCons conName
$ [|toJSON|] `appE` listE [ dispatchToJSON jc conName tvMap aTy
`appE` varE a
| (a, aTy) <- [(al,alTy), (ar,arTy)]
]
)
[]
-- Existentially quantified constructors.
argsToValue jc tjs opts multiCons (ForallC _ _ con) =
argsToValue jc tjs opts multiCons con
#if MIN_VERSION_template_haskell(2,11,0)
-- GADTs.
argsToValue jc tjs opts multiCons (GadtC conNames ts _) =
argsToValue jc tjs opts multiCons $ NormalC (head conNames) ts
argsToValue jc tjs opts multiCons (RecGadtC conNames ts _) =
argsToValue jc tjs opts multiCons $ RecC (head conNames) ts
#endif
isMaybe :: (a, b, (c, d, Type)) -> Bool
isMaybe (_, _, (_, _, AppT (ConT t) _)) = t == ''Maybe
isMaybe _ = False
(<^>) :: ExpQ -> ExpQ -> ExpQ
(<^>) a b = infixApp a [|(E.><)|] b
infixr 6 <^>
(<:>) :: ExpQ -> ExpQ -> ExpQ
(<:>) a b = a <^> [|E.colon|] <^> b
infixr 5 <:>
(<%>) :: ExpQ -> ExpQ -> ExpQ
(<%>) a b = a <^> [|E.comma|] <^> b
infixr 4 <%>
array :: ExpQ -> ExpQ
array exp = [|E.wrapArray|] `appE` exp
object :: ExpQ -> ExpQ
object exp = [|E.wrapObject|] `appE` exp
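-- A short note on the combinators above: '<^>' concatenates two Encoding
-- expressions, '<:>' joins them with a colon (building a @key:value@ pair),
-- '<%>' joins them with a comma, and 'array'/'object' wrap the result in
-- brackets/braces. Illustrative sketch:
--
-- > object (toFieldName f <:> e)  ~ {"f":<e>}
-- > array  (e1 <%> e2)            ~ [<e1>,<e2>]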
sumToEncoding :: Options -> Bool -> Name -> Q Exp -> Q Exp
sumToEncoding opts multiCons conName exp
| multiCons =
let fexp = exp in
case sumEncoding opts of
TwoElemArray ->
array (encStr opts conName <%> fexp)
TaggedObject{tagFieldName, contentsFieldName} ->
object $
([|E.text (T.pack tagFieldName)|] <:> encStr opts conName) <%>
([|E.text (T.pack contentsFieldName)|] <:> fexp)
ObjectWithSingleField ->
object (encStr opts conName <:> fexp)
UntaggedValue -> exp
| otherwise = exp
nullarySumToEncoding :: Options -> Bool -> Name -> Q Exp
nullarySumToEncoding opts multiCons conName =
case sumEncoding opts of
TaggedObject{tagFieldName} ->
object $
[|E.text (T.pack tagFieldName)|] <:> encStr opts conName
UntaggedValue -> encStr opts conName
_ -> sumToEncoding opts multiCons conName [e|toEncoding ([] :: [()])|]
-- | Generates code to generate the JSON encoding of a single constructor.
argsToEncoding :: JSONClass -> [(Name, Name)] -> Options -> Bool -> Con -> Q Match
-- Nullary constructors. Generates code that explicitly matches against the
-- constructor even though it doesn't contain data. This is useful to prevent
-- type errors.
argsToEncoding jc tes opts multiCons (NormalC conName []) = do
([], _) <- reifyConTys jc tes conName
match (conP conName [])
(normalB (nullarySumToEncoding opts multiCons conName))
[]
-- Polyadic constructors with special case for unary constructors.
argsToEncoding jc tes opts multiCons (NormalC conName ts) = do
(argTys, tvMap) <- reifyConTys jc tes conName
let len = length ts
args <- newNameList "arg" len
js <- case zip args argTys of
-- Single argument is directly converted.
[(e,eTy)] -> return (dispatchToEncoding jc conName tvMap eTy
`appE` varE e)
-- Multiple arguments are converted to a JSON array.
es ->
return (array (foldr1 (<%>) [ dispatchToEncoding jc conName tvMap xTy
`appE` varE x
| (x,xTy) <- es
]))
match (conP conName $ map varP args)
(normalB $ sumToEncoding opts multiCons conName js)
[]
-- Records.
argsToEncoding jc tes opts multiCons (RecC conName ts) = case (unwrapUnaryRecords opts, not multiCons, ts) of
(True,True,[(_,st,ty)]) -> argsToEncoding jc tes opts multiCons (NormalC conName [(st,ty)])
_ -> do
args <- newNameList "arg" $ length ts
(argTys, tvMap) <- reifyConTys jc tes conName
let exp = object objBody
objBody = [|E.econcat|] `appE`
([|intersperse E.comma|] `appE` pairs)
pairs | omitNothingFields opts = infixApp maybeFields
[|(++)|]
restFields
| otherwise = listE (map toPair argCons)
argCons = zip3 args argTys ts
maybeFields = [|catMaybes|] `appE` listE (map maybeToPair maybes)
restFields = listE (map toPair rest)
(maybes, rest) = partition isMaybe argCons
maybeToPair (arg, argTy, (field, _, _)) =
infixApp
(infixApp
(infixE
(Just $ toFieldName field <^> [|E.colon|])
[|(E.><)|]
Nothing)
[|(.)|]
(dispatchToEncoding jc conName tvMap argTy))
[|(<$>)|]
(varE arg)
toPair (arg, argTy, (field, _, _)) =
toFieldName field
<:> dispatchToEncoding jc conName tvMap argTy
`appE` varE arg
toFieldName field = [|E.text|] `appE`
([|T.pack|] `appE` fieldLabelExp opts field)
match (conP conName $ map varP args)
( normalB
$ if multiCons
then case sumEncoding opts of
TwoElemArray -> array $
encStr opts conName <%> exp
TaggedObject{tagFieldName} -> object $
([|E.text (T.pack tagFieldName)|] <:>
encStr opts conName) <%>
objBody
ObjectWithSingleField -> object $
encStr opts conName <:> exp
UntaggedValue -> exp
else exp
) []
-- Infix constructors.
argsToEncoding jc tes opts multiCons (InfixC _ conName _) = do
al <- newName "argL"
ar <- newName "argR"
([alTy,arTy], tvMap) <- reifyConTys jc tes conName
match (infixP (varP al) conName (varP ar))
( normalB
$ sumToEncoding opts multiCons conName
$ array (foldr1 (<%>) [ dispatchToEncoding jc conName tvMap aTy
`appE` varE a
| (a,aTy) <- [(al,alTy), (ar,arTy)]
])
)
[]
-- Existentially quantified constructors.
argsToEncoding jc tes opts multiCons (ForallC _ _ con) =
argsToEncoding jc tes opts multiCons con
#if MIN_VERSION_template_haskell(2,11,0)
-- GADTs.
argsToEncoding jc tes opts multiCons (GadtC conNames ts _) =
argsToEncoding jc tes opts multiCons $ NormalC (head conNames) ts
argsToEncoding jc tes opts multiCons (RecGadtC conNames ts _) =
argsToEncoding jc tes opts multiCons $ RecC (head conNames) ts
#endif
--------------------------------------------------------------------------------
-- FromJSON
--------------------------------------------------------------------------------
-- | Generates a 'FromJSON' instance declaration for the given data type or
-- data family instance constructor.
deriveFromJSON :: Options
-- ^ Encoding options.
-> Name
-- ^ Name of the type for which to generate a 'FromJSON' instance
-- declaration.
-> Q [Dec]
deriveFromJSON = deriveFromJSONCommon fromJSONClass
-- | Generates a 'FromJSON1' instance declaration for the given data type or
-- data family instance constructor.
deriveFromJSON1 :: Options
-- ^ Encoding options.
-> Name
-- ^ Name of the type for which to generate a 'FromJSON1' instance
-- declaration.
-> Q [Dec]
deriveFromJSON1 = deriveFromJSONCommon fromJSON1Class
-- | Generates a 'FromJSON2' instance declaration for the given data type or
-- data family instance constructor.
deriveFromJSON2 :: Options
-- ^ Encoding options.
-> Name
-- ^ Name of the type for which to generate a 'FromJSON2' instance
-- declaration.
-> Q [Dec]
deriveFromJSON2 = deriveFromJSONCommon fromJSON2Class
deriveFromJSONCommon :: JSONClass
-- ^ The FromJSON variant being derived.
-> Options
-- ^ Encoding options.
-> Name
-- ^ Name of the type for which to generate an instance declaration.
-> Q [Dec]
deriveFromJSONCommon = deriveJSONClass [(ParseJSON, consFromJSON)]
-- | Generates a lambda expression which parses the JSON encoding of the given
-- data type or data family instance constructor.
mkParseJSON :: Options -- ^ Encoding options.
-> Name -- ^ Name of the encoded type.
-> Q Exp
mkParseJSON = mkParseJSONCommon fromJSONClass
-- | Generates a lambda expression which parses the JSON encoding of the given
-- data type or data family instance constructor by using the given parsing
-- function on occurrences of the last type parameter.
mkLiftParseJSON :: Options -- ^ Encoding options.
-> Name -- ^ Name of the encoded type.
-> Q Exp
mkLiftParseJSON = mkParseJSONCommon fromJSON1Class
-- | Generates a lambda expression which parses the JSON encoding of the given
-- data type or data family instance constructor by using the given parsing
-- functions on occurrences of the last two type parameters.
mkLiftParseJSON2 :: Options -- ^ Encoding options.
-> Name -- ^ Name of the encoded type.
-> Q Exp
mkLiftParseJSON2 = mkParseJSONCommon fromJSON2Class
mkParseJSONCommon :: JSONClass -- ^ Which class's method is being derived.
-> Options -- ^ Encoding options.
-> Name -- ^ Name of the encoded type.
-> Q Exp
mkParseJSONCommon = mkFunCommon consFromJSON
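-- A minimal sketch of typical 'mkParseJSON' usage, mirroring the @mkToJSON@ /
-- @mkToEncoding@ example above (with the same hypothetical @Coord@ type):
--
-- > instance FromJSON Coord where
-- >     parseJSON = $(mkParseJSON defaultOptions ''Coord)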
-- | Helper function used by both 'deriveFromJSON' and 'mkParseJSON'. Generates
-- code to parse the JSON encoding of a number of constructors. All constructors
-- must be from the same type.
consFromJSON :: JSONClass
-- ^ The FromJSON variant being derived.
-> Name
-- ^ Name of the type to which the constructors belong.
-> Options
-- ^ Encoding options
-> [Con]
-- ^ Constructors for which to generate JSON parsing code.
-> Q Exp
consFromJSON _ _ _ [] = error $ "Data.Aeson.TH.consFromJSON: "
++ "Not a single constructor given!"
consFromJSON jc tName opts cons = do
value <- newName "value"
pjs <- newNameList "_pj" $ arityInt jc
pjls <- newNameList "_pjl" $ arityInt jc
let zippedPJs = zip pjs pjls
interleavedPJs = interleave pjs pjls
lamE (map varP $ interleavedPJs ++ [value]) $ lamExpr value zippedPJs
where
lamExpr value pjs = case cons of
[con] -> parseArgs jc pjs tName opts con (Right value)
_ | sumEncoding opts == UntaggedValue
-> parseUntaggedValue pjs cons value
| otherwise
-> caseE (varE value) $
if allNullaryToStringTag opts && all isNullary cons
then allNullaryMatches
else mixedMatches pjs
allNullaryMatches =
[ do txt <- newName "txt"
match (conP 'String [varP txt])
(guardedB $
[ liftM2 (,) (normalG $
infixApp (varE txt)
[|(==)|]
([|T.pack|] `appE`
conStringE opts conName)
)
([|pure|] `appE` conE conName)
| con <- cons
, let conName = getConName con
]
++
[ liftM2 (,)
(normalG [|otherwise|])
( [|noMatchFail|]
`appE` litE (stringL $ show tName)
`appE` ([|T.unpack|] `appE` varE txt)
)
]
)
[]
, do other <- newName "other"
match (varP other)
(normalB $ [|noStringFail|]
`appE` litE (stringL $ show tName)
`appE` ([|valueConName|] `appE` varE other)
)
[]
]
mixedMatches pjs =
case sumEncoding opts of
TaggedObject {tagFieldName, contentsFieldName} ->
parseObject $ parseTaggedObject pjs tagFieldName contentsFieldName
UntaggedValue -> error "UntaggedValue: Should be handled already"
ObjectWithSingleField ->
parseObject $ parseObjectWithSingleField pjs
TwoElemArray ->
[ do arr <- newName "array"
match (conP 'Array [varP arr])
(guardedB
[ liftM2 (,) (normalG $ infixApp ([|V.length|] `appE` varE arr)
[|(==)|]
(litE $ integerL 2))
(parse2ElemArray pjs arr)
, liftM2 (,) (normalG [|otherwise|])
([|not2ElemArray|]
`appE` litE (stringL $ show tName)
`appE` ([|V.length|] `appE` varE arr))
]
)
[]
, do other <- newName "other"
match (varP other)
( normalB
$ [|noArrayFail|]
`appE` litE (stringL $ show tName)
`appE` ([|valueConName|] `appE` varE other)
)
[]
]
parseObject f =
[ do obj <- newName "obj"
match (conP 'Object [varP obj]) (normalB $ f obj) []
, do other <- newName "other"
match (varP other)
( normalB
$ [|noObjectFail|]
`appE` litE (stringL $ show tName)
`appE` ([|valueConName|] `appE` varE other)
)
[]
]
parseTaggedObject pjs typFieldName valFieldName obj = do
conKey <- newName "conKey"
doE [ bindS (varP conKey)
(infixApp (varE obj)
[|(.:)|]
([|T.pack|] `appE` stringE typFieldName))
, noBindS $ parseContents pjs conKey (Left (valFieldName, obj)) 'conNotFoundFailTaggedObject
]
parseUntaggedValue pjs cons' conVal =
foldr1 (\e e' -> infixApp e [|(<|>)|] e')
(map (\x -> parseValue pjs x conVal) cons')
parseValue _pjs (NormalC conName []) conVal = do
str <- newName "str"
caseE (varE conVal)
[ match (conP 'String [varP str])
(guardedB
[ liftM2 (,) (normalG $ infixApp (varE str) [|(==)|] ([|T.pack|] `appE` conStringE opts conName)
)
([|pure|] `appE` conE conName)
]
)
[]
, matchFailed tName conName "String"
]
parseValue pjs con conVal =
parseArgs jc pjs tName opts con (Right conVal)
parse2ElemArray pjs arr = do
conKey <- newName "conKey"
conVal <- newName "conVal"
let letIx n ix =
valD (varP n)
(normalB ([|V.unsafeIndex|] `appE`
varE arr `appE`
litE (integerL ix)))
[]
letE [ letIx conKey 0
, letIx conVal 1
]
(caseE (varE conKey)
[ do txt <- newName "txt"
match (conP 'String [varP txt])
(normalB $ parseContents pjs
txt
(Right conVal)
'conNotFoundFail2ElemArray
)
[]
, do other <- newName "other"
match (varP other)
( normalB
$ [|firstElemNoStringFail|]
`appE` litE (stringL $ show tName)
`appE` ([|valueConName|] `appE` varE other)
)
[]
]
)
parseObjectWithSingleField pjs obj = do
conKey <- newName "conKey"
conVal <- newName "conVal"
caseE ([e|H.toList|] `appE` varE obj)
[ match (listP [tupP [varP conKey, varP conVal]])
(normalB $ parseContents pjs conKey (Right conVal) 'conNotFoundFailObjectSingleField)
[]
, do other <- newName "other"
match (varP other)
(normalB $ [|wrongPairCountFail|]
`appE` litE (stringL $ show tName)
`appE` ([|show . length|] `appE` varE other)
)
[]
]
parseContents pjs conKey contents errorFun =
caseE (varE conKey)
[ match wildP
( guardedB $
[ do g <- normalG $ infixApp (varE conKey)
[|(==)|]
([|T.pack|] `appE`
conNameExp opts con)
e <- parseArgs jc pjs tName opts con contents
return (g, e)
| con <- cons
]
++
[ liftM2 (,)
(normalG [e|otherwise|])
( varE errorFun
`appE` litE (stringL $ show tName)
`appE` listE (map ( litE
. stringL
. constructorTagModifier opts
. nameBase
. getConName
) cons
)
`appE` ([|T.unpack|] `appE` varE conKey)
)
]
)
[]
]
parseNullaryMatches :: Name -> Name -> [Q Match]
parseNullaryMatches tName conName =
[ do arr <- newName "arr"
match (conP 'Array [varP arr])
(guardedB
[ liftM2 (,) (normalG $ [|V.null|] `appE` varE arr)
([|pure|] `appE` conE conName)
, liftM2 (,) (normalG [|otherwise|])
(parseTypeMismatch tName conName
(litE $ stringL "an empty Array")
(infixApp (litE $ stringL "Array of length ")
[|(++)|]
([|show . V.length|] `appE` varE arr)
)
)
]
)
[]
, matchFailed tName conName "Array"
]
parseUnaryMatches :: JSONClass -> TyVarMap -> Type -> Name -> [Q Match]
parseUnaryMatches jc tvMap argTy conName =
[ do arg <- newName "arg"
match (varP arg)
( normalB $ infixApp (conE conName)
[|(<$>)|]
(dispatchParseJSON jc conName tvMap argTy
`appE` varE arg)
)
[]
]
parseRecord :: JSONClass
-> TyVarMap
-> [Type]
-> Options
-> Name
-> Name
-> [VarStrictType]
-> Name
-> ExpQ
parseRecord jc tvMap argTys opts tName conName ts obj =
foldl' (\a b -> infixApp a [|(<*>)|] b)
(infixApp (conE conName) [|(<$>)|] x)
xs
where
x:xs = [ [|lookupField|]
`appE` dispatchParseJSON jc conName tvMap argTy
`appE` litE (stringL $ show tName)
`appE` litE (stringL $ constructorTagModifier opts $ nameBase conName)
`appE` varE obj
`appE` ( [|T.pack|] `appE` fieldLabelExp opts field
)
| ((field, _, _), argTy) <- zip ts argTys
]
getValField :: Name -> String -> [MatchQ] -> Q Exp
getValField obj valFieldName matches = do
val <- newName "val"
doE [ bindS (varP val) $ infixApp (varE obj)
[|(.:)|]
([|T.pack|] `appE`
litE (stringL valFieldName))
, noBindS $ caseE (varE val) matches
]
matchCases :: Either (String, Name) Name -> [MatchQ] -> Q Exp
matchCases (Left (valFieldName, obj)) = getValField obj valFieldName
matchCases (Right valName) = caseE (varE valName)
-- | Generates code to parse the JSON encoding of a single constructor.
parseArgs :: JSONClass -- ^ The FromJSON variant being derived.
-> [(Name, Name)] -- ^ The names of the encoding/decoding function arguments.
-> Name -- ^ Name of the type to which the constructor belongs.
-> Options -- ^ Encoding options.
-> Con -- ^ Constructor for which to generate JSON parsing code.
-> Either (String, Name) Name -- ^ Left (valFieldName, objName) or
-- Right valName
-> Q Exp
-- Nullary constructors.
parseArgs jc pjs _ _ (NormalC conName []) (Left _) = do
([], _) <- reifyConTys jc pjs conName
[|pure|] `appE` conE conName
parseArgs jc pjs tName _ (NormalC conName []) (Right valName) = do
([], _) <- reifyConTys jc pjs conName
caseE (varE valName) $ parseNullaryMatches tName conName
-- Unary constructors.
parseArgs jc pjs _ _ (NormalC conName [_]) contents = do
([argTy], tvMap) <- reifyConTys jc pjs conName
matchCases contents $ parseUnaryMatches jc tvMap argTy conName
-- Polyadic constructors.
parseArgs jc pjs tName _ (NormalC conName ts) contents = do
(argTys, tvMap) <- reifyConTys jc pjs conName
let len = genericLength ts
matchCases contents $ parseProduct jc tvMap argTys tName conName len
-- Records.
parseArgs jc pjs tName opts (RecC conName ts) (Left (_, obj)) = do
(argTys, tvMap) <- reifyConTys jc pjs conName
parseRecord jc tvMap argTys opts tName conName ts obj
parseArgs jc pjs tName opts (RecC conName ts) (Right valName) = case (unwrapUnaryRecords opts,ts) of
(True,[(_,st,ty)])-> parseArgs jc pjs tName opts (NormalC conName [(st,ty)]) (Right valName)
_ -> do
obj <- newName "recObj"
(argTys, tvMap) <- reifyConTys jc pjs conName
caseE (varE valName)
[ match (conP 'Object [varP obj]) (normalB $
parseRecord jc tvMap argTys opts tName conName ts obj) []
, matchFailed tName conName "Object"
]
-- Infix constructors. Apart from syntax these are the same as
-- polyadic constructors.
parseArgs jc pjs tName _ (InfixC _ conName _) contents = do
(argTys, tvMap) <- reifyConTys jc pjs conName
matchCases contents $ parseProduct jc tvMap argTys tName conName 2
-- Existentially quantified constructors. We ignore the quantifiers
-- and proceed with the contained constructor.
parseArgs jc pjs tName opts (ForallC _ _ con) contents =
parseArgs jc pjs tName opts con contents
#if MIN_VERSION_template_haskell(2,11,0)
-- GADTs. We ignore the refined return type and proceed as if it were a
-- NormalC or RecC.
parseArgs jc pjs tName opts (GadtC conNames ts _) contents =
parseArgs jc pjs tName opts (NormalC (head conNames) ts) contents
parseArgs jc pjs tName opts (RecGadtC conNames ts _) contents =
parseArgs jc pjs tName opts (RecC (head conNames) ts) contents
#endif
-- | Generates code to parse the JSON encoding of an n-ary
-- constructor.
parseProduct :: JSONClass -- ^ The FromJSON variant being derived.
-> TyVarMap -- ^ Maps the last type variables to their decoding
-- function arguments.
-> [Type] -- ^ The argument types of the constructor.
-> Name -- ^ Name of the type to which the constructor belongs.
-> Name -- ^ 'Con'structor name.
-> Integer -- ^ 'Con'structor arity.
-> [Q Match]
parseProduct jc tvMap argTys tName conName numArgs =
[ do arr <- newName "arr"
-- List of: "parseJSON (arr `V.unsafeIndex` <IX>)"
let x:xs = [ dispatchParseJSON jc conName tvMap argTy
`appE`
infixApp (varE arr)
[|V.unsafeIndex|]
(litE $ integerL ix)
| (argTy, ix) <- zip argTys [0 .. numArgs - 1]
]
match (conP 'Array [varP arr])
(normalB $ condE ( infixApp ([|V.length|] `appE` varE arr)
[|(==)|]
(litE $ integerL numArgs)
)
( foldl' (\a b -> infixApp a [|(<*>)|] b)
(infixApp (conE conName) [|(<$>)|] x)
xs
)
( parseTypeMismatch tName conName
(litE $ stringL $ "Array of length " ++ show numArgs)
( infixApp (litE $ stringL "Array of length ")
[|(++)|]
([|show . V.length|] `appE` varE arr)
)
)
)
[]
, matchFailed tName conName "Array"
]
--------------------------------------------------------------------------------
-- Parsing errors
--------------------------------------------------------------------------------
matchFailed :: Name -> Name -> String -> MatchQ
matchFailed tName conName expected = do
other <- newName "other"
match (varP other)
( normalB $ parseTypeMismatch tName conName
(litE $ stringL expected)
([|valueConName|] `appE` varE other)
)
[]
parseTypeMismatch :: Name -> Name -> ExpQ -> ExpQ -> ExpQ
parseTypeMismatch tName conName expected actual =
foldl appE
[|parseTypeMismatch'|]
[ litE $ stringL $ nameBase conName
, litE $ stringL $ show tName
, expected
, actual
]
class LookupField a where
lookupField :: (Value -> Parser a) -> String -> String
-> Object -> T.Text -> Parser a
instance OVERLAPPABLE_ LookupField a where
lookupField = lookupFieldWith
instance INCOHERENT_ LookupField (Maybe a) where
lookupField pj _ _ = parseOptionalFieldWith pj
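-- The INCOHERENT instance above is what makes 'Maybe' record fields optional in
-- generated parsers: a missing key parses as 'Nothing' rather than failing.
-- Illustrative sketch, assuming a hypothetical record type:
--
-- > data R = R { ra :: Int, rb :: Maybe Bool }
-- > -- parsing {"ra":1}           ~ R 1 Nothing
-- > -- parsing {"ra":1,"rb":true} ~ R 1 (Just True)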
lookupFieldWith :: (Value -> Parser a) -> String -> String
-> Object -> T.Text -> Parser a
lookupFieldWith pj tName rec obj key =
case H.lookup key obj of
Nothing -> unknownFieldFail tName rec (T.unpack key)
Just v -> pj v <?> Key key
keyValuePairWith :: (v -> Value) -> T.Text -> v -> Pair
keyValuePairWith tj name value = (name, tj value)
unknownFieldFail :: String -> String -> String -> Parser fail
unknownFieldFail tName rec key =
fail $ printf "When parsing the record %s of type %s the key %s was not present."
rec tName key
noArrayFail :: String -> String -> Parser fail
noArrayFail t o = fail $ printf "When parsing %s expected Array but got %s." t o
noObjectFail :: String -> String -> Parser fail
noObjectFail t o = fail $ printf "When parsing %s expected Object but got %s." t o
firstElemNoStringFail :: String -> String -> Parser fail
firstElemNoStringFail t o = fail $ printf "When parsing %s expected an Array of 2 elements where the first element is a String but got %s at the first element." t o
wrongPairCountFail :: String -> String -> Parser fail
wrongPairCountFail t n =
fail $ printf "When parsing %s expected an Object with a single tag/contents pair but got %s pairs."
t n
noStringFail :: String -> String -> Parser fail
noStringFail t o = fail $ printf "When parsing %s expected String but got %s." t o
noMatchFail :: String -> String -> Parser fail
noMatchFail t o =
fail $ printf "When parsing %s expected a String with the tag of a constructor but got %s." t o
not2ElemArray :: String -> Int -> Parser fail
not2ElemArray t i = fail $ printf "When parsing %s expected an Array of 2 elements but got %i elements" t i
conNotFoundFail2ElemArray :: String -> [String] -> String -> Parser fail
conNotFoundFail2ElemArray t cs o =
fail $ printf "When parsing %s expected a 2-element Array with a tag and contents element where the tag is one of [%s], but got %s."
t (intercalate ", " cs) o
conNotFoundFailObjectSingleField :: String -> [String] -> String -> Parser fail
conNotFoundFailObjectSingleField t cs o =
fail $ printf "When parsing %s expected an Object with a single tag/contents pair where the tag is one of [%s], but got %s."
t (intercalate ", " cs) o
conNotFoundFailTaggedObject :: String -> [String] -> String -> Parser fail
conNotFoundFailTaggedObject t cs o =
fail $ printf "When parsing %s expected an Object with a tag field where the value is one of [%s], but got %s."
t (intercalate ", " cs) o
parseTypeMismatch' :: String -> String -> String -> String -> Parser fail
parseTypeMismatch' conName tName expected actual =
fail $ printf "When parsing the constructor %s of type %s expected %s but got %s."
conName tName expected actual
--------------------------------------------------------------------------------
-- Shared ToJSON and FromJSON code
--------------------------------------------------------------------------------
-- | Functionality common to 'deriveJSON', 'deriveJSON1', and 'deriveJSON2'.
deriveJSONBoth :: (Options -> Name -> Q [Dec])
-- ^ Function which derives a flavor of 'ToJSON'.
-> (Options -> Name -> Q [Dec])
-- ^ Function which derives a flavor of 'FromJSON'.
-> Options
-- ^ Encoding options.
-> Name
-- ^ Name of the type for which to generate 'ToJSON' and 'FromJSON'
-- instances.
-> Q [Dec]
deriveJSONBoth dtj dfj opts name =
liftM2 (++) (dtj opts name) (dfj opts name)
-- | Functionality common to @deriveToJSON(1)(2)@ and @deriveFromJSON(1)(2)@.
deriveJSONClass :: [(JSONFun, JSONClass -> Name -> Options -> [Con] -> Q Exp)]
-- ^ The class methods and the functions which derive them.
-> JSONClass
-- ^ The class for which to generate an instance.
-> Options
-- ^ Encoding options.
-> Name
-- ^ Name of the type for which to generate a class instance
-- declaration.
-> Q [Dec]
deriveJSONClass consFuns jc opts name =
withType name $ \name' ctxt tvbs cons mbTys ->
(:[]) <$> fromCons name' ctxt tvbs cons mbTys
where
fromCons :: Name -> Cxt -> [TyVarBndr] -> [Con] -> Maybe [Type] -> Q Dec
fromCons name' ctxt tvbs cons mbTys = do
(instanceCxt, instanceType)
<- buildTypeInstance name' jc ctxt tvbs mbTys
instanceD (return instanceCxt)
(return instanceType)
(methodDecs name' cons)
methodDecs :: Name -> [Con] -> [Q Dec]
methodDecs name' cons = flip map consFuns $ \(jf, jfMaker) ->
funD (jsonFunValName jf (arity jc))
[ clause []
(normalB $ jfMaker jc name' opts cons)
[]
]
mkFunCommon :: (JSONClass -> Name -> Options -> [Con] -> Q Exp)
-- ^ The function which derives the expression.
-> JSONClass
-- ^ Which class's method is being derived.
-> Options
-- ^ Encoding options.
-> Name
-- ^ Name of the encoded type.
-> Q Exp
mkFunCommon consFun jc opts name = withType name fromCons
where
fromCons :: Name -> Cxt -> [TyVarBndr] -> [Con] -> Maybe [Type] -> Q Exp
fromCons name' ctxt tvbs cons mbTys = do
-- We force buildTypeInstance here since it performs some checks for whether
-- or not the provided datatype's kind matches the derived method's
-- typeclass, and produces errors if it can't.
!_ <- buildTypeInstance name' jc ctxt tvbs mbTys
consFun jc name' opts cons
dispatchFunByType :: JSONClass
-> JSONFun
-> Name
-> TyVarMap
-> Bool -- True if we are using the function argument that works
-- on lists (e.g., [a] -> Value). False if we are using
-- the function argument that works on single values
-- (e.g., a -> Value).
-> Type
-> Q Exp
dispatchFunByType _ jf _ tvMap list (VarT tyName) =
varE $ case M.lookup tyName tvMap of
Just (tfjExp, tfjlExp) -> if list then tfjlExp else tfjExp
Nothing -> jsonFunValOrListName list jf Arity0
dispatchFunByType jc jf conName tvMap list (SigT ty _) =
dispatchFunByType jc jf conName tvMap list ty
dispatchFunByType jc jf conName tvMap list (ForallT _ _ ty) =
dispatchFunByType jc jf conName tvMap list ty
dispatchFunByType jc jf conName tvMap list ty = do
let tyCon :: Type
tyArgs :: [Type]
tyCon :| tyArgs = unapplyTy ty
numLastArgs :: Int
numLastArgs = min (arityInt jc) (length tyArgs)
lhsArgs, rhsArgs :: [Type]
(lhsArgs, rhsArgs) = splitAt (length tyArgs - numLastArgs) tyArgs
tyVarNames :: [Name]
tyVarNames = M.keys tvMap
itf <- isTyFamily tyCon
if any (`mentionsName` tyVarNames) lhsArgs
|| itf && any (`mentionsName` tyVarNames) tyArgs
then outOfPlaceTyVarError jc conName
else if any (`mentionsName` tyVarNames) rhsArgs
then appsE $ varE (jsonFunValOrListName list jf $ toEnum numLastArgs)
: zipWith (dispatchFunByType jc jf conName tvMap)
(cycle [False,True])
(interleave rhsArgs rhsArgs)
else varE $ jsonFunValOrListName list jf Arity0
dispatchToJSON, dispatchToEncoding, dispatchParseJSON
:: JSONClass -> Name -> TyVarMap -> Type -> Q Exp
dispatchToJSON jc n tvMap = dispatchFunByType jc ToJSON n tvMap False
dispatchToEncoding jc n tvMap = dispatchFunByType jc ToEncoding n tvMap False
dispatchParseJSON jc n tvMap = dispatchFunByType jc ParseJSON n tvMap False
--------------------------------------------------------------------------------
-- Utility functions
--------------------------------------------------------------------------------
-- | Boilerplate for top level splices.
--
-- The given 'Name' must meet one of two criteria:
--
-- 1. It must be the name of a type constructor of a plain data type or newtype.
-- 2. It must be the name of a data family instance or newtype instance constructor.
-- Any other value will result in an exception.
withType :: Name
-> (Name -> Cxt -> [TyVarBndr] -> [Con] -> Maybe [Type] -> Q a)
-- ^ Function that generates the actual code. Will be applied
-- to the datatype/data family 'Name', datatype context, type
-- variable binders and constructors extracted from the given
-- 'Name'. If the 'Name' is from a data family instance
-- constructor, it will also have its instantiated types;
-- otherwise, it will be 'Nothing'.
-> Q a
-- ^ Resulting value in the 'Q'uasi monad.
withType name f = do
info <- reify name
case info of
TyConI dec ->
case dec of
#if MIN_VERSION_template_haskell(2,11,0)
DataD ctxt _ tvbs _ cons _ -> f name ctxt tvbs cons Nothing
NewtypeD ctxt _ tvbs _ con _ -> f name ctxt tvbs [con] Nothing
#else
DataD ctxt _ tvbs cons _ -> f name ctxt tvbs cons Nothing
NewtypeD ctxt _ tvbs con _ -> f name ctxt tvbs [con] Nothing
#endif
other -> fail $ ns ++ "Unsupported type: " ++ show other
#if MIN_VERSION_template_haskell(2,11,0)
DataConI _ _ parentName -> do
#else
DataConI _ _ parentName _ -> do
#endif
parentInfo <- reify parentName
case parentInfo of
#if MIN_VERSION_template_haskell(2,11,0)
FamilyI (DataFamilyD _ tvbs _) decs ->
#else
FamilyI (FamilyD DataFam _ tvbs _) decs ->
#endif
let instDec = flip find decs $ \dec -> case dec of
#if MIN_VERSION_template_haskell(2,11,0)
DataInstD _ _ _ _ cons _ -> any ((name ==) . getConName) cons
NewtypeInstD _ _ _ _ con _ -> name == getConName con
#else
DataInstD _ _ _ cons _ -> any ((name ==) . getConName) cons
NewtypeInstD _ _ _ con _ -> name == getConName con
#endif
_ -> error $ ns ++ "Must be a data or newtype instance."
in case instDec of
#if MIN_VERSION_template_haskell(2,11,0)
Just (DataInstD ctxt _ instTys _ cons _) -> f parentName ctxt tvbs cons $ Just instTys
Just (NewtypeInstD ctxt _ instTys _ con _) -> f parentName ctxt tvbs [con] $ Just instTys
#else
Just (DataInstD ctxt _ instTys cons _) -> f parentName ctxt tvbs cons $ Just instTys
Just (NewtypeInstD ctxt _ instTys con _) -> f parentName ctxt tvbs [con] $ Just instTys
#endif
_ -> fail $ ns ++
"Could not find data or newtype instance constructor."
_ -> fail $ ns ++ "Data constructor " ++ show name ++
" is not from a data family instance constructor."
#if MIN_VERSION_template_haskell(2,11,0)
FamilyI DataFamilyD{} _ ->
#else
FamilyI (FamilyD DataFam _ _ _) _ ->
#endif
fail $ ns ++
"Cannot use a data family name. Use a data family instance constructor instead."
_ -> fail $ ns ++ "I need the name of a plain data type constructor, "
++ "or a data family instance constructor."
where
ns :: String
ns = "Data.Aeson.TH.withType: "
-- | Infer the context and instance head needed for a FromJSON or ToJSON instance.
buildTypeInstance :: Name
-- ^ The type constructor or data family name
-> JSONClass
-- ^ The typeclass to derive
-> Cxt
-- ^ The datatype context
-> [TyVarBndr]
-- ^ The type variables from the data type/data family declaration
-> Maybe [Type]
-- ^ 'Just' the types used to instantiate a data family instance,
-- or 'Nothing' if it's a plain data type
-> Q (Cxt, Type)
-- ^ The resulting 'Cxt' and 'Type' to use in a class instance
-- Plain data type/newtype case
buildTypeInstance tyConName jc dataCxt tvbs Nothing =
let varTys :: [Type]
varTys = map tvbToType tvbs
in buildTypeInstanceFromTys tyConName jc dataCxt varTys False
-- Data family instance case
--
-- The CPP is present to work around a couple of annoying old GHC bugs.
-- See Note [Polykinded data families in Template Haskell]
buildTypeInstance dataFamName jc dataCxt tvbs (Just instTysAndKinds) = do
#if !(MIN_VERSION_template_haskell(2,8,0)) || MIN_VERSION_template_haskell(2,10,0)
let instTys :: [Type]
instTys = zipWith stealKindForType tvbs instTysAndKinds
#else
let kindVarNames :: [Name]
kindVarNames = nub $ concatMap (tyVarNamesOfType . tvbKind) tvbs
numKindVars :: Int
numKindVars = length kindVarNames
givenKinds, givenKinds' :: [Kind]
givenTys :: [Type]
(givenKinds, givenTys) = splitAt numKindVars instTysAndKinds
givenKinds' = map sanitizeStars givenKinds
-- A GHC 7.6-specific bug requires us to replace all occurrences of
-- (ConT GHC.Prim.*) with StarT, or else Template Haskell will reject it.
-- Luckily, (ConT GHC.Prim.*) only seems to occur in this one spot.
sanitizeStars :: Kind -> Kind
sanitizeStars = go
where
go :: Kind -> Kind
go (AppT t1 t2) = AppT (go t1) (go t2)
go (SigT t k) = SigT (go t) (go k)
go (ConT n) | n == starKindName = StarT
go t = t
-- It's quite awkward to import * from GHC.Prim, so we'll just
-- hack our way around it.
starKindName :: Name
starKindName = mkNameG_tc "ghc-prim" "GHC.Prim" "*"
-- If we run this code with GHC 7.8, we might have to generate extra type
-- variables to compensate for any type variables that Template Haskell
-- eta-reduced away.
-- See Note [Polykinded data families in Template Haskell]
xTypeNames <- newNameList "tExtra" (length tvbs - length givenTys)
let xTys :: [Type]
xTys = map VarT xTypeNames
-- ^ Because these type variables were eta-reduced away, we can only
-- determine their kind by using stealKindForType. Therefore, we mark
-- them as VarT to ensure they will be given an explicit kind annotation
-- (and so the kind inference machinery has the right information).
substNamesWithKinds :: [(Name, Kind)] -> Type -> Type
substNamesWithKinds nks t = foldr' (uncurry substNameWithKind) t nks
-- The types from the data family instance might not have explicit kind
-- annotations, which the kind machinery needs to work correctly. To
-- compensate, we use stealKindForType to explicitly annotate any
-- types without kind annotations.
instTys :: [Type]
instTys = map (substNamesWithKinds (zip kindVarNames givenKinds'))
-- Note that due to a GHC 7.8-specific bug
-- (see Note [Polykinded data families in Template Haskell]),
-- there may be more kind variable names than there are kinds
-- to substitute. But this is OK! If a kind is eta-reduced, it
-- means that it was not instantiated to something more specific,
-- so we need not substitute it. Using stealKindForType will
-- grab the correct kind.
$ zipWith stealKindForType tvbs (givenTys ++ xTys)
#endif
buildTypeInstanceFromTys dataFamName jc dataCxt instTys True
-- For the given Types, generate an instance context and head.
buildTypeInstanceFromTys :: Name
-- ^ The type constructor or data family name
-> JSONClass
-- ^ The typeclass to derive
-> Cxt
-- ^ The datatype context
-> [Type]
-- ^ The types to instantiate the instance with
-> Bool
-- ^ True if it's a data family, False otherwise
-> Q (Cxt, Type)
buildTypeInstanceFromTys tyConName jc dataCxt varTysOrig isDataFamily = do
-- Make sure to expand through type/kind synonyms! Otherwise, the
-- eta-reduction check might get tripped up over type variables in a
-- synonym that are actually dropped.
-- (See GHC Trac #11416 for a scenario where this actually happened.)
varTysExp <- mapM expandSyn varTysOrig
let remainingLength :: Int
remainingLength = length varTysOrig - arityInt jc
droppedTysExp :: [Type]
droppedTysExp = drop remainingLength varTysExp
droppedStarKindStati :: [StarKindStatus]
droppedStarKindStati = map canRealizeKindStar droppedTysExp
-- Check there are enough types to drop and that all of them are either of
-- kind * or kind k (for some kind variable k). If not, throw an error.
when (remainingLength < 0 || elem NotKindStar droppedStarKindStati) $
derivingKindError jc tyConName
let droppedKindVarNames :: [Name]
droppedKindVarNames = catKindVarNames droppedStarKindStati
-- Substitute kind * for any dropped kind variables
varTysExpSubst :: [Type]
varTysExpSubst = map (substNamesWithKindStar droppedKindVarNames) varTysExp
remainingTysExpSubst, droppedTysExpSubst :: [Type]
(remainingTysExpSubst, droppedTysExpSubst) =
splitAt remainingLength varTysExpSubst
-- All of the type variables mentioned in the dropped types
-- (post-synonym expansion)
droppedTyVarNames :: [Name]
droppedTyVarNames = concatMap tyVarNamesOfType droppedTysExpSubst
-- If any of the dropped types were polykinded, ensure that they are of kind *
-- after substituting * for the dropped kind variables. If not, throw an error.
unless (all hasKindStar droppedTysExpSubst) $
derivingKindError jc tyConName
let preds :: [Maybe Pred]
kvNames :: [[Name]]
kvNames' :: [Name]
-- Derive instance constraints (and any kind variables which are specialized
-- to * in those constraints)
(preds, kvNames) = unzip $ map (deriveConstraint jc) remainingTysExpSubst
kvNames' = concat kvNames
-- Substitute the kind variables specialized in the constraints with *
remainingTysExpSubst' :: [Type]
remainingTysExpSubst' =
map (substNamesWithKindStar kvNames') remainingTysExpSubst
-- We now substitute all of the specialized-to-* kind variable names with
-- *, but in the original types, not the synonym-expanded types. The reason
-- we do this is a superficial one: we want the derived instance to resemble
-- the datatype written in source code as closely as possible. For example,
-- for the following data family instance:
--
-- data family Fam a
-- newtype instance Fam String = Fam String
--
-- We'd want to generate the instance:
--
-- instance C (Fam String)
--
-- Not:
--
-- instance C (Fam [Char])
remainingTysOrigSubst :: [Type]
remainingTysOrigSubst =
map (substNamesWithKindStar (union droppedKindVarNames kvNames'))
$ take remainingLength varTysOrig
remainingTysOrigSubst' :: [Type]
-- See Note [Kind signatures in derived instances] for an explanation
-- of the isDataFamily check.
remainingTysOrigSubst' =
if isDataFamily
then remainingTysOrigSubst
else map unSigT remainingTysOrigSubst
instanceCxt :: Cxt
instanceCxt = catMaybes preds
instanceType :: Type
instanceType = AppT (ConT $ jsonClassName jc)
$ applyTyCon tyConName remainingTysOrigSubst'
-- If the datatype context mentions any of the dropped type variables,
-- we can't derive an instance, so throw an error.
when (any (`predMentionsName` droppedTyVarNames) dataCxt) $
datatypeContextError tyConName instanceType
-- Also ensure the dropped types can be safely eta-reduced. Otherwise,
-- throw an error.
unless (canEtaReduce remainingTysExpSubst' droppedTysExpSubst) $
etaReductionError instanceType
return (instanceCxt, instanceType)
-- | Attempt to derive a constraint on a Type. If successful, return
-- Just the constraint and any kind variable names constrained to *.
-- Otherwise, return Nothing and the empty list.
--
-- See Note [Type inference in derived instances] for the heuristics used to
-- come up with constraints.
deriveConstraint :: JSONClass -> Type -> (Maybe Pred, [Name])
deriveConstraint jc t
| not (isTyVar t) = (Nothing, [])
| hasKindStar t = (Just (applyCon (jcConstraint Arity0) tName), [])
| otherwise = case hasKindVarChain 1 t of
Just ns | jcArity >= Arity1
-> (Just (applyCon (jcConstraint Arity1) tName), ns)
_ -> case hasKindVarChain 2 t of
Just ns | jcArity == Arity2
-> (Just (applyCon (jcConstraint Arity2) tName), ns)
_ -> (Nothing, [])
where
tName :: Name
tName = varTToName t
jcArity :: Arity
jcArity = arity jc
jcConstraint :: Arity -> Name
jcConstraint = jsonClassName . JSONClass (direction jc)
{-
Note [Polykinded data families in Template Haskell]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In order to come up with the correct instance context and head for an instance, e.g.,
instance C a => C (Data a) where ...
We need to know the exact types and kinds used to instantiate the instance. For
plain old datatypes, this is simple: every type must be a type variable, and
Template Haskell reliably tells us the type variables and their kinds.
Doing the same for data families proves to be much harder for three reasons:
1. On any version of Template Haskell, it may not tell you what an instantiated
type's kind is. For instance, in the following data family instance:
data family Fam (f :: * -> *) (a :: *)
data instance Fam f a
Then if we use TH's reify function, it would tell us the TyVarBndrs of the
data family declaration are:
[KindedTV f (AppT (AppT ArrowT StarT) StarT),KindedTV a StarT]
and the instantiated types of the data family instance are:
[VarT f1,VarT a1]
We can't just pass [VarT f1,VarT a1] to buildTypeInstanceFromTys, since we
have no way of knowing their kinds. Luckily, the TyVarBndrs tell us what the
kind is in case an instantiated type isn't a SigT, so we use the stealKindForType
function to ensure all of the instantiated types are SigTs before passing them
to buildTypeInstanceFromTys.
2. On GHC 7.6 and 7.8, a bug is present in which Template Haskell lists all of
the specified kinds of a data family instance before any of the instantiated
types. Fortunately, this is easy to deal with: you simply count the number of
distinct kind variables in the data family declaration, take that many elements
from the front of the Types list of the data family instance, substitute the
kind variables with their respective instantiated kinds (which you took earlier),
and proceed as normal.
3. On GHC 7.8, an even uglier bug is present (GHC Trac #9692) in which Template
Haskell might not even list all of the Types of a data family instance, since
they are eta-reduced away! And yes, kinds can be eta-reduced too.
The simplest workaround is to count how many instantiated types are missing from
the list and generate extra type variables to use in their place. Luckily, we
   needn't worry much if such a type's kind was eta-reduced away, since using stealKindForType
will get it back.
Note [Kind signatures in derived instances]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It is possible to put explicit kind signatures into the derived instances, e.g.,
instance C a => C (Data (f :: * -> *)) where ...
But it is preferable to avoid this if possible. If we come up with an incorrect
kind signature (which is entirely possible, since Template Haskell doesn't always
have the best track record with reifying kind signatures), then GHC will flat-out
reject the instance, which is quite unfortunate.
Plain old datatypes have the advantage that you can avoid using any kind signatures
at all in their instances. This is because a datatype declaration uses all type
variables, so the types that we use in a derived instance uniquely determine their
kinds. As long as we plug in the right types, the kind inferencer can do the rest
of the work. For this reason, we use unSigT to remove all kind signatures before
splicing in the instance context and head.
Data family instances are trickier, since a data family can have two instances that
are distinguished by kind alone, e.g.,
data family Fam (a :: k)
data instance Fam (a :: * -> *)
data instance Fam (a :: *)
If we dropped the kind signatures for C (Fam a), then GHC would have no way of
knowing which instance we are talking about. To avoid this scenario, we always
include explicit kind signatures in data family instances. There is a chance that
the inferred kind signatures will be incorrect, but if so, we can always fall back
on the mk- functions.
Note [Type inference in derived instances]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Type inference can be tricky to get right, and we want to avoid recreating the
entirety of GHC's type inferencer in Template Haskell. For this reason, we will
probably never come up with derived instance contexts that are as accurate as
GHC's. But that doesn't mean we can't do anything! There are a couple of simple
things we can do to make instance contexts that work for 80% of use cases:
1. If one of the last type parameters is polykinded, then its kind will be
specialized to * in the derived instance. We note what kind variable the type
parameter had and substitute it with * in the other types as well. For example,
imagine you had
data Data (a :: k) (b :: k)
   Then you'd want the derived instance to be:
instance C (Data (a :: *))
Not:
instance C (Data (a :: k))
2. We naïvely come up with instance constraints using the following criteria:
(i) If there's a type parameter n of kind *, generate a ToJSON n/FromJSON n
constraint.
(ii) If there's a type parameter n of kind k1 -> k2 (where k1/k2 are * or kind
variables), then generate a ToJSON1 n/FromJSON1 n constraint, and if
k1/k2 are kind variables, then substitute k1/k2 with * elsewhere in the
types. We must consider the case where they are kind variables because
you might have a scenario like this:
newtype Compose (f :: k2 -> *) (g :: k1 -> k2) (a :: k1)
= Compose (f (g a))
Which would have a derived ToJSON1 instance of:
instance (ToJSON1 f, ToJSON1 g) => ToJSON1 (Compose f g) where ...
(iii) If there's a type parameter n of kind k1 -> k2 -> k3 (where k1/k2/k3 are
* or kind variables), then generate a ToJSON2 n/FromJSON2 n constraint
and perform kind substitution as in the other cases.
-}
-- Determines the types of a constructor's arguments as well as the last type
-- parameters (mapped to their encoding/decoding functions), expanding through
-- any type synonyms.
--
-- The type parameters are determined on a constructor-by-constructor basis since
-- they may be refined to be particular types in a GADT.
reifyConTys :: JSONClass
-> [(Name, Name)]
-> Name
-> Q ([Type], TyVarMap)
reifyConTys jc tpjs conName = do
info <- reify conName
(ctxt, uncTy) <- case info of
DataConI _ ty _
#if !(MIN_VERSION_template_haskell(2,11,0))
_
#endif
-> fmap uncurryTy (expandSyn ty)
_ -> error "Must be a data constructor"
let (argTys, [resTy]) = NE.splitAt (NE.length uncTy - 1) uncTy
unapResTy = unapplyTy resTy
-- If one of the last type variables is refined to a particular type
-- (i.e., not truly polymorphic), we mark it with Nothing and filter
-- it out later, since we only apply encoding/decoding functions to
      -- arguments of a type that is (1) one of the last type variables,
      -- and (2) truly polymorphic.
jArity = arityInt jc
mbTvNames = map varTToNameMaybe $
NE.drop (NE.length unapResTy - jArity) unapResTy
-- We use M.fromList to ensure that if there are any duplicate type
-- variables (as can happen in a GADT), the rightmost type variable gets
      -- associated with its encoding/decoding function.
--
-- See Note [Matching functions with GADT type variables]
tvMap = M.fromList
. catMaybes -- Drop refined types
$ zipWith (\mbTvName tpj ->
fmap (\tvName -> (tvName, tpj)) mbTvName)
mbTvNames tpjs
if (any (`predMentionsName` M.keys tvMap) ctxt
|| M.size tvMap < jArity)
&& not (allowExQuant jc)
then existentialContextError conName
else return (argTys, tvMap)
{-
Note [Matching functions with GADT type variables]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When deriving ToJSON2, there is a tricky corner case to consider:
data Both a b where
BothCon :: x -> x -> Both x x
Which encoding functions should be applied to which arguments of BothCon?
We have a choice, since both the function of type (a -> Value) and of type
(b -> Value) can be applied to either argument. In such a scenario, the
second encoding function takes precedence over the first encoding function, so the
derived ToJSON2 instance would be something like:
instance ToJSON2 Both where
liftToJSON2 tj1 tj2 p (BothCon x1 x2) = Array $ create $ do
mv <- unsafeNew 2
unsafeWrite mv 0 (tj1 x1)
unsafeWrite mv 1 (tj2 x2)
return mv
This is not an arbitrary choice, as this definition ensures that
liftToJSON2 toJSON = liftToJSON for a derived ToJSON1 instance for
Both.
-}
-- A mapping of type variable Names to their encoding/decoding function Names.
-- For example, in a ToJSON2 declaration, a TyVarMap might look like
--
-- { a ~> (tj1, tjl1)
-- , b ~> (tj2, tjl2) }
--
-- where a and b are the last two type variables of the datatype, tj1 and tjl1 are
-- the function arguments of types (a -> Value) and ([a] -> Value), and tj2 and tjl2
-- are the function arguments of types (b -> Value) and ([b] -> Value).
type TyVarMap = Map Name (Name, Name)
-- | If a VarT is missing an explicit kind signature, steal it from a TyVarBndr.
stealKindForType :: TyVarBndr -> Type -> Type
stealKindForType tvb t@VarT{} = SigT t (tvbKind tvb)
stealKindForType _ t = t
-- | Extracts the kind from a type variable binder.
tvbKind :: TyVarBndr -> Kind
#if MIN_VERSION_template_haskell(2,8,0)
tvbKind (PlainTV _ ) = StarT
#else
tvbKind (PlainTV _ ) = StarK
#endif
tvbKind (KindedTV _ k) = k
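-- | Convert a type variable binder to a Type, preserving its explicit kind
-- signature (if any).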
tvbToType :: TyVarBndr -> Type
tvbToType (PlainTV n) = VarT n
tvbToType (KindedTV n k) = SigT (VarT n) k
-- | Returns True if a Type has kind *.
hasKindStar :: Type -> Bool
hasKindStar VarT{} = True
#if MIN_VERSION_template_haskell(2,8,0)
hasKindStar (SigT _ StarT) = True
#else
hasKindStar (SigT _ StarK) = True
#endif
hasKindStar _ = False
-- Returns True if a kind is equal to *, or if it is a kind variable.
isStarOrVar :: Kind -> Bool
#if MIN_VERSION_template_haskell(2,8,0)
isStarOrVar StarT = True
isStarOrVar VarT{} = True
#else
isStarOrVar StarK = True
#endif
isStarOrVar _ = False
-- Generate a list of fresh names with a common prefix, and numbered suffixes.
newNameList :: String -> Int -> Q [Name]
newNameList prefix len = mapM newName [prefix ++ show n | n <- [1..len]]
-- Gets all of the type/kind variable names mentioned somewhere in a Type.
tyVarNamesOfType :: Type -> [Name]
tyVarNamesOfType = go
where
go :: Type -> [Name]
go (AppT t1 t2) = go t1 ++ go t2
go (SigT t _k) = go t
#if MIN_VERSION_template_haskell(2,8,0)
++ go _k
#endif
go (VarT n) = [n]
go _ = []
-- | Gets all of the type/kind variable names mentioned somewhere in a Kind.
tyVarNamesOfKind :: Kind -> [Name]
#if MIN_VERSION_template_haskell(2,8,0)
tyVarNamesOfKind = tyVarNamesOfType
#else
tyVarNamesOfKind _ = [] -- There are no kind variables
#endif
-- | @hasKindVarChain n kind@ checks if @kind@ is of the form
-- k_0 -> k_1 -> ... -> k_n, where k_0, k_1, ..., and k_n can be * or
-- kind variables.
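--
-- For example, @hasKindVarChain 1@ accepts a type whose annotated kind is
-- @* -> *@ or @k1 -> k2@ (returning the names of any kind variables involved),
-- but rejects one whose kind is @* -> * -> *@.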
hasKindVarChain :: Int -> Type -> Maybe [Name]
hasKindVarChain kindArrows t =
let uk = uncurryKind (tyKind t)
in if (NE.length uk - 1 == kindArrows) && F.all isStarOrVar uk
then Just (concatMap tyVarNamesOfKind uk)
else Nothing
-- | If a Type is a SigT, returns its kind signature. Otherwise, return *.
tyKind :: Type -> Kind
tyKind (SigT _ k) = k
tyKind _ = starK
-- | Extract Just the Name from a type variable. If the argument Type is not a
-- type variable, return Nothing.
varTToNameMaybe :: Type -> Maybe Name
varTToNameMaybe (VarT n) = Just n
varTToNameMaybe (SigT t _) = varTToNameMaybe t
varTToNameMaybe _ = Nothing
-- | Extract the Name from a type variable. If the argument Type is not a
-- type variable, throw an error.
varTToName :: Type -> Name
varTToName = fromMaybe (error "Not a type variable!") . varTToNameMaybe
-- | Extracts the name from a constructor.
getConName :: Con -> Name
getConName (NormalC name _) = name
getConName (RecC name _) = name
getConName (InfixC _ name _) = name
getConName (ForallC _ _ con) = getConName con
#if MIN_VERSION_template_haskell(2,11,0)
getConName (GadtC names _ _) = head names
getConName (RecGadtC names _ _) = head names
#endif
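-- | Interleave the elements of two lists, stopping at the end of the shorter
-- list.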
interleave :: [a] -> [a] -> [a]
interleave (a1:a1s) (a2:a2s) = a1:a2:interleave a1s a2s
interleave _ _ = []
-- | Fully applies a type constructor to its type variables.
applyTyCon :: Name -> [Type] -> Type
applyTyCon = foldl' AppT . ConT
-- | Is the given type a variable?
isTyVar :: Type -> Bool
isTyVar (VarT _) = True
isTyVar (SigT t _) = isTyVar t
isTyVar _ = False
-- | Is the given type a type family constructor (and not a data family constructor)?
isTyFamily :: Type -> Q Bool
isTyFamily (ConT n) = do
info <- reify n
return $ case info of
#if MIN_VERSION_template_haskell(2,11,0)
FamilyI OpenTypeFamilyD{} _ -> True
#else
FamilyI (FamilyD TypeFam _ _ _) _ -> True
#endif
#if MIN_VERSION_template_haskell(2,9,0)
FamilyI ClosedTypeFamilyD{} _ -> True
#endif
_ -> False
isTyFamily _ = return False
-- | Peel off a kind signature from a Type (if it has one).
unSigT :: Type -> Type
unSigT (SigT t _) = t
unSigT t = t
-- | Are all of the items in a list (which have an ordering) distinct?
--
-- This uses Set (as opposed to nub) for better asymptotic time complexity.
allDistinct :: Ord a => [a] -> Bool
allDistinct = allDistinct' Set.empty
where
allDistinct' :: Ord a => Set a -> [a] -> Bool
allDistinct' uniqs (x:xs)
| x `Set.member` uniqs = False
| otherwise = allDistinct' (Set.insert x uniqs) xs
allDistinct' _ _ = True
-- | Does the given type mention any of the Names in the list?
mentionsName :: Type -> [Name] -> Bool
mentionsName = go
where
go :: Type -> [Name] -> Bool
go (AppT t1 t2) names = go t1 names || go t2 names
go (SigT t _k) names = go t names
#if MIN_VERSION_template_haskell(2,8,0)
|| go _k names
#endif
go (VarT n) names = n `elem` names
go _ _ = False
-- | Does an instance predicate mention any of the Names in the list?
predMentionsName :: Pred -> [Name] -> Bool
#if MIN_VERSION_template_haskell(2,10,0)
predMentionsName = mentionsName
#else
predMentionsName (ClassP n tys) names = n `elem` names || any (`mentionsName` names) tys
predMentionsName (EqualP t1 t2) names = mentionsName t1 names || mentionsName t2 names
#endif
-- | Split an applied type into its individual components. For example, this:
--
-- @
-- Either Int Char
-- @
--
-- would split to this:
--
-- @
-- [Either, Int, Char]
-- @
unapplyTy :: Type -> NonEmpty Type
unapplyTy = NE.reverse . go
where
go :: Type -> NonEmpty Type
go (AppT t1 t2) = t2 <| go t1
go (SigT t _) = go t
go (ForallT _ _ t) = go t
go t = t :| []
-- | Split a type signature by the arrows on its spine. For example, this:
--
-- @
-- forall a b. (a ~ b) => (a -> b) -> Char -> ()
-- @
--
-- would split to this:
--
-- @
-- (a ~ b, [a -> b, Char, ()])
-- @
uncurryTy :: Type -> (Cxt, NonEmpty Type)
uncurryTy (AppT (AppT ArrowT t1) t2) =
let (ctxt, tys) = uncurryTy t2
in (ctxt, t1 <| tys)
uncurryTy (SigT t _) = uncurryTy t
uncurryTy (ForallT _ ctxt t) =
let (ctxt', tys) = uncurryTy t
in (ctxt ++ ctxt', tys)
uncurryTy t = ([], t :| [])
-- | Like 'uncurryTy', except at the kind level.
uncurryKind :: Kind -> NonEmpty Kind
#if MIN_VERSION_template_haskell(2,8,0)
uncurryKind = snd . uncurryTy
#else
uncurryKind (ArrowK k1 k2) = k1 <| uncurryKind k2
uncurryKind k = k :| []
#endif
createKindChain :: Int -> Kind
createKindChain = go starK
where
go :: Kind -> Int -> Kind
go k !0 = k
#if MIN_VERSION_template_haskell(2,8,0)
go k !n = go (AppT (AppT ArrowT StarT) k) (n - 1)
#else
go k !n = go (ArrowK StarK k) (n - 1)
#endif
-- | Makes a string literal expression from a constructor's name.
conNameExp :: Options -> Con -> Q Exp
conNameExp opts = litE
. stringL
. constructorTagModifier opts
. nameBase
. getConName
-- | Creates a string literal expression from a record field label.
fieldLabelExp :: Options -- ^ Encoding options
-> Name
-> Q Exp
fieldLabelExp opts = litE . stringL . fieldLabelModifier opts . nameBase
-- | The name of the outermost 'Value' constructor.
valueConName :: Value -> String
valueConName (Object _) = "Object"
valueConName (Array _) = "Array"
valueConName (String _) = "String"
valueConName (Number _) = "Number"
valueConName (Bool _) = "Boolean"
valueConName Null = "Null"
applyCon :: Name -> Name -> Pred
applyCon con t =
#if MIN_VERSION_template_haskell(2,10,0)
AppT (ConT con) (VarT t)
#else
ClassP con [VarT t]
#endif
-- | Checks to see if the last types in a data family instance can be safely eta-
-- reduced (i.e., dropped), given the other types. This checks for three conditions:
--
-- (1) All of the dropped types are type variables
-- (2) All of the dropped types are distinct
-- (3) None of the remaining types mention any of the dropped types
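--
-- For example, dropping @[a, b]@ with remaining types @[f]@ succeeds, whereas
-- dropping @[a, a]@ (not distinct) or dropping @[a]@ with remaining types
-- @[Maybe a]@ (which mentions @a@) does not.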
canEtaReduce :: [Type] -> [Type] -> Bool
canEtaReduce remaining dropped =
all isTyVar dropped
&& allDistinct droppedNames -- Make sure not to pass something of type [Type], since Type
-- didn't have an Ord instance until template-haskell-2.10.0.0
&& not (any (`mentionsName` droppedNames) remaining)
where
droppedNames :: [Name]
droppedNames = map varTToName dropped
-------------------------------------------------------------------------------
-- Expanding type synonyms
-------------------------------------------------------------------------------
-- | Expands all type synonyms in a type. Written by Dan Rosén in the
-- @genifunctors@ package (licensed under BSD3).
expandSyn :: Type -> Q Type
expandSyn (ForallT tvs ctx t) = ForallT tvs ctx <$> expandSyn t
expandSyn t@AppT{} = expandSynApp t []
expandSyn t@ConT{} = expandSynApp t []
expandSyn (SigT t k) = do t' <- expandSyn t
k' <- expandSynKind k
return (SigT t' k')
expandSyn t = return t
expandSynKind :: Kind -> Q Kind
#if MIN_VERSION_template_haskell(2,8,0)
expandSynKind = expandSyn
#else
expandSynKind = return -- There are no kind synonyms to deal with
#endif
expandSynApp :: Type -> [Type] -> Q Type
expandSynApp (AppT t1 t2) ts = do
t2' <- expandSyn t2
expandSynApp t1 (t2':ts)
expandSynApp (ConT n) ts | nameBase n == "[]" = return $ foldl' AppT ListT ts
expandSynApp t@(ConT n) ts = do
info <- reify n
case info of
TyConI (TySynD _ tvs rhs) ->
let (ts', ts'') = splitAt (length tvs) ts
subs = mkSubst tvs ts'
rhs' = substType subs rhs
in expandSynApp rhs' ts''
_ -> return $ foldl' AppT t ts
expandSynApp t ts = do
t' <- expandSyn t
return $ foldl' AppT t' ts
type TypeSubst = Map Name Type
type KindSubst = Map Name Kind
mkSubst :: [TyVarBndr] -> [Type] -> TypeSubst
mkSubst vs ts =
let vs' = map un vs
un (PlainTV v) = v
un (KindedTV v _) = v
in M.fromList $ zip vs' ts
substType :: TypeSubst -> Type -> Type
substType subs (ForallT v c t) = ForallT v c $ substType subs t
substType subs t@(VarT n) = M.findWithDefault t n subs
substType subs (AppT t1 t2) = AppT (substType subs t1) (substType subs t2)
substType subs (SigT t k) = SigT (substType subs t)
#if MIN_VERSION_template_haskell(2,8,0)
(substType subs k)
#else
k
#endif
substType _ t = t
substKind :: KindSubst -> Type -> Type
#if MIN_VERSION_template_haskell(2,8,0)
substKind = substType
#else
substKind _ t = t -- There are no kind variables!
#endif
substNameWithKind :: Name -> Kind -> Type -> Type
substNameWithKind n k = substKind (M.singleton n k)
substNamesWithKindStar :: [Name] -> Type -> Type
substNamesWithKindStar ns t = foldr' (flip substNameWithKind starK) t ns
-------------------------------------------------------------------------------
-- Error messages
-------------------------------------------------------------------------------
-- | Either the given data type doesn't have enough type variables, or one of
-- the type variables to be eta-reduced cannot realize kind *.
derivingKindError :: JSONClass -> Name -> Q a
derivingKindError jc tyConName = fail
. showString "Cannot derive well-kinded instance of form ‘"
. showString className
. showChar ' '
. showParen True
( showString (nameBase tyConName)
. showString " ..."
)
. showString "‘\n\tClass "
. showString className
. showString " expects an argument of kind "
. showString (pprint . createKindChain $ arityInt jc)
$ ""
where
className :: String
className = nameBase $ jsonClassName jc
-- | One of the last type variables cannot be eta-reduced (see the canEtaReduce
-- function for the criteria it would have to meet).
etaReductionError :: Type -> Q a
etaReductionError instanceType = fail $
"Cannot eta-reduce to an instance of form \n\tinstance (...) => "
++ pprint instanceType
-- | The data type has a DatatypeContext which mentions one of the eta-reduced
-- type variables.
datatypeContextError :: Name -> Type -> Q a
datatypeContextError dataName instanceType = fail
. showString "Can't make a derived instance of ‘"
. showString (pprint instanceType)
. showString "‘:\n\tData type ‘"
. showString (nameBase dataName)
. showString "‘ must not have a class context involving the last type argument(s)"
$ ""
-- | The data type mentions one of the n eta-reduced type variables somewhere
-- other than in the last n positions of a type in a constructor's field.
outOfPlaceTyVarError :: JSONClass -> Name -> a
outOfPlaceTyVarError jc conName = error
. showString "Constructor ‘"
. showString (nameBase conName)
. showString "‘ must only use its last "
. shows n
. showString " type variable(s) within the last "
. shows n
. showString " argument(s) of a data type"
$ ""
where
n :: Int
n = arityInt jc
-- | The data type has an existential constraint which mentions one of the
-- eta-reduced type variables.
existentialContextError :: Name -> a
existentialContextError conName = error
. showString "Constructor ‘"
. showString (nameBase conName)
. showString "‘ must be truly polymorphic in the last argument(s) of the data type"
$ ""
-------------------------------------------------------------------------------
-- Class-specific constants
-------------------------------------------------------------------------------
-- | A representation of the arity of the ToJSON/FromJSON typeclass being derived.
data Arity = Arity0 | Arity1 | Arity2
deriving (Enum, Eq, Ord)
-- | Whether ToJSON(1)(2) or FromJSON(1)(2) is being derived.
data Direction = To | From
-- | A representation of which typeclass method is being spliced in.
data JSONFun = ToJSON | ToEncoding | ParseJSON
-- | A representation of which typeclass is being derived.
data JSONClass = JSONClass { direction :: Direction, arity :: Arity }
toJSONClass, toJSON1Class, toJSON2Class,
fromJSONClass, fromJSON1Class, fromJSON2Class :: JSONClass
toJSONClass = JSONClass To Arity0
toJSON1Class = JSONClass To Arity1
toJSON2Class = JSONClass To Arity2
fromJSONClass = JSONClass From Arity0
fromJSON1Class = JSONClass From Arity1
fromJSON2Class = JSONClass From Arity2
jsonClassName :: JSONClass -> Name
jsonClassName (JSONClass To Arity0) = ''ToJSON
jsonClassName (JSONClass To Arity1) = ''ToJSON1
jsonClassName (JSONClass To Arity2) = ''ToJSON2
jsonClassName (JSONClass From Arity0) = ''FromJSON
jsonClassName (JSONClass From Arity1) = ''FromJSON1
jsonClassName (JSONClass From Arity2) = ''FromJSON2
jsonFunValName :: JSONFun -> Arity -> Name
jsonFunValName ToJSON Arity0 = 'toJSON
jsonFunValName ToJSON Arity1 = 'liftToJSON
jsonFunValName ToJSON Arity2 = 'liftToJSON2
jsonFunValName ToEncoding Arity0 = 'toEncoding
jsonFunValName ToEncoding Arity1 = 'liftToEncoding
jsonFunValName ToEncoding Arity2 = 'liftToEncoding2
jsonFunValName ParseJSON Arity0 = 'parseJSON
jsonFunValName ParseJSON Arity1 = 'liftParseJSON
jsonFunValName ParseJSON Arity2 = 'liftParseJSON2
jsonFunListName :: JSONFun -> Arity -> Name
jsonFunListName ToJSON Arity0 = 'toJSONList
jsonFunListName ToJSON Arity1 = 'liftToJSONList
jsonFunListName ToJSON Arity2 = 'liftToJSONList2
jsonFunListName ToEncoding Arity0 = 'toEncodingList
jsonFunListName ToEncoding Arity1 = 'liftToEncodingList
jsonFunListName ToEncoding Arity2 = 'liftToEncodingList2
jsonFunListName ParseJSON Arity0 = 'parseJSONList
jsonFunListName ParseJSON Arity1 = 'liftParseJSONList
jsonFunListName ParseJSON Arity2 = 'liftParseJSONList2
jsonFunValOrListName :: Bool -- e.g., toJSONList if True, toJSON if False
-> JSONFun -> Arity -> Name
jsonFunValOrListName False = jsonFunValName
jsonFunValOrListName True = jsonFunListName
arityInt :: JSONClass -> Int
arityInt = fromEnum . arity
allowExQuant :: JSONClass -> Bool
allowExQuant (JSONClass To _) = True
allowExQuant _ = False
-------------------------------------------------------------------------------
-- StarKindStatus
-------------------------------------------------------------------------------
-- | Whether a type is not of kind *, is of kind *, or is a kind variable.
data StarKindStatus = NotKindStar
| KindStar
| IsKindVar Name
deriving Eq
-- | Does a Type have kind * or k (for some kind variable k)?
canRealizeKindStar :: Type -> StarKindStatus
canRealizeKindStar t
| hasKindStar t = KindStar
| otherwise = case t of
#if MIN_VERSION_template_haskell(2,8,0)
SigT _ (VarT k) -> IsKindVar k
#endif
_ -> NotKindStar
-- | Returns 'Just' the kind variable 'Name' of a 'StarKindStatus' if it exists.
-- Otherwise, returns 'Nothing'.
starKindStatusToName :: StarKindStatus -> Maybe Name
starKindStatusToName (IsKindVar n) = Just n
starKindStatusToName _ = Nothing
-- | Concat together all of the StarKindStatuses that are IsKindVar and extract
-- the kind variables' Names out.
catKindVarNames :: [StarKindStatus] -> [Name]
catKindVarNames = mapMaybe starKindStatusToName
| tolysz/prepare-ghcjs | spec-lts8/aeson/Data/Aeson/TH.hs | bsd-3-clause | 97,103 | 4 | 28 | 30,176 | 18,108 | 9,669 | 8,439 | -1 | -1 |
-- Nasty hack; demonstrates how to use HaskellDB's internals to
-- access non-standard database features.
import Database.HaskellDB
import Database.HaskellDB.DBLayout
import Database.HaskellDB.Database
import Database.HaskellDB.Query
import Database.HaskellDB.PrimQuery
import Database.HaskellDB.Sql hiding (tables)
import TestConnect
import Data.Maybe
import System.Time
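-- Non-portable trick: splice the SQL function NOW() into the query as a
-- literal expression.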
now :: Expr CalendarTime
now = Expr (ConstExpr (OtherLit "NOW()"))
data Timefield = Timefield
instance FieldTag Timefield where fieldName _ = "timefield"
timefield = mkAttr Timefield :: Attr Timefield CalendarTime
q = project (timefield << now)
getTime :: Database -> IO CalendarTime
getTime db = do
(r:_) <- query db q
return (r!timefield)
printTime db = do
putStrLn $ show $ showSql q
t <- getTime db
putStrLn $ calendarTimeToString t
main = argConnect printTime | m4dc4p/haskelldb | test/old/current-time.hs | bsd-3-clause | 883 | 0 | 9 | 154 | 241 | 126 | 115 | 24 | 1 |
--------------------------------------------------------------------
-- |
-- Module : Text.Atom.Pub
-- Copyright : (c) Galois, Inc. 2008
-- License : BSD3
--
-- Maintainer: Sigbjorn Finne <[email protected]>
-- Stability : provisional
-- Portability:
--
-- Types for the Atom Publishing Protocol (APP)
--
--------------------------------------------------------------------
module Text.Atom.Pub where
import Text.XML.Light.Types as XML
import Text.Atom.Feed ( TextContent, Category, URI )
data Service
= Service
{ serviceWorkspaces :: [Workspace]
, serviceOther :: [XML.Element]
}
data Workspace
= Workspace
{ workspaceTitle :: TextContent
, workspaceCols :: [Collection]
, workspaceOther :: [XML.Element]
}
data Collection
= Collection
{ collectionURI :: URI
, collectionTitle :: TextContent
, collectionAccept :: [Accept]
, collectionCats :: [Categories]
, collectionOther :: [XML.Element]
}
data Categories
= CategoriesExternal URI
| Categories (Maybe Bool) (Maybe URI) [Category]
deriving (Show)
newtype Accept = Accept { acceptType :: String }
| GaloisInc/feed | Text/Atom/Pub.hs | bsd-3-clause | 1,138 | 0 | 10 | 225 | 213 | 139 | 74 | 24 | 0 |
{-# LANGUAGE PolyKinds, DataKinds, MagicHash, ScopedTypeVariables, MultiParamTypeClasses #-}
module Eta.Interop (SObject(..)) where
import Data.Proxy
import GHC.Base
import GHC.TypeLits
data {-# CLASS "java.lang.Object" #-} SObject (s :: Symbol) = SObject (Object# (SObject s))
instance (KnownSymbol s) => Class (SObject s) where
unobj (SObject s) = s
obj s = SObject s
classIdentifier _ = symbolVal (Proxy :: Proxy s)
class Overloadable (a :: k) (s :: Symbol) r where
overloaded :: Proxy# a -> Proxy# s -> r
class Constructor r where
new :: r
| rahulmutt/ghcvm | libraries/base/Eta/Interop.hs | bsd-3-clause | 559 | 0 | 10 | 99 | 186 | 101 | 85 | 14 | 0 |
-- |
-- Module : Data.ASN1.BinaryEncoding
-- License : BSD-style
-- Maintainer : Vincent Hanquez <[email protected]>
-- Stability : experimental
-- Portability : unknown
--
-- A module containing ASN1 BER and DER specification encoding/decoding.
--
{-# LANGUAGE EmptyDataDecls #-}
module Data.ASN1.BinaryEncoding
( BER(..)
, DER(..)
) where
import Data.ASN1.Stream
import Data.ASN1.Types
import Data.ASN1.Encoding
import Data.ASN1.BinaryEncoding.Parse
import Data.ASN1.BinaryEncoding.Writer
import Data.ASN1.Prim
import qualified Control.Exception as E
-- | Basic Encoding Rules (BER)
data BER = BER
-- | Distinguished Encoding Rules (DER)
data DER = DER
instance ASN1DecodingRepr BER where
decodeASN1Repr _ lbs = decodeEventASN1Repr (const Nothing) `fmap` parseLBS lbs
instance ASN1Decoding BER where
decodeASN1 _ lbs = (map fst . decodeEventASN1Repr (const Nothing)) `fmap` parseLBS lbs
instance ASN1DecodingRepr DER where
decodeASN1Repr _ lbs = decodeEventASN1Repr checkDER `fmap` parseLBS lbs
instance ASN1Decoding DER where
decodeASN1 _ lbs = (map fst . decodeEventASN1Repr checkDER) `fmap` parseLBS lbs
instance ASN1Encoding DER where
encodeASN1 _ l = toLazyByteString $ encodeToRaw l
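-- | Determine which construction type (Sequence, Set, or Container) a header denotes.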
decodeConstruction :: ASN1Header -> ASN1ConstructionType
decodeConstruction (ASN1Header Universal 0x10 _ _) = Sequence
decodeConstruction (ASN1Header Universal 0x11 _ _) = Set
decodeConstruction (ASN1Header c t _ _) = Container c t
decodeEventASN1Repr :: (ASN1Header -> Maybe ASN1Error) -> [ASN1Event] -> [ASN1Repr]
decodeEventASN1Repr checkHeader l = loop [] l
where loop _ [] = []
loop acc (h@(Header hdr@(ASN1Header _ _ True _)):ConstructionBegin:xs) =
let ctype = decodeConstruction hdr in
case checkHeader hdr of
Nothing -> (Start ctype,[h,ConstructionBegin]) : loop (ctype:acc) xs
Just err -> E.throw err
loop acc (h@(Header hdr@(ASN1Header _ _ False _)):p@(Primitive prim):xs) =
case checkHeader hdr of
Nothing -> case decodePrimitive hdr prim of
Left err -> E.throw err
Right obj -> (obj, [h,p]) : loop acc xs
Just err -> E.throw err
loop (ctype:acc) (ConstructionEnd:xs) = (End ctype, [ConstructionEnd]) : loop acc xs
loop _ (x:_) = E.throw $ StreamUnexpectedSituation (show x)
-- | A DER header needs to have a definite length, encoded in the shortest possible form.
checkDER :: ASN1Header -> Maybe ASN1Error
checkDER (ASN1Header _ _ _ len) = checkLength len
where checkLength :: ASN1Length -> Maybe ASN1Error
checkLength LenIndefinite = Just $ PolicyFailed "DER" "indefinite length not allowed"
checkLength (LenShort _) = Nothing
checkLength (LenLong n i)
| n == 1 && i < 0x80 = Just $ PolicyFailed "DER" "long length should be a short length"
| n == 1 && i >= 0x80 = Nothing
| otherwise = if i >= 2^((n-1)*8) && i < 2^(n*8)
then Nothing
else Just $ PolicyFailed "DER" "long length is not shortest"
encodeToRaw :: [ASN1] -> [ASN1Event]
encodeToRaw = concatMap writeTree . mkTree
where writeTree (p@(Start _),children) = snd $ encodeConstructed p children
writeTree (p,_) = snd $ encodePrimitive p
mkTree [] = []
mkTree (x@(Start _):xs) =
let (tree, r) = spanEnd 0 xs
in (x,tree):mkTree r
mkTree (p:xs) = (p,[]) : mkTree xs
spanEnd :: Int -> [ASN1] -> ([ASN1], [ASN1])
spanEnd _ [] = ([], [])
spanEnd 0 (x@(End _):xs) = ([x], xs)
spanEnd lvl (x:xs) = case x of
Start _ -> let (ys, zs) = spanEnd (lvl+1) xs in (x:ys, zs)
End _ -> let (ys, zs) = spanEnd (lvl-1) xs in (x:ys, zs)
_ -> let (ys, zs) = spanEnd lvl xs in (x:ys, zs)
| mboes/hs-asn1 | data/Data/ASN1/BinaryEncoding.hs | bsd-3-clause | 4,064 | 0 | 16 | 1,184 | 1,375 | 723 | 652 | 70 | 8 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-missing-fields #-}
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
{-# OPTIONS_GHC -fno-warn-name-shadowing #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-----------------------------------------------------------------
-- Autogenerated by Thrift
-- --
-- DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
-- @generated
-----------------------------------------------------------------
module Module1_Consts where
import Prelude ( Bool(..), Enum, Float, IO, Double, String, Maybe(..),
Eq, Show, Ord,
concat, error, fromIntegral, fromEnum, length, map,
maybe, not, null, otherwise, return, show, toEnum,
enumFromTo, Bounded, minBound, maxBound, seq, succ,
pred, enumFrom, enumFromThen, enumFromThenTo,
(.), (&&), (||), (==), (++), ($), (-), (>>=), (>>))
import qualified Control.Applicative as Applicative (ZipList(..))
import Control.Applicative ( (<*>) )
import qualified Control.DeepSeq as DeepSeq
import qualified Control.Exception as Exception
import qualified Control.Monad as Monad ( liftM, ap, when )
import qualified Data.ByteString.Lazy as BS
import Data.Functor ( (<$>) )
import qualified Data.Hashable as Hashable
import qualified Data.Int as Int
import Data.List
import qualified Data.Maybe as Maybe (catMaybes)
import qualified Data.Text.Lazy.Encoding as Encoding ( decodeUtf8, encodeUtf8 )
import qualified Data.Text.Lazy as LT
import qualified Data.Typeable as Typeable ( Typeable )
import qualified Data.HashMap.Strict as Map
import qualified Data.HashSet as Set
import qualified Data.Vector as Vector
import qualified Test.QuickCheck.Arbitrary as Arbitrary ( Arbitrary(..) )
import qualified Test.QuickCheck as QuickCheck ( elements )
import qualified Thrift
import qualified Thrift.Types as Types
import qualified Thrift.Serializable as Serializable
import qualified Thrift.Arbitraries as Arbitraries
import qualified Module1_Types
c1 :: Module1_Types.Struct
c1 = Module1_Types.default_Struct{Module1_Types.struct_first = 201, Module1_Types.struct_second = "module1_str"}
e1s :: Vector.Vector Module1_Types.Enum
e1s = (Vector.fromList [Module1_Types.ONE,Module1_Types.THREE])
| getyourguide/fbthrift | thrift/compiler/test/fixtures/qualified/gen-hs/Module1_Consts.hs | apache-2.0 | 2,426 | 0 | 8 | 424 | 488 | 343 | 145 | 43 | 1 |
module Distribution.Client.Dependency.Modular.IndexConversion (
convPIs
-- * TODO: The following don't actually seem to be used anywhere?
, convIPI
, convSPI
, convPI
) where
import Data.List as L
import Data.Map as M
import Data.Maybe
import Prelude hiding (pi)
import qualified Distribution.Client.PackageIndex as CI
import Distribution.Client.Types
import Distribution.Client.ComponentDeps (Component(..))
import Distribution.Compiler
import Distribution.InstalledPackageInfo as IPI
import Distribution.Package -- from Cabal
import Distribution.PackageDescription as PD -- from Cabal
import qualified Distribution.Simple.PackageIndex as SI
import Distribution.System
import Distribution.Client.Dependency.Modular.Dependency as D
import Distribution.Client.Dependency.Modular.Flag as F
import Distribution.Client.Dependency.Modular.Index
import Distribution.Client.Dependency.Modular.Package
import Distribution.Client.Dependency.Modular.Tree
import Distribution.Client.Dependency.Modular.Version
-- | Convert both the installed package index and the source package
-- index into one uniform solver index.
--
-- We use 'allPackagesBySourcePackageId' for the installed package index
-- because that returns us several instances of the same package and version
-- in order of preference. This allows us in principle to \"shadow\"
-- packages if there are several installed packages of the same version.
-- There are currently some shortcomings in both GHC and Cabal in
-- resolving these situations. However, the right thing to do is to
-- fix the problem there, so for now, shadowing is only activated if
-- explicitly requested.
convPIs :: OS -> Arch -> CompilerInfo -> Bool -> Bool ->
SI.InstalledPackageIndex -> CI.PackageIndex SourcePackage -> Index
convPIs os arch comp sip strfl iidx sidx =
mkIndex (convIPI' sip iidx ++ convSPI' os arch comp strfl sidx)
-- | Convert a Cabal installed package index to the simpler,
-- more uniform index format of the solver.
convIPI' :: Bool -> SI.InstalledPackageIndex -> [(PN, I, PInfo)]
convIPI' sip idx =
-- apply shadowing whenever there are multiple installed packages with
-- the same version
[ maybeShadow (convIP idx pkg)
| (_pkgid, pkgs) <- SI.allPackagesBySourcePackageId idx
, (maybeShadow, pkg) <- zip (id : repeat shadow) pkgs ]
where
-- shadowing is recorded in the package info
shadow (pn, i, PInfo fdeps fds _) | sip = (pn, i, PInfo fdeps fds (Just Shadowed))
shadow x = x
convIPI :: Bool -> SI.InstalledPackageIndex -> Index
convIPI sip = mkIndex . convIPI' sip
-- | Convert a single installed package into the solver-specific format.
convIP :: SI.InstalledPackageIndex -> InstalledPackageInfo -> (PN, I, PInfo)
convIP idx ipi =
let ipid = IPI.installedComponentId ipi
i = I (pkgVersion (sourcePackageId ipi)) (Inst ipid)
pn = pkgName (sourcePackageId ipi)
in case mapM (convIPId pn idx) (IPI.depends ipi) of
Nothing -> (pn, i, PInfo [] M.empty (Just Broken))
Just fds -> (pn, i, PInfo (setComp fds) M.empty Nothing)
where
-- We assume that all dependencies of installed packages are _library_ deps
setComp = setCompFlaggedDeps ComponentLib
-- TODO: Installed packages should also store their encapsulations!
-- | Convert dependencies specified by an installed package id into
-- flagged dependencies of the solver.
--
-- May return Nothing if the package can't be found in the index. That
-- indicates that the original package having this dependency is broken
-- and should be ignored.
convIPId :: PN -> SI.InstalledPackageIndex -> ComponentId -> Maybe (FlaggedDep () PN)
convIPId pn' idx ipid =
case SI.lookupComponentId idx ipid of
Nothing -> Nothing
Just ipi -> let i = I (pkgVersion (sourcePackageId ipi)) (Inst ipid)
pn = pkgName (sourcePackageId ipi)
in Just (D.Simple (Dep pn (Fixed i (Goal (P pn') []))) ())
-- | Convert a cabal-install source package index to the simpler,
-- more uniform index format of the solver.
convSPI' :: OS -> Arch -> CompilerInfo -> Bool ->
CI.PackageIndex SourcePackage -> [(PN, I, PInfo)]
convSPI' os arch cinfo strfl = L.map (convSP os arch cinfo strfl) . CI.allPackages
convSPI :: OS -> Arch -> CompilerInfo -> Bool ->
CI.PackageIndex SourcePackage -> Index
convSPI os arch cinfo strfl = mkIndex . convSPI' os arch cinfo strfl
-- | Convert a single source package into the solver-specific format.
convSP :: OS -> Arch -> CompilerInfo -> Bool -> SourcePackage -> (PN, I, PInfo)
convSP os arch cinfo strfl (SourcePackage (PackageIdentifier pn pv) gpd _ _pl) =
let i = I pv InRepo
in (pn, i, convGPD os arch cinfo strfl (PI pn i) gpd)
-- We do not use 'flattenPackageDescription' or 'finalizePackageDescription'
-- from 'Distribution.PackageDescription.Configuration' here, because we
-- want to keep the condition tree, but simplify much of the test.
-- | Convert a generic package description to a solver-specific 'PInfo'.
convGPD :: OS -> Arch -> CompilerInfo -> Bool ->
PI PN -> GenericPackageDescription -> PInfo
convGPD os arch comp strfl pi
(GenericPackageDescription pkg flags libs exes tests benchs) =
let
fds = flagInfo strfl flags
conv = convCondTree os arch comp pi fds (const True)
in
PInfo
(maybe [] (conv ComponentLib ) libs ++
maybe [] (convSetupBuildInfo pi) (setupBuildInfo pkg) ++
concatMap (\(nm, ds) -> conv (ComponentExe nm) ds) exes ++
prefix (Stanza (SN pi TestStanzas))
(L.map (\(nm, ds) -> conv (ComponentTest nm) ds) tests) ++
prefix (Stanza (SN pi BenchStanzas))
(L.map (\(nm, ds) -> conv (ComponentBench nm) ds) benchs))
fds
Nothing
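-- | Wrap a list of dependency groups in a single node (such as a test or
-- benchmark stanza); when there are no groups, produce no node at all.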
prefix :: (FlaggedDeps comp qpn -> FlaggedDep comp' qpn) -> [FlaggedDeps comp qpn] -> FlaggedDeps comp' qpn
prefix _ [] = []
prefix f fds = [f (concat fds)]
-- | Convert flag information. Automatic flags are now considered weak
-- unless strong flags have been selected explicitly.
flagInfo :: Bool -> [PD.Flag] -> FlagInfo
flagInfo strfl = M.fromList . L.map (\ (MkFlag fn _ b m) -> (fn, FInfo b m (not (strfl || m))))
-- | Convert condition trees to flagged dependencies.
convCondTree :: OS -> Arch -> CompilerInfo -> PI PN -> FlagInfo ->
(a -> Bool) -> -- how to detect if a branch is active
Component ->
CondTree ConfVar [Dependency] a -> FlaggedDeps Component PN
convCondTree os arch cinfo pi@(PI pn _) fds p comp (CondNode info ds branches)
| p info = L.map (\d -> D.Simple (convDep pn d) comp) ds -- unconditional dependencies
++ concatMap (convBranch os arch cinfo pi fds p comp) branches
| otherwise = []
-- | Branch interpreter.
--
-- Here, we try to simplify one of Cabal's condition tree branches into the
-- solver's flagged dependency format, which is weaker. Condition trees can
-- contain complex logical expression composed from flag choices and special
-- flags (such as architecture, or compiler flavour). We try to evaluate the
-- special flags and subsequently simplify to a tree that only depends on
-- simple flag choices.
convBranch :: OS -> Arch -> CompilerInfo ->
PI PN -> FlagInfo ->
(a -> Bool) -> -- how to detect if a branch is active
Component ->
(Condition ConfVar,
CondTree ConfVar [Dependency] a,
Maybe (CondTree ConfVar [Dependency] a)) -> FlaggedDeps Component PN
convBranch os arch cinfo pi@(PI pn _) fds p comp (c', t', mf') =
go c' ( convCondTree os arch cinfo pi fds p comp t')
(maybe [] (convCondTree os arch cinfo pi fds p comp) mf')
where
go :: Condition ConfVar ->
FlaggedDeps Component PN -> FlaggedDeps Component PN -> FlaggedDeps Component PN
go (Lit True) t _ = t
go (Lit False) _ f = f
go (CNot c) t f = go c f t
go (CAnd c d) t f = go c (go d t f) f
go (COr c d) t f = go c t (go d t f)
go (Var (Flag fn)) t f = extractCommon t f ++ [Flagged (FN pi fn) (fds ! fn) t f]
go (Var (OS os')) t f
| os == os' = t
| otherwise = f
go (Var (Arch arch')) t f
| arch == arch' = t
| otherwise = f
go (Var (Impl cf cvr)) t f
| matchImpl (compilerInfoId cinfo) ||
-- fixme: Nothing should be treated as unknown, rather than empty
-- list. This code should eventually be changed to either
-- support partial resolution of compiler flags or to
-- complain about incompletely configured compilers.
any matchImpl (fromMaybe [] $ compilerInfoCompat cinfo) = t
| otherwise = f
where
matchImpl (CompilerId cf' cv) = cf == cf' && checkVR cvr cv
-- If both branches contain the same package as a simple dep, we lift it to
-- the next higher-level, but without constraints. This heuristic together
-- with deferring flag choices will then usually first resolve this package,
-- and try an already installed version before imposing a default flag choice
-- that might not be what we want.
--
-- Note that we make assumptions here on the form of the dependencies that
-- can occur at this point. In particular, no occurrences of Fixed, and no
-- occurrences of multiple version ranges, as all dependencies below this
-- point have been generated using 'convDep'.
extractCommon :: FlaggedDeps Component PN -> FlaggedDeps Component PN -> FlaggedDeps Component PN
extractCommon ps ps' = [ D.Simple (Dep pn1 (Constrained [(vr1 .||. vr2, Goal (P pn) [])])) comp
| D.Simple (Dep pn1 (Constrained [(vr1, _)])) _ <- ps
, D.Simple (Dep pn2 (Constrained [(vr2, _)])) _ <- ps'
, pn1 == pn2
]
-- | Convert a Cabal dependency to a solver-specific dependency.
convDep :: PN -> Dependency -> Dep PN
convDep pn' (Dependency pn vr) = Dep pn (Constrained [(vr, Goal (P pn') [])])
-- | Convert a Cabal package identifier to a solver-specific dependency.
convPI :: PN -> PackageIdentifier -> Dep PN
convPI pn' (PackageIdentifier pn v) = Dep pn (Constrained [(eqVR v, Goal (P pn') [])])
-- | Convert setup dependencies
convSetupBuildInfo :: PI PN -> SetupBuildInfo -> FlaggedDeps Component PN
convSetupBuildInfo (PI pn _i) nfo =
L.map (\d -> D.Simple (convDep pn d) ComponentSetup) (PD.setupDepends nfo)
| trskop/cabal | cabal-install/Distribution/Client/Dependency/Modular/IndexConversion.hs | bsd-3-clause | 10,637 | 0 | 20 | 2,556 | 2,771 | 1,457 | 1,314 | 134 | 9 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE NoImplicitPrelude #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Maybe
-- Copyright : (c) The University of Glasgow 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : [email protected]
-- Stability : stable
-- Portability : portable
--
-- The Maybe type, and associated operations.
--
-----------------------------------------------------------------------------
module Data.Maybe
(
Maybe(Nothing,Just)
, maybe
, isJust
, isNothing
, fromJust
, fromMaybe
, listToMaybe
, maybeToList
, catMaybes
, mapMaybe
) where
import GHC.Base
-- $setup
-- Allow the use of some Prelude functions in doctests.
-- >>> import Prelude ( (*), odd, show, sum )
-- ---------------------------------------------------------------------------
-- Functions over Maybe
-- | The 'maybe' function takes a default value, a function, and a 'Maybe'
-- value. If the 'Maybe' value is 'Nothing', the function returns the
-- default value. Otherwise, it applies the function to the value inside
-- the 'Just' and returns the result.
--
-- ==== __Examples__
--
-- Basic usage:
--
-- >>> maybe False odd (Just 3)
-- True
--
-- >>> maybe False odd Nothing
-- False
--
-- Read an integer from a string using 'readMaybe'. If we succeed,
-- return twice the integer; that is, apply @(*2)@ to it. If instead
-- we fail to parse an integer, return @0@ by default:
--
-- >>> import Text.Read ( readMaybe )
-- >>> maybe 0 (*2) (readMaybe "5")
-- 10
-- >>> maybe 0 (*2) (readMaybe "")
-- 0
--
-- Apply 'show' to a @Maybe Int@. If we have @Just n@, we want to show
-- the underlying 'Int' @n@. But if we have 'Nothing', we return the
-- empty string instead of (for example) \"Nothing\":
--
-- >>> maybe "" show (Just 5)
-- "5"
-- >>> maybe "" show Nothing
-- ""
--
maybe :: b -> (a -> b) -> Maybe a -> b
maybe n _ Nothing = n
maybe _ f (Just x) = f x
-- | The 'isJust' function returns 'True' iff its argument is of the
-- form @Just _@.
--
-- ==== __Examples__
--
-- Basic usage:
--
-- >>> isJust (Just 3)
-- True
--
-- >>> isJust (Just ())
-- True
--
-- >>> isJust Nothing
-- False
--
-- Only the outer constructor is taken into consideration:
--
-- >>> isJust (Just Nothing)
-- True
--
isJust :: Maybe a -> Bool
isJust Nothing = False
isJust _ = True
-- | The 'isNothing' function returns 'True' iff its argument is 'Nothing'.
--
-- ==== __Examples__
--
-- Basic usage:
--
-- >>> isNothing (Just 3)
-- False
--
-- >>> isNothing (Just ())
-- False
--
-- >>> isNothing Nothing
-- True
--
-- Only the outer constructor is taken into consideration:
--
-- >>> isNothing (Just Nothing)
-- False
--
isNothing :: Maybe a -> Bool
isNothing Nothing = True
isNothing _ = False
-- | The 'fromJust' function extracts the element out of a 'Just' and
-- throws an error if its argument is 'Nothing'.
--
-- ==== __Examples__
--
-- Basic usage:
--
-- >>> fromJust (Just 1)
-- 1
--
-- >>> 2 * (fromJust (Just 10))
-- 20
--
-- >>> 2 * (fromJust Nothing)
-- *** Exception: Maybe.fromJust: Nothing
--
fromJust :: Maybe a -> a
fromJust Nothing = errorWithoutStackTrace "Maybe.fromJust: Nothing" -- yuck
fromJust (Just x) = x
-- | The 'fromMaybe' function takes a default value and a 'Maybe'
-- value. If the 'Maybe' is 'Nothing', it returns the default value;
-- otherwise, it returns the value contained in the 'Maybe'.
--
-- ==== __Examples__
--
-- Basic usage:
--
-- >>> fromMaybe "" (Just "Hello, World!")
-- "Hello, World!"
--
-- >>> fromMaybe "" Nothing
-- ""
--
-- Read an integer from a string using 'readMaybe'. If we fail to
-- parse an integer, we want to return @0@ by default:
--
-- >>> import Text.Read ( readMaybe )
-- >>> fromMaybe 0 (readMaybe "5")
-- 5
-- >>> fromMaybe 0 (readMaybe "")
-- 0
--
fromMaybe :: a -> Maybe a -> a
fromMaybe d x = case x of {Nothing -> d;Just v -> v}
-- | The 'maybeToList' function returns an empty list when given
-- 'Nothing' or a singleton list when not given 'Nothing'.
--
-- ==== __Examples__
--
-- Basic usage:
--
-- >>> maybeToList (Just 7)
-- [7]
--
-- >>> maybeToList Nothing
-- []
--
-- One can use 'maybeToList' to avoid pattern matching when combined
-- with a function that (safely) works on lists:
--
-- >>> import Text.Read ( readMaybe )
-- >>> sum $ maybeToList (readMaybe "3")
-- 3
-- >>> sum $ maybeToList (readMaybe "")
-- 0
--
maybeToList :: Maybe a -> [a]
maybeToList Nothing = []
maybeToList (Just x) = [x]
-- | The 'listToMaybe' function returns 'Nothing' on an empty list
-- or @'Just' a@ where @a@ is the first element of the list.
--
-- ==== __Examples__
--
-- Basic usage:
--
-- >>> listToMaybe []
-- Nothing
--
-- >>> listToMaybe [9]
-- Just 9
--
-- >>> listToMaybe [1,2,3]
-- Just 1
--
-- Composing 'maybeToList' with 'listToMaybe' should be the identity
-- on singleton/empty lists:
--
-- >>> maybeToList $ listToMaybe [5]
-- [5]
-- >>> maybeToList $ listToMaybe []
-- []
--
-- But not on lists with more than one element:
--
-- >>> maybeToList $ listToMaybe [1,2,3]
-- [1]
--
listToMaybe :: [a] -> Maybe a
listToMaybe = foldr (const . Just) Nothing
{-# INLINE listToMaybe #-}
-- We define listToMaybe using foldr so that it can fuse via the foldr/build
-- rule. See #14387
-- | The 'catMaybes' function takes a list of 'Maybe's and returns
-- a list of all the 'Just' values.
--
-- ==== __Examples__
--
-- Basic usage:
--
-- >>> catMaybes [Just 1, Nothing, Just 3]
-- [1,3]
--
-- When constructing a list of 'Maybe' values, 'catMaybes' can be used
-- to return all of the \"success\" results (if the list is the result
-- of a 'map', then 'mapMaybe' would be more appropriate):
--
-- >>> import Text.Read ( readMaybe )
-- >>> [readMaybe x :: Maybe Int | x <- ["1", "Foo", "3"] ]
-- [Just 1,Nothing,Just 3]
-- >>> catMaybes $ [readMaybe x :: Maybe Int | x <- ["1", "Foo", "3"] ]
-- [1,3]
--
catMaybes :: [Maybe a] -> [a]
catMaybes ls = [x | Just x <- ls]
-- | The 'mapMaybe' function is a version of 'map' which can throw
-- out elements. In particular, the functional argument returns
-- something of type @'Maybe' b@. If this is 'Nothing', no element
-- is added on to the result list. If it is @'Just' b@, then @b@ is
-- included in the result list.
--
-- ==== __Examples__
--
-- Using @'mapMaybe' f x@ is a shortcut for @'catMaybes' $ 'map' f x@
-- in most cases:
--
-- >>> import Text.Read ( readMaybe )
-- >>> let readMaybeInt = readMaybe :: String -> Maybe Int
-- >>> mapMaybe readMaybeInt ["1", "Foo", "3"]
-- [1,3]
-- >>> catMaybes $ map readMaybeInt ["1", "Foo", "3"]
-- [1,3]
--
-- If we map the 'Just' constructor, the entire list should be returned:
--
-- >>> mapMaybe Just [1,2,3]
-- [1,2,3]
--
mapMaybe :: (a -> Maybe b) -> [a] -> [b]
mapMaybe _ [] = []
mapMaybe f (x:xs) =
let rs = mapMaybe f xs in
case f x of
Nothing -> rs
Just r -> r:rs
{-# NOINLINE [1] mapMaybe #-}
{-# RULES
"mapMaybe" [~1] forall f xs. mapMaybe f xs
= build (\c n -> foldr (mapMaybeFB c f) n xs)
"mapMaybeList" [1] forall f. foldr (mapMaybeFB (:) f) [] = mapMaybe f
#-}
{-# INLINE [0] mapMaybeFB #-} -- See Note [Inline FB functions] in GHC.List
mapMaybeFB :: (b -> r -> r) -> (a -> Maybe b) -> a -> r -> r
mapMaybeFB cons f x next = case f x of
Nothing -> next
Just r -> cons r next
| ezyang/ghc | libraries/base/Data/Maybe.hs | bsd-3-clause | 7,435 | 0 | 10 | 1,555 | 785 | 522 | 263 | 57 | 2 |
module When (foo, fooOk) where
{-@ assume div :: x:_ -> y:{_ | y /= 0} -> _ @-}
{- when :: b:Bool -> {v:_ | ???} -> _ -}
when b x = if b then x else return ()
foo :: Int -> IO ()
foo x = when (x > 0) $ print (1 `div` x)
{-@ whenT :: b:_ -> ({v:_ | Prop b} -> _) -> _ @-}
whenT :: Bool -> (() -> IO ()) -> IO ()
whenT b k = if b then k () else return ()
fooOk :: Int -> IO ()
fooOk x = whenT (x > 0) $ \() -> print (1 `div` x)
| abakst/liquidhaskell | tests/todo/When.hs | bsd-3-clause | 432 | 0 | 10 | 122 | 201 | 107 | 94 | 8 | 2 |
-- |
-- Module : Data.Vector.Storable.Internal
-- Copyright : (c) Roman Leshchinskiy 2009-2010
-- License : BSD-style
--
-- Maintainer : Roman Leshchinskiy <[email protected]>
-- Stability : experimental
-- Portability : non-portable
--
-- Ugly internal utility functions for implementing 'Storable'-based vectors.
--
module Data.Vector.Storable.Internal (
getPtr, setPtr, updPtr
) where
import Control.Monad.Primitive ( unsafeInlineIO )
import Foreign.Storable
import Foreign.ForeignPtr
import Foreign.Ptr
import Foreign.Marshal.Array ( advancePtr )
import GHC.Base ( quotInt )
import GHC.ForeignPtr ( ForeignPtr(..) )
import GHC.Ptr ( Ptr(..) )
{-@ getPtr :: f:ForeignPtrV a -> PtrN a {(fplen f)} @-}
getPtr :: ForeignPtr a -> Ptr a
{-# INLINE getPtr #-}
getPtr (ForeignPtr addr _) = Ptr addr
{-@ setPtr :: ForeignPtr a -> p:PtrV a -> ForeignPtrN a {(plen p)} @-}
setPtr :: ForeignPtr a -> Ptr a -> ForeignPtr a
{-# INLINE setPtr #-}
setPtr (ForeignPtr _ c) (Ptr addr) = ForeignPtr addr c
{-@ type PtrP a P = PtrN a {(plen P)} @-}
{-@ type ForeignPtrP a P = ForeignPtrN a {(fplen P)} @-}
{-@ updPtr :: (p:PtrV a -> PtrP a p) -> f:ForeignPtrV a -> ForeignPtrP a f @-}
updPtr :: (Ptr a -> Ptr a) -> ForeignPtr a -> ForeignPtr a
{-# INLINE updPtr #-}
updPtr f (ForeignPtr p c) = case f (Ptr p) of { Ptr q -> ForeignPtr q c }
| mightymoose/liquidhaskell | benchmarks/vector-0.10.0.1/Data/Vector/Storable/Internal.hs | bsd-3-clause | 1,386 | 0 | 8 | 285 | 277 | 157 | 120 | 19 | 1 |
import Test.Cabal.Prelude
-- Test if detailed-0.9 builds correctly and runs
-- when linked dynamically
-- See https://github.com/haskell/cabal/issues/4270
main = setupAndCabalTest $ do
skipUnless =<< hasSharedLibraries
skipUnless =<< hasCabalShared
skipUnless =<< hasCabalForGhc
setup_build ["--enable-tests", "--enable-executable-dynamic"]
setup "test" []
| mydaum/cabal | cabal-testsuite/PackageTests/Regression/T4270/setup.test.hs | bsd-3-clause | 367 | 0 | 9 | 47 | 62 | 31 | 31 | 7 | 1 |
import P
main = putStrLn p
| ezyang/ghc | testsuite/tests/cabal/cabal08/Main.hs | bsd-3-clause | 27 | 0 | 5 | 6 | 12 | 6 | 6 | 2 | 1 |
module Rebase.Control.Monad.Writer.Strict
(
module Control.Monad.Writer.Strict
)
where
import Control.Monad.Writer.Strict
| nikita-volkov/rebase | library/Rebase/Control/Monad/Writer/Strict.hs | mit | 125 | 0 | 5 | 12 | 26 | 19 | 7 | 4 | 0 |
module HAD.Y2014.M02.D24.Exercise where
import Data.List
-- | Filter a list, keeping an element only if it is equal to the next one.
--
-- Examples:
-- >>> filterByPair []
-- []
-- >>> filterByPair [1 .. 10]
-- []
-- >>> filterByPair [1, 2, 2, 2, 3, 3, 4]
-- [2,2,3]
filterByPair :: Eq a => [a] -> [a]
filterByPair = (>>= init).group
| smwhr/1HAD | exercises/HAD/Y2014/M02/D24/Exercise.hs | mit | 337 | 0 | 7 | 66 | 56 | 38 | 18 | 4 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudfront-distribution-customerrorresponse.html
module Stratosphere.ResourceProperties.CloudFrontDistributionCustomErrorResponse where
import Stratosphere.ResourceImports
-- | Full data type definition for CloudFrontDistributionCustomErrorResponse.
-- See 'cloudFrontDistributionCustomErrorResponse' for a more convenient
-- constructor.
data CloudFrontDistributionCustomErrorResponse =
CloudFrontDistributionCustomErrorResponse
{ _cloudFrontDistributionCustomErrorResponseErrorCachingMinTTL :: Maybe (Val Double)
, _cloudFrontDistributionCustomErrorResponseErrorCode :: Val Integer
, _cloudFrontDistributionCustomErrorResponseResponseCode :: Maybe (Val Integer)
, _cloudFrontDistributionCustomErrorResponseResponsePagePath :: Maybe (Val Text)
} deriving (Show, Eq)
instance ToJSON CloudFrontDistributionCustomErrorResponse where
toJSON CloudFrontDistributionCustomErrorResponse{..} =
object $
catMaybes
[ fmap (("ErrorCachingMinTTL",) . toJSON) _cloudFrontDistributionCustomErrorResponseErrorCachingMinTTL
, (Just . ("ErrorCode",) . toJSON) _cloudFrontDistributionCustomErrorResponseErrorCode
, fmap (("ResponseCode",) . toJSON) _cloudFrontDistributionCustomErrorResponseResponseCode
, fmap (("ResponsePagePath",) . toJSON) _cloudFrontDistributionCustomErrorResponseResponsePagePath
]
-- | Constructor for 'CloudFrontDistributionCustomErrorResponse' containing
-- required fields as arguments.
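--
-- A rough usage sketch (assuming the 'Literal' constructor of 'Val' and the
-- '&' and '?~' operators from Control.Lens are in scope):
--
-- > cloudFrontDistributionCustomErrorResponse (Literal 404)
-- >   & cfdcerResponseCode ?~ Literal 404
-- >   & cfdcerResponsePagePath ?~ Literal "/errors/404.html"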
cloudFrontDistributionCustomErrorResponse
:: Val Integer -- ^ 'cfdcerErrorCode'
-> CloudFrontDistributionCustomErrorResponse
cloudFrontDistributionCustomErrorResponse errorCodearg =
CloudFrontDistributionCustomErrorResponse
{ _cloudFrontDistributionCustomErrorResponseErrorCachingMinTTL = Nothing
, _cloudFrontDistributionCustomErrorResponseErrorCode = errorCodearg
, _cloudFrontDistributionCustomErrorResponseResponseCode = Nothing
, _cloudFrontDistributionCustomErrorResponseResponsePagePath = Nothing
}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudfront-distribution-customerrorresponse.html#cfn-cloudfront-distribution-customerrorresponse-errorcachingminttl
cfdcerErrorCachingMinTTL :: Lens' CloudFrontDistributionCustomErrorResponse (Maybe (Val Double))
cfdcerErrorCachingMinTTL = lens _cloudFrontDistributionCustomErrorResponseErrorCachingMinTTL (\s a -> s { _cloudFrontDistributionCustomErrorResponseErrorCachingMinTTL = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudfront-distribution-customerrorresponse.html#cfn-cloudfront-distribution-customerrorresponse-errorcode
cfdcerErrorCode :: Lens' CloudFrontDistributionCustomErrorResponse (Val Integer)
cfdcerErrorCode = lens _cloudFrontDistributionCustomErrorResponseErrorCode (\s a -> s { _cloudFrontDistributionCustomErrorResponseErrorCode = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudfront-distribution-customerrorresponse.html#cfn-cloudfront-distribution-customerrorresponse-responsecode
cfdcerResponseCode :: Lens' CloudFrontDistributionCustomErrorResponse (Maybe (Val Integer))
cfdcerResponseCode = lens _cloudFrontDistributionCustomErrorResponseResponseCode (\s a -> s { _cloudFrontDistributionCustomErrorResponseResponseCode = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudfront-distribution-customerrorresponse.html#cfn-cloudfront-distribution-customerrorresponse-responsepagepath
cfdcerResponsePagePath :: Lens' CloudFrontDistributionCustomErrorResponse (Maybe (Val Text))
cfdcerResponsePagePath = lens _cloudFrontDistributionCustomErrorResponseResponsePagePath (\s a -> s { _cloudFrontDistributionCustomErrorResponseResponsePagePath = a })
| frontrowed/stratosphere | library-gen/Stratosphere/ResourceProperties/CloudFrontDistributionCustomErrorResponse.hs | mit | 3,963 | 0 | 13 | 309 | 448 | 254 | 194 | 38 | 1 |
pyths :: Int -> [(Int, Int, Int)]
pyths n = [(x, y, z) | x <- ns, y <- ns, z <- ns, x^2 + y^2 == z^2]
where ns = [1..n]
main = do
print $ pyths 10
| fabioyamate/programming-in-haskell | ch05/ex03.hs | mit | 156 | 0 | 11 | 48 | 117 | 63 | 54 | 5 | 1 |
{-|
Module: Y2015
Description: Advent of Code Day Solutions.
License: MIT
Maintainer: @tylerjl
Solutions to the set of problems for <adventofcode.com>.
Each day is broken up into an individual module with accompanying spec tests
and (possibly?) benchmarks.
Each day's module exports are re-exported here for convenience when importing
'Y2015' et al.
-}
module Y2015 (module X) where
import Y2015.Util as X
import Y2015.D01 as X
import Y2015.D02 as X
import Y2015.D03 as X
import Y2015.D04 as X
import Y2015.D05 as X
import Y2015.D06 as X
import Y2015.D07 as X
import Y2015.D08 as X
import Y2015.D09 as X
import Y2015.D10 as X
import Y2015.D11 as X
import Y2015.D12 as X
import Y2015.D13 as X
import Y2015.D14 as X
import Y2015.D15 as X
import Y2015.D16 as X
import Y2015.D17 as X
import Y2015.D18 as X
import Y2015.D19 as X
import Y2015.D20 as X
import Y2015.D21 as X
import Y2015.D22 as X
import Y2015.D23 as X
import Y2015.D24 as X
import Y2015.D25 as X
| tylerjl/adventofcode | src/Y2015.hs | mit | 970 | 0 | 4 | 175 | 193 | 138 | 55 | 27 | 0 |
module Network.BitFunctor.Account.Tests where
import Network.BitFunctor.Account
import Network.BitFunctor.Account.Arbitrary()
import Test.Tasty (TestTree, testGroup)
import Test.Tasty.HUnit (testCase)
import Test.Tasty.QuickCheck (testProperty)
import Test.HUnit (Assertion, (@?=))
tests :: TestTree
tests = testGroup "Network.BitFunctor.Account.Tests"
[ testProperty "from_accountid(to_accountid(account)).pubkey == account.pubkey" prop_accountid_inv_over_pk
, testProperty "from_accountid(to_accountid(account)).seckey == Nothing" prop_accountid_erases_sk
, testProperty "to_accountid(from_accountid(accountid)) == accountid" prop_accountid_inv_over_accountid
]
prop_accountid_inv_over_pk :: Account -> Bool
prop_accountid_inv_over_pk acc = pubKey (fromAccountId $ toAccountId acc) == pubKey acc
prop_accountid_erases_sk :: Account -> Bool
prop_accountid_erases_sk acc = secKey (fromAccountId $ toAccountId acc) == Nothing
prop_accountid_inv_over_accountid :: AccountId -> Bool
prop_accountid_inv_over_accountid accId = (toAccountId . fromAccountId) accId == accId
| BitFunctor/bitfunctor | test/src/Network/BitFunctor/Account/Tests.hs | mit | 1,082 | 0 | 9 | 107 | 215 | 120 | 95 | 18 | 1 |
-- Author: G4BB3R
-- Performance: 7643.2s (how could this be improved? one idea is sketched below, after `sieve`)
primes :: [Int]
primes = sieve [2..]
where sieve :: [Int] -> [Int] ;
sieve (p:xs) = p : sieve [x | x <- xs, x `rem` p /= 0] ;
sieve [] = []
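-- One possible improvement (editor's sketch, not part of the original
-- solution): test each odd candidate only against the primes already found,
-- stopping once p * p exceeds the candidate. 'main' could then sum
-- 'fasterPrimes' instead of 'primes'.
fasterPrimes :: [Int]
fasterPrimes = 2 : filter isPrime [3,5..]
  where isPrime n = all (\p -> n `rem` p /= 0)
                        (takeWhile (\p -> p * p <= n) fasterPrimes)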
main :: IO ()
main = print $ sum $ takeWhile (< 2000000) primes
| DestructHub/ProjectEuler | Problem010/Haskell/solution_slow_1.hs | mit | 276 | 3 | 10 | 70 | 135 | 74 | 61 | 7 | 2 |
-- Much of the code in this file is adapted from
-- the ML version here: http://www.cs.princeton.edu/~appel/modern/ml/chap8/canon.sml
module TigerCanon
(
linearize
, basicblocks
, tracesched
, canonicalize
)
where
import qualified TigerTemp as Tmp
import TigerITree
import TigerGenSymLabTmp
import qualified Data.Map as Map
import Control.Monad.Identity
import Prelude hiding (EQ, LT, GT)
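-- | Conservative commutativity test: a statement and an expression may be
-- reordered only when one of them is a constant or a name, since those can
-- never observe the other's side effects.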
commute :: Stm -> Exp -> Bool
commute (EXP(CONST _ _)) _ = True
commute _ (NAME _) = True
commute _ (CONST _ _) = True
commute _ _ = False
nop :: Stm
nop = EXP(CONST 0 False)
type Canon = GenSymLabTmp Identity
infixl % -- combines two Stm, ignores nop
(%) :: Stm -> Stm -> Stm
s % (EXP(CONST _ _)) = s
(EXP(CONST _ _)) % s = s
a % b = SEQ(a, b)
notrel :: Relop -> Relop
notrel EQ = NE
notrel NE = EQ
notrel LT = GE
notrel GT = LE
notrel LE = GT
notrel GE = LT
notrel ULT = UGE
notrel ULE = UGT
notrel UGT = ULE
notrel UGE = ULT
notrel FEQ = FNE
notrel FNE = FEQ
notrel FLT = FGE
notrel FLE = FGT
notrel FGT = FLE
notrel FGE = FLT
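-- | Flatten a statement into a list of canonical statements: ESEQs are
-- hoisted out of expressions, CALLs nested inside other expressions are
-- lifted into fresh temporaries, and no SEQ or ESEQ remains in the result.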
linearize :: Stm -> Canon [Stm]
linearize stmtobelinearized =
let
reorder dofunc (exps, build) =
let f ((e@(CALL _ iscptr _)):rest) =
do t <- newTemp iscptr
f $ ESEQ(MOVE(TEMP t iscptr, e), TEMP t iscptr):rest
f (a:rest) =
do (stm0, e) <- dofunc a
(stm1, el) <- f rest
if commute stm1 e
then return (stm0 % stm1, e:el)
else do let iseptr = isExpPtr e
t <- newTemp iseptr
return (stm0 % MOVE(TEMP t iseptr, e) % stm1, TEMP t iseptr:el)
f [] = return (nop, [])
in do (stm0, el) <- f exps
return (stm0, build el)
expl :: [Exp] -> ([Exp] -> Exp) -> Canon (Stm, Exp)
expl el f = reorder doexp (el, f)
expl' :: [Exp] -> ([Exp] -> Stm) -> Canon Stm
expl' el f = do (stm0, s) <- reorder doexp (el, f)
return $ stm0 % s
doexp :: Exp -> Canon (Stm, Exp)
doexp (BINOP(p, a, b) isbptr) =
expl [a, b] $ \[l, r] -> BINOP(p, l, r) isbptr
doexp (CVTOP(p, a, s1, s2)) =
expl [a] $ \[arg] -> CVTOP(p, arg, s1, s2)
doexp (MEM(a, sz) ismemptr) =
expl [a] $ \[arg] -> MEM(arg, sz) ismemptr
doexp (ESEQ(s, e)) =
do s' <- dostm s
(s'', expr) <- expl [e] $ \[e'] -> e'
return (s' % s'', expr)
doexp (CALL(e, el) iscptr retlab) =
expl (e:el) $ \(func:args) -> CALL(func, args) iscptr retlab
doexp e = expl [] $ \[] -> e
dostm :: Stm -> Canon Stm
dostm (SEQ(a, b)) = do a' <- dostm a
b' <- dostm b
return $ a' % b'
dostm (JUMP(e, labs)) =
expl' [e] $ \[e'] -> JUMP(e', labs)
dostm (CJUMP(TEST(p, a, b), t, f)) =
expl' [a, b] $ \[a', b'] -> CJUMP(TEST(p, a', b'), t, f)
dostm (MOVE(TEMP t istptr, CALL(e, el) iscptr retlab)) =
expl' (e:el) $ \(func:args) -> MOVE(TEMP t istptr, CALL(func, args) iscptr retlab)
dostm (MOVE(TEMP t istptr, b)) =
expl' [b] $ \[src] -> MOVE(TEMP t istptr, src)
dostm (MOVE(MEM(e, sz) ismemptr, src)) =
expl' [e, src] $ \[e', src'] -> MOVE(MEM(e', sz) ismemptr, src')
dostm (MOVE(ESEQ(s, e), src)) =
do s' <- dostm s
src' <- dostm $ MOVE(e, src)
return $ s' % src'
dostm (MOVE(a, b)) =
expl' [a, b] $ \[a', b'] -> MOVE(a', b')
dostm (EXP(CALL(e, el) iscptr retlab)) =
expl' (e:el) $ \(func:arg) -> EXP(CALL(func, arg) iscptr retlab)
dostm (EXP e) =
expl' [e] $ \[e'] -> EXP e'
dostm s =
expl' [] $ \[] -> s
linear (SEQ(a, b)) l = linear a $ linear b l
linear s l = s : l
in do stm' <- dostm stmtobelinearized
return $ linear stm' []
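-- | Split canonical statements into basic blocks: every block starts with a
-- LABEL and ends with a JUMP or CJUMP (labels and jumps are inserted where
-- missing), and a fresh exit label is returned alongside the blocks.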
basicblocks :: [Stm] -> Canon ([[Stm]], Tmp.Label)
basicblocks stms0 =
do done <- newLabel
let blocks ((blkhead@(LABEL _)):blktail) blist =
let next ((s@(JUMP _)):rest) thisblock =
endblock rest $ s:thisblock
next ((s@(CJUMP _)):rest) thisblock =
endblock rest $ s:thisblock
next (stms@(LABEL lab: _)) thisblock =
next (JUMP(NAME lab, [lab]):stms) thisblock
next (s:rest) thisblock =
next rest $ s:thisblock
next [] thisblock = next [JUMP(NAME done, [done])] thisblock
endblock more thisblock =
blocks more $ reverse thisblock:blist
in next blktail [blkhead]
blocks [] blist = return $ reverse blist
blocks stms blist =
do newlab <- newLabel
blocks (LABEL newlab:stms) blist
blks <- blocks stms0 []
return (blks, done)
enterblock :: [Stm] -> Map.Map Tmp.Label [Stm] -> Map.Map Tmp.Label [Stm]
enterblock b@(LABEL lab:_) table = Map.insert lab b table
enterblock _ table = table
splitlast :: [a] -> ([a], a)
splitlast [x] = ([], x)
splitlast (x:xs) = let (blktail, l) = splitlast xs in (x:blktail, l)
splitlast _ = error "Compiler error: splitlast."
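-- | Follow one trace through the block map: a block ending in a jump to a
-- not-yet-emitted block is concatenated with it (dropping the jump), and
-- CJUMPs are rewritten so that their false branch falls through, negating
-- the condition or inserting a bridging label when necessary.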
trace :: Map.Map Tmp.Label [Stm] -> [Stm] -> [[Stm]] -> Canon [Stm]
trace table b@(LABEL lab0:_) rest =
let table1 = Map.insert lab0 [] table
in case splitlast b of
(most, JUMP(NAME lab, _)) ->
case Map.lookup lab table1 of
(Just b'@(_:_)) -> do t <- trace table1 b' rest
return $ most ++ t
_ -> do next <- getnext table1 rest
return $ b ++ next
(most, CJUMP(TEST(opr, x, y), t, f)) ->
case (Map.lookup t table1, Map.lookup f table1) of
(_, Just b'@(_:_)) -> do tr <- trace table1 b' rest
return $ b ++ tr
(Just b'@(_:_), _) -> do tc <- trace table1 b' rest
let cjump = [CJUMP(TEST(notrel opr, x, y), f, t)]
return $ most ++ cjump ++ tc
_ -> do f' <- newLabel
let cjump = [CJUMP(TEST(opr, x, y), t, f'), LABEL f',
JUMP(NAME f, [f])]
n <- getnext table1 rest
return $ most ++ cjump ++ n
(_, JUMP _) -> do next <- getnext table1 rest
return $ b ++ next
_ -> error "Compiler error: trace -> case splitlast b of."
trace _ _ _ = error "Compiler error: trace."
getnext :: Map.Map Tmp.Label [Stm] -> [[Stm]] -> Canon [Stm]
getnext table (b@(LABEL lab:_):rest) =
case Map.lookup lab table of
Just (_:_) -> trace table b rest
_ -> getnext table rest
getnext _ [] = return []
getnext _ _ = error "Compiler error: getnext."
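-- | Order all basic blocks into traces so that jumps tend to fall through,
-- and terminate the resulting statement list with the exit label.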
tracesched :: ([[Stm]], Tmp.Label) -> Canon [Stm]
tracesched (blocks, done) =
do n <- getnext (foldr enterblock Map.empty blocks) blocks
return $ n ++ [LABEL done]
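-- | Complete canonicalisation pipeline: linearize, split into basic blocks,
-- then schedule the blocks into traces.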
canonicalize :: Stm -> GenSymLabTmpState -> ([Stm], GenSymLabTmpState)
canonicalize stm state = let monad = do stms <- linearize stm
blocks <- basicblocks stms
tracesched blocks
result = (runIdentity . runGSLT state) monad
in result
| hengchu/tiger-haskell | src/tigercanon.hs | mit | 7,295 | 0 | 24 | 2,493 | 3,414 | 1,773 | 1,641 | 181 | 20 |
-- ------------------------------------------------------------
module Holumbus.Crawler.RobotTypes
where
import Control.DeepSeq
import Data.Binary (Binary)
import qualified Data.Binary as B
import Data.Char
import qualified Data.Map.Strict as M
import Holumbus.Crawler.URIs
import Text.XML.HXT.Core
{-
import Text.XML.HXT.RelaxNG.XmlSchema.RegexMatch
import qualified Debug.Trace as D
-}
-- ------------------------------------------------------------
type Robots = M.Map URI RobotRestriction
type RobotRestriction = [RobotSpec]
type RobotSpec = (URI, RobotAction)
data RobotAction = Disallow | Allow
deriving (Eq, Show, Read, Enum)
type AddRobotsAction = URI -> Robots -> IO Robots
-- ------------------------------------------------------------
instance Binary RobotAction where
put = B.put . fromEnum
get = B.get >>= return . toEnum
instance NFData RobotAction where
instance XmlPickler RobotAction where
xpickle = xpPrim
xpRobots :: PU Robots
xpRobots = xpElem "robots" $
xpMap "robot" "host" xpText xpRobotRestriction
xpRobotRestriction :: PU RobotRestriction
xpRobotRestriction = xpList $
xpElem "restriction" $
xpPair ( xpAttr "href" $ xpText )
( xpAttr "access" $ xpickle )
-- ------------------------------------------------------------
emptyRobots :: Robots
emptyRobots = M.singleton "" []
robotsExtend :: String -> AddRobotsAction
robotsExtend _robotName _uri robots
= return robots -- TODO
robotsIndex :: URI -> Robots -> Bool
robotsIndex _uri _robots
= True -- TODO
robotsFollow :: URI -> Robots -> Bool
robotsFollow _uri _robots
= True -- TODO
-- ------------------------------------------------------------
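-- | Helper for 'robotsNoIndex' and 'robotsNoFollow': the filter yields
-- 'none' (dropping the document) whenever the \<meta name=\"robots\"\>
-- content of an HTML document contains the given directive.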
robotsNo :: String -> LA XmlTree XmlTree
robotsNo what = none
`when`
( this
/> hasName "html"
/> hasName "head"
/> hasName "meta" -- getByPath ["html", "head", "meta"]
>>>
hasAttrValue "name" ( map toUpper
>>>
(== "ROBOTS")
)
>>>
getAttrValue0 "content"
>>>
isA ( map (toUpper
>>>
(\ x -> if isLetter x then x else ' ')
)
>>>
words
>>>
(what `elem`)
)
)
-- | robots no index filter. This filter checks HTML documents
-- for a \<meta name=\"robots\" content=\"noindex\"\> in the head of the document
robotsNoIndex :: ArrowXml a => a XmlTree XmlTree
robotsNoIndex = fromLA $ robotsNo "NOINDEX"
-- | robots no follow filter. This filter checks HTML documents
-- for a \<meta name=\"robots\" content=\"nofollow\"\> in the head of the document
robotsNoFollow :: ArrowXml a => a XmlTree XmlTree
robotsNoFollow = fromLA $ robotsNo "NOFOLLOW"
-- ------------------------------------------------------------
| ichistmeinname/holumbus | src/Holumbus/Crawler/RobotTypes.hs | mit | 3,693 | 0 | 18 | 1,507 | 567 | 312 | 255 | 64 | 2 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE DeriveDataTypeable #-}
module NFA.AI where
import Autolib.NFA.Example
import Autolib.NFA.Type (NFA)
import Autolib.Set
import Autolib.ToDoc
import Autolib.Reader
import Data.Typeable
data AI = AI { name :: String -- abbreviation
, automat :: NFA Char Int
}
deriving ( Typeable )
example :: AI
example =
let sigma = mkSet "abc"
in AI { name = "irgendeine Sprache"
, automat = example_sigma sigma
}
$(derives [makeReader, makeToDoc] [''AI])
instance Show AI where show = render . toDoc
instance Read AI where readsPrec = parsec_readsPrec
| marcellussiegburg/autotool | collection/src/NFA/AI.hs | gpl-2.0 | 625 | 18 | 10 | 133 | 176 | 103 | 73 | 20 | 1 |
-- Copyright (c) 2015 Nicola Bonelli <[email protected]>
--
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, write to the Free Software
-- Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
--
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE LambdaCase #-}
module Main where
import Data.Semigroup
import Data.List
import Data.List.Split
import Data.Maybe
import Data.Char
import Control.Applicative
import Control.Monad
import Control.Monad.State
import Control.Concurrent (threadDelay)
import Data.Data
import System.Console.ANSI
import System.Console.CmdArgs
import System.Directory (getHomeDirectory, doesFileExist)
import System.IO.Unsafe
import System.IO.Error
import System.Process
import System.Exit
import System.FilePath
import System.Posix.Signals
import System.Posix.Types
proc_cpuinfo, proc_modules :: String
proc_cpuinfo = "/proc/cpuinfo"
proc_modules = "/proc/modules"
bold = setSGRCode [SetConsoleIntensity BoldIntensity]
reset = setSGRCode []
version = "5.2"
configFiles = [ "/etc/pfq.conf", "/root/.pfq.conf" ]
data YesNo = Yes | No | Unspec
deriving (Show, Read, Eq)
newtype OptString = OptString { getOptString :: String }
deriving (Show, Read, Eq)
newtype OptList a = OptList { getOptList :: [a] }
deriving (Show, Read, Eq)
instance Semigroup OptString where
a <> OptString "" = a
_ <> b = b
instance Semigroup (OptList a) where
a <> OptList [] = a
_ <> b = b
data Device =
Device
{ devname :: String
, devspeed :: Maybe Int
, channels :: Maybe Int
, flowctrl :: YesNo
, ethopt :: [(String, String)]
} deriving (Show, Read, Eq)
data Driver =
Driver
{ drvmod :: String
, drvopt :: [String]
, instances :: Int
, devices :: [Device]
} deriving (Show, Read, Eq)
data Config = Config
{ pfq_module :: String
, pfq_options :: [String]
, exclude_core :: [Int]
, irq_affinity :: [String]
, cpu_governor :: String
, drivers :: [Driver]
} deriving (Show, Read, Eq)
instance Semigroup Config where
(Config mod1 opt1 excl1 algo1 gov1 drvs1) <> (Config mod2 opt2 excl2 algo2 gov2 drvs2) =
Config
{ pfq_module = getOptString $ OptString mod1 <> OptString mod2
, pfq_options = getOptList $ OptList opt1 <> OptList opt2
, exclude_core = excl1 <> excl2
, irq_affinity = algo1 <> algo2
, cpu_governor = getOptString $ OptString gov1 <> OptString gov2
, drivers = drvs1 <> drvs2
}
data Options = Options
{ config :: Maybe String
, kmodule :: String
, algorithm :: String
, governor :: String
, first_core :: Int
, exclude :: [Int]
, queues :: Maybe Int
, others :: [String]
} deriving (Show, Read, Data, Typeable)
options :: Mode (CmdArgs Options)
options = cmdArgsMode $ Options
{ config = Nothing &= typ "FILE" &= help "Specify config file (default ~/.pfq.conf)"
, kmodule = "" &= help "Override the kmodule specified in config file"
, queues = Nothing &= help "Specify hardware channels (i.e. Intel RSS)"
, algorithm = "" &= help "Irq affinity algorithm: naive, round-robin, even, odd, all-in:id, comb:id"
, governor = "" &= help "Set cpufreq governor"
, first_core = 0 &= typ "NUM" &= help "First core used for irq affinity"
, exclude = [] &= typ "CORE" &= help "Exclude core from irq affinity"
, others = [] &= args
} &= summary ("pfq-load " ++ version) &= program "pfq-load"
-------------------------------------------------------------------------------------------
main :: IO ()
main = do
-- load options...
home <- getHomeDirectory
opt <- cmdArgsRun options
conf <- (<> mkConfig opt) <$> loadConfig (catMaybes (config opt : map Just configFiles)) opt
pmod <- getProcModules
core <- getNumberOfPhyCores
bal <- getProcessID "irqbalance"
frd <- getProcessID "cpufreqd"
-- check queues
when (maybe False (> core) (queues opt)) $ error "queues number is too big!"
-- unload pfq and drivers that depend on it...
evalStateT (unloadModule "pfq") pmod
-- check irqbalance deaemon
unless (null bal) $ do
putStrBoldLn $ "Irqbalance daemon detected @pid " ++ show bal ++ ". Sending SIGKILL..."
forM_ bal $ signalProcess sigKILL
-- check cpufreqd deaemon
unless (null frd) $ do
putStrBoldLn $ "Cpufreqd daemon detected @pid " ++ show frd ++ ". Sending SIGKILL..."
forM_ frd $ signalProcess sigKILL
-- set cpufreq governor...
runSystem ("/usr/bin/cpufreq-set -g " ++ cpu_governor conf) ("*** cpufreq-set error! Make sure you have cpufrequtils installed! *** ", True)
-- load PFQ...
if null (pfq_module conf)
then loadModule ProbeMod "pfq" (pfq_options conf)
else loadModule InsertMod (pfq_module conf) (pfq_options conf)
-- update current loaded proc/modules
pmod2 <- getProcModules
-- unload drivers...
unless (null (drivers conf)) $ do
putStrBoldLn "Unloading vanilla/standard drivers..."
evalStateT (forM_ (drivers conf) $ unloadModule . takeBaseName . drvmod) pmod2
-- load and configure device drivers...
forM_ (drivers conf) $ \drv -> do
let rss = maybe [] (mkRssOption (drvmod drv) (instances drv)) (queues opt)
loadModule InsertMod (drvmod drv) (drvopt drv ++ rss)
forM_ (devices drv) $ setupDevice (queues opt)
-- set interrupt affinity...
putStrBoldLn "Setting irq affinity..."
setupIRQAffinity (first_core opt) (exclude_core conf) (irq_affinity conf) (getDevices conf)
putStrBoldLn "PFQ ready."
mkRssOption :: String -> Int -> Int -> [String]
mkRssOption driver ndev nqueue
| "ixgbe.ko" `isSuffixOf` driver = [ "RSS=" ++ intercalate "," (replicate ndev (show nqueue)) ]
| "igb.ko" `isSuffixOf` driver = [ "RSS=" ++ intercalate "," (replicate ndev (show nqueue)) ]
| otherwise = []
mkConfig :: Options -> Config
mkConfig
Options
{ config = _
, kmodule = mod
, algorithm = algo
, exclude = excl
, governor = gov
, others = opt } =
Config
{ pfq_module = mod
, pfq_options = opt
, exclude_core = excl
, irq_affinity = [algo | not (null algo)]
, cpu_governor = gov
, drivers = []
}
getFirstConfig :: [FilePath] -> IO (Maybe FilePath)
getFirstConfig xs = filterM doesFileExist xs >>= \case
[ ] -> return Nothing
(x:_) -> return $ Just x
clean :: String -> String
clean = unlines . filter notComment . lines
where notComment = (not . ("#" `isPrefixOf`)) . dropWhile isSpace
loadConfig :: [FilePath] -> Options -> IO Config
loadConfig confs opt =
getFirstConfig confs >>= \case
Nothing -> putStrBoldLn "Using default config..." >> return (mkConfig opt)
Just conf -> putStrBoldLn ("Using " ++ conf ++ " config...") >>
liftM (read . clean) (readFile conf)
getNumberOfPhyCores :: IO Int
getNumberOfPhyCores = readFile proc_cpuinfo >>= \file ->
return $ (length . filter (isInfixOf "processor") . lines) file
type ProcModules = [ (String, [String]) ]
type ModStateT = StateT ProcModules
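-- Parse /proc/modules into (module name, modules that depend on it) pairs;
-- the fourth field of each line holds the comma-separated dependents, or
-- "-" when there are none.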
getProcModules :: IO ProcModules
getProcModules =
readFile proc_modules >>= \file ->
return $ map (\l -> let ts = words l in (head ts, filter (\s -> (not . null) s && s /= "-") $
splitOn "," (ts !! 3))) $ lines file
rmmodFromProcModules :: String -> ProcModules -> ProcModules
rmmodFromProcModules name = filter (\(m, _) -> m /= name)
getProcessID :: String -> IO [ProcessID]
getProcessID name = liftM (map read . words) $ catchIOError (readProcess "/bin/pidof" [name] "") (\_-> return [])
moduleDependencies :: String -> ProcModules -> [String]
moduleDependencies name =
concatMap (\(m,ds) -> if m == name then ds else [])
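-- Recursively unload every module that depends on the given one before
-- removing it with rmmod, keeping the cached /proc/modules view in sync.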
unloadModule :: String -> ModStateT IO ()
unloadModule name = do
proc_mods <- get
forM_ (moduleDependencies name proc_mods) unloadModule
when (isModuleLoaded name proc_mods) $ do
    liftIO $ rmmod name
    put $ rmmodFromProcModules name proc_mods
where rmmod name = do
putStrBoldLn $ "Unloading " ++ name ++ "..."
runSystem ("/sbin/rmmod " ++ name) ("rmmod " ++ name ++ " error.", True)
isModuleLoaded name = any (\(mod,_) -> mod == name)
data LoadMode = InsertMod | ProbeMod
deriving Eq
loadModule :: LoadMode -> String -> [String] -> IO ()
loadModule mode name opts = do
putStrBoldLn $ "Loading " ++ name ++ "..."
runSystem (tool ++ " " ++ name ++ " " ++ unwords opts) ("insmod " ++ name ++ " error.", True)
where tool = if mode == InsertMod then "/sbin/insmod"
else "/sbin/modprobe"
setupDevice :: Maybe Int -> Device -> IO ()
setupDevice queues (Device dev speed channels fctrl opts) = do
putStrBoldLn $ "Activating " ++ dev ++ "..."
threadDelay 1000000
runSystem ("/sbin/ifconfig " ++ dev ++ " up") ("ifconfig error!", True)
threadDelay 1000000
case fctrl of
No -> do
putStrBoldLn $ "Disabling flow control for " ++ dev ++ "..."
runSystem ("/sbin/ethtool -A " ++ dev ++ " autoneg off rx off tx off") ("ethtool: flowctrl error!", False)
Yes -> do
putStrBoldLn $ "Enabling flow control for " ++ dev ++ "..."
runSystem ("/sbin/ethtool -A " ++ dev ++ " autoneg on rx on tx on") ("ethtool: flowctrl error!", False)
Unspec -> return ()
threadDelay 1000000
when (isJust speed) $ do
let s = fromJust speed
putStrBoldLn $ "Setting speed (" ++ show s ++ ") for " ++ dev ++ "..."
runSystem ("/sbin/ethtool -s " ++ dev ++ " speed " ++ show s ++ " duplex full") ("ethtool: set speed error!", False)
threadDelay 1000000
when (isJust queues || isJust channels) $ do
let c = fromJust (queues <|> channels)
putStrBoldLn $ "Setting channels to " ++ show c ++ "..."
runSystem ("/sbin/ethtool -L " ++ dev ++ " combined " ++ show c) ("", False)
forM_ opts $ \(opt, arg) -> do
threadDelay 1000000
runSystem ("/sbin/ethtool " ++ opt ++ " " ++ dev ++ " " ++ arg) ("ethtool:" ++ opt ++ " error!", True)
getDevices :: Config -> [String]
getDevices conf =
map devname (concatMap devices (drivers conf))
setupIRQAffinity :: Int -> [Int] -> [String] -> [String] -> IO ()
setupIRQAffinity fc excl algs devs = do
let excl_opt = unwords (map (\n -> " -e " ++ show n) excl)
let affinity = zip algs (tails devs)
unless (null affinity) $
forM_ affinity $ \(alg, devs') ->
runSystem ("irq-affinity -f " ++ show fc ++ " " ++ excl_opt ++ " -a " ++ alg ++ " -m TxRx " ++ unwords devs') ("irq-affinity error!", True)
runSystem :: String -> (String,Bool) -> IO ()
runSystem cmd (errmsg,term) = do
putStrLn $ "-> " ++ cmd
system cmd >>= \ec -> when (ec /= ExitSuccess) $ (if term then error else putStrLn) errmsg
putStrBoldLn :: String -> IO ()
putStrBoldLn msg = putStrLn $ bold ++ msg ++ reset
| pandaychen/PFQ | user/pfq-load/pfq-load.hs | gpl-2.0 | 11,952 | 0 | 23 | 3,148 | 3,478 | 1,807 | 1,671 | 240 | 3 |
{-# LANGUAGE
RecordWildCards
, BangPatterns
, TypeSynonymInstances
, FlexibleContexts
#-}
module VirMat.Core.Packer
( runPacker
, runPacker2D
, setForce
, setDisp
) where
import Data.List (foldl')
import Data.IntMap (IntMap)
import Data.IntSet (IntSet)
import Data.Vector (Vector, (!))
import Linear.Vect
import qualified Data.IntMap as IM
import qualified Data.IntSet as IS
import qualified Data.Vector as V
import DeUni.DeWall
import Hammer.VTK
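-- | Relax the sphere packing for @n@ iterations: positions are advanced by a
-- damped Verlet step driven by pairwise overlap forces, and the Delaunay
-- triangulation providing the neighbourhood information is refreshed every
-- iteration during the initial smoothing phase and every other iteration
-- afterwards.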
runPacker :: (Packer v, Buildable S2 v, Norm Double v)
=> Int -> Box v -> SetPoint v -> IntMap (S2 v) -> (SetPoint v, IntMap (S2 v))
runPacker n box ps wall = let
interDeUni = 2
smooth = 15
(_, arrF1, wallF1) = foldl' pack (ps, ps, wall) [1..n]
pack (!ps0,!ps1,!wall1) i = let
ps2 = updateSP box wall1 ps0 ps1 0.6 time
len2 = V.length ps2
psID2 = [0 .. len2 - 1]
time
| i > smooth = 0.2
| otherwise = 0.2 * fromIntegral i / 15
wall2
| i > smooth && (rem i interDeUni /= 0) = wall1
| otherwise = fst $ runDelaunay box ps2 psID2
in (ps1,ps2,wall2)
in (arrF1, wallF1)
runPacker2D :: Int -> Box Vec2 -> SetPoint Vec2 -> IntMap (S2 Vec2) -> (SetPoint Vec2, IntMap (S2 Vec2))
runPacker2D = runPacker
force :: (Packer v, Norm Double v) => WPoint v -> WPoint v -> v Double
force ref x
| freeDist <= 0 = let
f = 4 * freeDist
-- f < 0 if x < 0
in dir &* (-1*f*f)
| otherwise = let
f = freeDist
in dir &* f
where
-- attraction direction
delta = point x &- point ref
dir = normalize delta
ldelta = norm delta
totalR = radius x + radius ref
freeDist = ldelta - totalR
evalForce :: (Packer v, Norm Double v) => SetPoint v -> PointPointer -> IntSet -> v Double
evalForce sp a ns = let
func acc x = acc &+ force (sp ! a) (sp ! x)
f = IS.foldl' func zero ns
in f &* (1 / radius (sp ! a))
setForce :: (Packer v, Norm Double v) => IntMap (S2 v) -> SetPoint v -> Vector (Vector (v Double))
setForce tri sp = let
conn = findPointConn tri
update i = case IM.lookup i conn of
Just ps -> V.map (\n -> force (sp!i) (sp!n)) . V.fromList . IS.toList $ ps -- evalForce sp i ps
_ -> V.empty -- zero
in V.generate (V.length sp) update
setDisp :: (Packer v, Norm Double v) => IntMap (S2 v) -> SetPoint v -> Vector (v Double)
setDisp tri sp = let
conn = findPointConn tri
update i = case IM.lookup i conn of
Just ps -> getDisplacement sp i ps 0.2
_ -> zero
in V.generate (V.length sp) update
getDisplacement :: (Packer v, Norm Double v) => SetPoint v -> Int -> IntSet -> Double -> v Double
getDisplacement sp i ps time = let
a = evalForce sp i ps
disp = a &* (time*time)
l = vlen disp
r = radius $ sp!i
  -- Cut off displacements bigger than half the radius. This avoids multiple
  -- spheres overlapping at the corners due to the "keepInBox" restriction.
in if l > 0.5*r then disp &* (0.5*r/l) else disp
updateSP :: (Packer v, Norm Double v) => Box v -> IntMap (S2 v) -> SetPoint v -> SetPoint v
-> Double -> Double -> SetPoint v
updateSP box tri sp0 sp1 damp time = let
conn = findPointConn tri
update i = let
p0 = sp0!.i
p1 = sp1!.i
new ps = let
-- Verlet integration with damping effect (damp = [0,1])
deltaPos = getDisplacement sp1 i ps time
newP = ((2-damp) *& p1) &- ((1-damp) *& p0) &+ deltaPos
in keepInBox box newP
in case IM.lookup i conn of
Just ps -> (sp1 ! i) {point = new ps}
_ -> sp1 ! i
in V.generate (V.length sp1) update
class (PointND v)=> Packer v where
findPointConn :: IntMap (S2 v) -> IntMap IntSet
keepInBox :: Box v -> v Double -> v Double
instance Packer Vec2 where
findPointConn = let
func acc x = let
(a, b, c) = face2DPoints x
add ref to = IM.insertWith IS.union ref (IS.singleton to)
in add a b $ add b c $ add c a $
add b a $ add c b $ add a c acc
in IM.foldl' func IM.empty
keepInBox Box2D{..} (Vec2 x y) = let
forceInBox a minA maxA
| a > maxA = maxA
| a < minA = minA
| otherwise = a
newX = forceInBox x xMin2D xMax2D
newY = forceInBox y yMin2D yMax2D
in (Vec2 newX newY)
instance Packer Vec3 where
findPointConn = let
func acc x = let
(a, b, c, d) = tetraPoints x
add ref to = IM.insertWith IS.union ref (IS.singleton to)
in add a b $ add b c $ add c a $
add b a $ add c b $ add a c $
add a d $ add b d $ add c d $
add d a $ add d b $ add d c acc
in IM.foldl' func IM.empty
keepInBox Box3D{..} (Vec3 x y z) = let
forceInBox a minA maxA
| a > maxA = maxA
| a < minA = minA
| otherwise = a
newX = forceInBox x xMin3D xMax3D
newY = forceInBox y yMin3D yMax3D
newZ = forceInBox z zMin3D zMax3D
in (Vec3 newX newY newZ)
-- ============================= Testing ================================
testForce :: [Double] -> [Double]
testForce = map (norm . force (WPoint 1 (Vec2 0 0)) . WPoint 5 . Vec2 0.1)
icosahedron :: Double -> Vec3D -> (Vector Vec3D, Vector (Int, Int, Int))
icosahedron r pos = let
t = (1 + sqrt 5) / 2 -- golden ratio
k = 1 / sqrt (1 + t*t) -- correct the radius. For (0, 1, phi) the edge
-- is 2 then the radius is sqrt(1+phi*phi)
points = V.fromList [ (t , 1, 0), (-t, 1, 0), (t, -1, 0), (-t, -1, 0)
, (1, 0, t), (1, 0, -t), (-1, 0, t), (-1, 0, -t)
, (0, t, 1), ( 0, -t, 1), (0, t, -1), (0, -t, -1) ]
tri = V.fromList [ (0, 8, 4), (0, 5, 10), (2, 4, 9), (2, 11, 5)
, (1, 6, 8), (1, 10, 7), (3, 9, 6), (3, 7, 11)
, (0, 10, 8), (1, 8, 10), (2, 9, 11), (3, 9, 11)
, (4, 2, 0), (5, 0, 2), (6, 1, 3), (7, 3, 1)
, (8, 6, 4), (9, 4, 6), (10, 5, 7), (11, 7, 5) ]
in (V.map ((pos &+) . (r * k *&) . (\(x,y,z) -> Vec3 x y z)) points, tri)
writeWPointsVTKfile :: String -> SetPoint Vec3 -> IO ()
writeWPointsVTKfile file points = let
vtks = V.imap foo points
foo nid x = let
(ps, cs) = icosahedron (radius x) (point x)
psU = V.convert ps
attr = mkCellAttr "GrainID" (\_ _ _ -> nid)
in mkUGVTK "WPoint" psU cs [] [attr]
in writeMultiVTKfile file True vtks
| lostbean/VirMat | src/VirMat/Core/Packer.hs | gpl-3.0 | 6,418 | 0 | 23 | 1,995 | 2,940 | 1,537 | 1,403 | 156 | 2 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE TemplateHaskell #-}
-----------------------------------------------------------------------------
-- |
-- Module : Text.BlogLiterately.Options
-- Copyright : (c) 2008-2010 Robert Greayer, 2012 Brent Yorgey
-- License : GPL (see LICENSE)
-- Maintainer : Brent Yorgey <[email protected]>
--
-- Configuration and command-line options.
--
-----------------------------------------------------------------------------
module Text.BlogLiterately.Options
(
-- * BlogLiterately options record
BlogLiterately(..), blOpts
-- ** Lenses
-- $lenses
, style
, hsHighlight
, otherHighlight
, litHaskell
, toc
, rawlatex
, wplatex
, math
, ghci
, uploadImages
, categories
, tags
, blogid
, profile
, blog
, user
, password
, title
, file
, format
, postid
, page
, publish
, htmlOnly
, citations
, xtra
-- ** Default accessors
-- $defaccess
, style'
, hsHighlight'
, otherHighlight'
, litHaskell'
, toc'
, rawlatex'
, wplatex'
, math'
, ghci'
, uploadImages'
, blogid'
, profile'
, blog'
, user'
, password'
, title'
, file'
, format'
, postid'
, page'
, publish'
, htmlOnly'
, citations'
)
where
import Control.Lens (makeLenses, view)
import Control.Monad (mplus)
import Data.Maybe (fromMaybe)
import qualified Data.Semigroup as Semi
import Data.Version
import Paths_BlogLiterately (version)
import System.Console.CmdArgs
import Text.BlogLiterately.Highlight
-- | Configuration record (and command-line options) for @BlogLiterately@.
data BlogLiterately = BlogLiterately
{ _style :: Maybe String -- ^ Name of a style file
, _hsHighlight :: Maybe HsHighlight -- ^ Haskell highlighting mode
, _otherHighlight :: Maybe Bool -- ^ Use highlighting-kate for
-- non-Haskell?
, _litHaskell :: Maybe Bool -- ^ Parse as literate Haskell?
, _toc :: Maybe Bool -- ^ Generate a table of contents?
, _rawlatex :: Maybe Bool -- ^ Pass LaTeX through unchanged?
, _wplatex :: Maybe Bool -- ^ Format LaTeX for WordPress?
, _math :: Maybe String -- ^ Indicate how to format math
, _ghci :: Maybe Bool -- ^ Automatically process ghci sessions?
, _uploadImages :: Maybe Bool -- ^ Automatically upload images?
, _categories :: [String] -- ^ Categories for the post
, _tags :: [String] -- ^ Tags for the post
, _blogid :: Maybe String -- ^ Blog-specific identifier
-- (e.g. for blogging software
-- handling multiple blogs)
, _profile :: Maybe String -- ^ Name of profile to use.
, _blog :: Maybe String -- ^ Blog xmlrpc URL
, _user :: Maybe String -- ^ Blog user name
, _password :: Maybe String -- ^ Blog password (omit to be interactively prompted)
, _title :: Maybe String -- ^ Post title
, _file :: Maybe String -- ^ File to post
, _format :: Maybe String -- ^ Format of the file
-- (currently supported:
-- markdown, rst)
, _postid :: Maybe String -- ^ ID of a post to update
, _page :: Maybe Bool -- ^ Create a \"page\" instead of a post
, _publish :: Maybe Bool -- ^ Should the post be published?
-- (Otherwise it is uploaded as a draft.)
, _htmlOnly :: Maybe Bool -- ^ Don't upload anything;
-- just output HTML to
-- stdout.
, _citations :: Maybe Bool -- ^ Process citations? (default: true)
, _xtra :: [String] -- ^ Extension arguments, for use e.g. by
-- custom transforms
}
deriving (Show,Data,Typeable)
-- $lenses
-- We derive lenses for all the @BlogLiterately@ fields using the
-- @lens@ library.
makeLenses ''BlogLiterately
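-- Note: when two configurations are combined, 'Maybe' fields are left-biased
-- (the first 'Just' wins) and list fields are concatenated, both via 'mplus'.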
instance Semi.Semigroup BlogLiterately where
bl1 <> bl2 =
BlogLiterately
{ _style = combine _style
, _hsHighlight = combine _hsHighlight
, _otherHighlight = combine _otherHighlight
, _litHaskell = combine _litHaskell
, _toc = combine _toc
, _rawlatex = combine _rawlatex
, _wplatex = combine _wplatex
, _math = combine _math
, _ghci = combine _ghci
, _uploadImages = combine _uploadImages
, _categories = combine _categories
, _tags = combine _tags
, _blogid = combine _blogid
, _profile = combine _profile
, _blog = combine _blog
, _user = combine _user
, _password = combine _password
, _title = combine _title
, _file = combine _file
, _format = combine _format
, _postid = combine _postid
, _page = combine _page
, _publish = combine _publish
, _htmlOnly = combine _htmlOnly
, _citations = combine _citations
, _xtra = combine _xtra
}
where combine f = f bl1 `mplus` f bl2
instance Monoid BlogLiterately where
mempty =
BlogLiterately
{ _style = Nothing
, _hsHighlight = Nothing
, _otherHighlight = Nothing
, _litHaskell = Nothing
, _toc = Nothing
, _rawlatex = Nothing
, _wplatex = Nothing
, _math = Nothing
, _ghci = Nothing
, _uploadImages = Nothing
, _categories = []
, _tags = []
, _blogid = Nothing
, _profile = Nothing
, _blog = Nothing
, _user = Nothing
, _password = Nothing
, _title = Nothing
, _file = Nothing
, _format = Nothing
, _postid = Nothing
, _page = Nothing
, _publish = Nothing
, _htmlOnly = Nothing
, _citations = Nothing
, _xtra = []
}
mappend = (Semi.<>)
--------------------------------------------------
-- Default accessors
--------------------------------------------------
-- $defaccess
-- Some convenient accessors that strip off the Maybe and return an
-- appropriate default value.
style' :: BlogLiterately -> String
style' = fromMaybe "" . view style
hsHighlight' :: BlogLiterately -> HsHighlight
hsHighlight' = fromMaybe (HsColourInline defaultStylePrefs) . view hsHighlight
otherHighlight' :: BlogLiterately -> Bool
otherHighlight' = fromMaybe True . view otherHighlight
litHaskell' :: BlogLiterately -> Bool
litHaskell' = fromMaybe True . view litHaskell
toc' :: BlogLiterately -> Bool
toc' = fromMaybe False . view toc
rawlatex' :: BlogLiterately -> Bool
rawlatex' = fromMaybe False . view rawlatex
wplatex' :: BlogLiterately -> Bool
wplatex' = fromMaybe False . view wplatex
math' :: BlogLiterately -> String
math' = fromMaybe "" . view math
ghci' :: BlogLiterately -> Bool
ghci' = fromMaybe False . view ghci
uploadImages' :: BlogLiterately -> Bool
uploadImages' = fromMaybe False . view uploadImages
blogid' :: BlogLiterately -> String
blogid' = fromMaybe "default" . view blogid
profile' :: BlogLiterately -> String
profile' = fromMaybe "" . view profile
blog' :: BlogLiterately -> String
blog' = fromMaybe "" . view blog
user' :: BlogLiterately -> String
user' = fromMaybe "" . view user
password' :: BlogLiterately -> String
password' = fromMaybe "" . view password
title' :: BlogLiterately -> String
title' = fromMaybe "" . view title
file' :: BlogLiterately -> String
file' = fromMaybe "" . view file
format' :: BlogLiterately -> String
format' = fromMaybe "" . view format
postid' :: BlogLiterately -> String
postid' = fromMaybe "" . view postid
page' :: BlogLiterately -> Bool
page' = fromMaybe False . view page
publish' :: BlogLiterately -> Bool
publish' = fromMaybe False . view publish
htmlOnly' :: BlogLiterately -> Bool
htmlOnly' = fromMaybe False . view htmlOnly
citations' :: BlogLiterately -> Bool
citations' = fromMaybe True . view citations
-- | Command-line configuration for use with @cmdargs@.
blOpts :: BlogLiterately
blOpts = BlogLiterately
{ _style = def &= help "style specification (for --hscolour-icss)"
&= typFile
&= name "style" &= name "s" &= explicit
, _hsHighlight = enum
[ Nothing
&= explicit
&= name "hscolour-icss"
&= help "highlight haskell: hscolour, inline style (default)"
, Just HsColourCSS
&= explicit
&= name "hscolour-css"
&= help "highlight haskell: hscolour, separate stylesheet"
, Just HsNoHighlight
&= explicit
&= name "hs-nohighlight"
&= help "no haskell highlighting"
, Just HsKate
&= explicit
&= name "hs-kate"
&= help "highlight haskell with highlighting-kate"
]
, _otherHighlight = enum
[ Nothing
&= explicit
&= name "kate"
&= help "highlight non-Haskell code with highlighting-kate (default)"
, Just False
&= explicit
&= name "no-kate"
&= help "don't highlight non-Haskell code"
]
, _toc = enum
[ Nothing
&= name "no-toc"
&= help "don't generate a table of contents (default)"
&= explicit
, Just True
&= name "toc"
&= help "generate a table of contents"
&= explicit
]
, _rawlatex = def &= help "pass inline/display LaTeX through unchanged"
&= name "rawlatex" &= name "r" &= explicit
, _wplatex = def &= help "reformat inline LaTeX the way WordPress expects"
&= name "wplatex" &= name "w" &= explicit
, _math = def &= help "how to layout math, where --math=<pandoc-option>[=URL]"
&= name "math" &= name "m" &= explicit
, _litHaskell = enum
[ Nothing
&= help "parse as literate Haskell (default)"
&= name "lit-haskell"
&= explicit
, Just False
&= help "do not parse as literate Haskell"
&= name "no-lit-haskell"
&= explicit
]
, _ghci = def &= help "run [ghci] blocks through ghci and include output"
&= name "ghci" &= name "g" &= explicit
, _uploadImages = def &= name "upload-images" &= name "I" &= explicit &= help "upload local images"
, _page = def &= help "create a \"page\" instead of a post (WordPress only)"
&= name "page" &= explicit
, _publish = def &= help "publish post (otherwise it's uploaded as a draft)"
&= name "publish" &= explicit
, _htmlOnly = def &= help "don't upload anything; output HTML to stdout"
&= name "html-only" &= name "h" &= explicit
, _categories = def
&= explicit
&= name "category" &= name "C"
&= help "post category (can specify more than one)"
, _tags = def
&= explicit
&= name "tag" &= name "T"
&= help "tag (can specify more than one)"
, _citations = enum
[ Nothing
&= help "process citations (default)"
&= name "citations"
&= explicit
, Just False
&= help "do not process citations"
&= name "no-citations"
&= explicit
]
, _xtra = def
&= help "extension arguments, for use with custom extensions"
&= name "xtra" &= name "x" &= explicit
, _blogid = def &= help "Blog specific identifier" &= typ "ID"
&= name "blogid" &= explicit
, _postid = def &= help "Post to replace (if any)" &= typ "ID"
&= name "postid" &= name "i" &= explicit
, _profile = def &= typ "STRING" &= help "profile to use"
&= name "profile" &= name "P" &= explicit
, _blog = def &= typ "URL" &= help "blog XML-RPC url (if omitted, HTML goes to stdout)"
&= name "blog" &= name "b" &= explicit
, _user = def &= typ "USER" &= help "user name"
&= name "user" &= name "u" &= explicit
, _password = def &= typ "PASSWORD" &= help "password"
&= name "password" &= name "p" &= explicit
, _title = def &= typ "TITLE" &= help "post title"
&= name "title" &= name "t" &= explicit
, _format = def &= typ "FORMAT" &= help "input format: markdown or rst"
&= name "format" &= name "f" &= explicit
, _file = def &= argPos 0 &= typ "FILE"
}
&= program "BlogLiterately"
&= summary ("BlogLierately v" ++ showVersion version ++ ", (c) Robert Greayer 2008-2010, Brent Yorgey 2012-2013\n" ++
"For help, see http://byorgey.wordpress.com/blogliterately/")
| byorgey/BlogLiterately | src/Text/BlogLiterately/Options.hs | gpl-3.0 | 13,763 | 0 | 14 | 4,939 | 2,625 | 1,418 | 1,207 | 301 | 1 |
{-# LANGUAGE QuasiQuotes, OverloadedStrings #-}
module ProceduralSpec (spec) where
import Test.Hspec
import Language.Mulang.Parsers.JavaScript
import Language.Mulang.Identifier
import Language.Mulang.Ast
import Language.Mulang.Inspector.Literal
import Language.Mulang.Inspector.Matcher
import Language.Mulang.Inspector.Procedural
spec :: Spec
spec = do
describe "usesForLoop" $ do
it "is True when present in function" $ do
usesForLoop (js "function f() { for(;;) { console.log('foo') } }") `shouldBe` True
it "is True when present in lambda" $ do
usesForLoop (js "let f = function() { for(;;) { console.log('foo') } }") `shouldBe` True
it "is True when present in object" $ do
usesForLoop (js "let x = {f: function() { for(;;) { console.log('foo') } }}") `shouldBe` True
it "is True when present in method" $ do
usesForLoop (js "let o = {f: function() { for(;;) { console.log('foo') } }}") `shouldBe` True
it "is False when not present in function" $ do
usesForLoop (js "function f() {}") `shouldBe` False
describe "usesLoop" $ do
it "is True when repeat is present" $ do
let code = SimpleFunction "f" [] (Sequence [Repeat (MuNumber 2) None, Return (MuNumber 2)])
usesLoop code `shouldBe` True
it "is True when foreach is present" $ do
let code = SimpleFunction "f" [] (Sequence [
For [Generator (VariablePattern "x") (MuList [MuNumber 2])] None,
Return (MuNumber 2)
])
usesLoop code `shouldBe` True
it "is True when for is present" $ do
let code = js "function f() { for(;;); }"
usesLoop code `shouldBe` True
it "is True when while is present" $ do
let code = js "function f() { while(true); }"
usesLoop code `shouldBe` True
it "is True when a for-of is present" $ do
let code = js "function printAll(list) { for (let e of list) { console.log(e) } }"
usesForEach code `shouldBe` True
usesLoop code `shouldBe` True
it "is False when none of the aforementioned are present" $ do
let code = js "function f(x){return 1;}"
usesLoop code `shouldBe` False
describe "usesRepeat" $ do
it "is True when present in function" $ do
let code = SimpleFunction "f" [] (Sequence [Repeat (MuNumber 2) None, Return (MuNumber 2)])
usesRepeat code `shouldBe` True
it "is False when not present in function" $ do
let code = js "function f(x){return 1;}"
usesRepeat code `shouldBe` False
describe "usesSwitch" $ do
it "is True when present in function" $ do
let code = Switch (Reference "x") [(None, MuNumber 0)] None
usesSwitch code `shouldBe` True
it "is False when not present in function" $ do
let code = js "function f(x){return 1;}"
usesSwitch code `shouldBe` False
describe "declaresProcedure" $ do
describe "with procedure declarations" $ do
it "is True when procedure is declared" $ do
let code = js "function f(){}"
declaresProcedure (named "f") code `shouldBe` True
it "is False when procedures is not declared" $ do
let code = js "function f(){}"
declaresProcedure (named "g") code `shouldBe` False
it "is False when using a matcher and procedure does not have a body" $ do
(declaresProcedureMatching (with . isNumber $ 2) anyone) (js "function f() {}") `shouldBe` False
describe "usesWhile" $ do
it "is True when present in function" $ do
usesWhile (js "function f() { while(true) { console.log('foo') } }") `shouldBe` True
it "is True when present in lambda" $ do
usesWhile (js "let f = function() { while(true) { console.log('foo') } }") `shouldBe` True
it "is True when present in object" $ do
usesWhile (js "let x = {f: function() { while(true) { console.log('foo') } }}") `shouldBe` True
it "is True when present in method" $ do
usesWhile (js "let o = {f: function() { while(true) { console.log('foo') } }}") `shouldBe` True
it "is False when not present in function" $ do
usesWhile (js "function f() {}") `shouldBe` False
| mumuki/mulang | spec/ProceduralSpec.hs | gpl-3.0 | 4,313 | 0 | 26 | 1,195 | 1,054 | 498 | 556 | 79 | 1 |
module Handler.ProgramChairSpec (spec) where
import TestImport
spec :: Spec
spec = withApp $ do
describe "getProgramChairR" $ do
error "Spec not implemented: getProgramChairR"
describe "postProgramChairR" $ do
error "Spec not implemented: postProgramChairR"
| ackao/APRICoT | web/conference-management-system/test/Handler/ProgramChairSpec.hs | gpl-3.0 | 288 | 0 | 11 | 62 | 60 | 29 | 31 | 8 | 1 |
{-# LANGUAGE TemplateHaskell #-}
{-|
Module : System.Console.SneakyTerm.Tile
Maintainer : [email protected]
-}
module System.Console.SneakyTerm.Tile where
import ClassyPrelude
import Control.Lens (makeLenses)
import System.Console.SneakyTerm.ColorPair
import System.Console.SneakyTerm.PointInt
-- | Represents a character on the screen
data Tile = Tile {
_tilePosition :: !PointInt -- ^ Tile position
, _tileCharacter :: !Char -- ^ Which character to display
, _tileColor :: ColorPair -- ^ Color for the character
} deriving(Eq,Show,Read)
$(makeLenses ''Tile)
| pmiddend/sneakyterm | src/System/Console/SneakyTerm/Tile.hs | gpl-3.0 | 641 | 0 | 9 | 140 | 97 | 60 | 37 | 16 | 0 |
import Numeric.Natural
collatz :: Natural -> [Natural]
collatz 1 = [1]
collatz x
| even x = x : collatz (x `div` 2)
| odd x = x : collatz (x * 3 + 1)
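-- e.g. collatz 6 == [6,3,10,5,16,8,4,2,1]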
formatCollatz :: Natural -> IO ()
formatCollatz x = putStrLn (show x ++ ": " ++ (show . length . collatz) x)
main :: IO ()
main = mapM_ formatCollatz [1..]
| zeroxfourc/ludvigit | src/collatzall.hs | gpl-3.0 | 311 | 0 | 11 | 71 | 168 | 84 | 84 | 10 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.CloudShell.Operations.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Gets the latest state of a long-running operation. Clients can use this
-- method to poll the operation result at intervals as recommended by the
-- API service.
--
-- /See:/ <https://cloud.google.com/shell/docs/ Cloud Shell API Reference> for @cloudshell.operations.get@.
module Network.Google.Resource.CloudShell.Operations.Get
(
-- * REST Resource
OperationsGetResource
-- * Creating a Request
, operationsGet
, OperationsGet
-- * Request Lenses
, ogXgafv
, ogUploadProtocol
, ogAccessToken
, ogUploadType
, ogName
, ogCallback
) where
import Network.Google.CloudShell.Types
import Network.Google.Prelude
-- | A resource alias for @cloudshell.operations.get@ method which the
-- 'OperationsGet' request conforms to.
type OperationsGetResource =
"v1" :>
Capture "name" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :> Get '[JSON] Operation
-- | Gets the latest state of a long-running operation. Clients can use this
-- method to poll the operation result at intervals as recommended by the
-- API service.
--
-- /See:/ 'operationsGet' smart constructor.
data OperationsGet =
OperationsGet'
{ _ogXgafv :: !(Maybe Xgafv)
, _ogUploadProtocol :: !(Maybe Text)
, _ogAccessToken :: !(Maybe Text)
, _ogUploadType :: !(Maybe Text)
, _ogName :: !Text
, _ogCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'OperationsGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ogXgafv'
--
-- * 'ogUploadProtocol'
--
-- * 'ogAccessToken'
--
-- * 'ogUploadType'
--
-- * 'ogName'
--
-- * 'ogCallback'
operationsGet
:: Text -- ^ 'ogName'
-> OperationsGet
operationsGet pOgName_ =
OperationsGet'
{ _ogXgafv = Nothing
, _ogUploadProtocol = Nothing
, _ogAccessToken = Nothing
, _ogUploadType = Nothing
, _ogName = pOgName_
, _ogCallback = Nothing
}
-- | V1 error format.
ogXgafv :: Lens' OperationsGet (Maybe Xgafv)
ogXgafv = lens _ogXgafv (\ s a -> s{_ogXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
ogUploadProtocol :: Lens' OperationsGet (Maybe Text)
ogUploadProtocol
= lens _ogUploadProtocol
(\ s a -> s{_ogUploadProtocol = a})
-- | OAuth access token.
ogAccessToken :: Lens' OperationsGet (Maybe Text)
ogAccessToken
= lens _ogAccessToken
(\ s a -> s{_ogAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
ogUploadType :: Lens' OperationsGet (Maybe Text)
ogUploadType
= lens _ogUploadType (\ s a -> s{_ogUploadType = a})
-- | The name of the operation resource.
ogName :: Lens' OperationsGet Text
ogName = lens _ogName (\ s a -> s{_ogName = a})
-- | JSONP
ogCallback :: Lens' OperationsGet (Maybe Text)
ogCallback
= lens _ogCallback (\ s a -> s{_ogCallback = a})
instance GoogleRequest OperationsGet where
type Rs OperationsGet = Operation
type Scopes OperationsGet =
'["https://www.googleapis.com/auth/cloud-platform"]
requestClient OperationsGet'{..}
= go _ogName _ogXgafv _ogUploadProtocol
_ogAccessToken
_ogUploadType
_ogCallback
(Just AltJSON)
cloudShellService
where go
= buildClient (Proxy :: Proxy OperationsGetResource)
mempty
| brendanhay/gogol | gogol-cloudshell/gen/Network/Google/Resource/CloudShell/Operations/Get.hs | mpl-2.0 | 4,468 | 0 | 15 | 1,040 | 698 | 409 | 289 | 98 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Logging.Folders.Logs.Delete
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Deletes all the log entries in a log for the _Default Log Bucket. The
-- log reappears if it receives new entries. Log entries written shortly
-- before the delete operation might not be deleted. Entries received after
-- the delete operation with a timestamp before the operation will be
-- deleted.
--
-- /See:/ <https://cloud.google.com/logging/docs/ Cloud Logging API Reference> for @logging.folders.logs.delete@.
module Network.Google.Resource.Logging.Folders.Logs.Delete
(
-- * REST Resource
FoldersLogsDeleteResource
-- * Creating a Request
, foldersLogsDelete
, FoldersLogsDelete
-- * Request Lenses
, fldXgafv
, fldUploadProtocol
, fldAccessToken
, fldUploadType
, fldLogName
, fldCallback
) where
import Network.Google.Logging.Types
import Network.Google.Prelude
-- | A resource alias for @logging.folders.logs.delete@ method which the
-- 'FoldersLogsDelete' request conforms to.
type FoldersLogsDeleteResource =
"v2" :>
Capture "logName" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :> Delete '[JSON] Empty
-- | Deletes all the log entries in a log for the _Default Log Bucket. The
-- log reappears if it receives new entries. Log entries written shortly
-- before the delete operation might not be deleted. Entries received after
-- the delete operation with a timestamp before the operation will be
-- deleted.
--
-- /See:/ 'foldersLogsDelete' smart constructor.
data FoldersLogsDelete =
FoldersLogsDelete'
{ _fldXgafv :: !(Maybe Xgafv)
, _fldUploadProtocol :: !(Maybe Text)
, _fldAccessToken :: !(Maybe Text)
, _fldUploadType :: !(Maybe Text)
, _fldLogName :: !Text
, _fldCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'FoldersLogsDelete' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'fldXgafv'
--
-- * 'fldUploadProtocol'
--
-- * 'fldAccessToken'
--
-- * 'fldUploadType'
--
-- * 'fldLogName'
--
-- * 'fldCallback'
foldersLogsDelete
:: Text -- ^ 'fldLogName'
-> FoldersLogsDelete
foldersLogsDelete pFldLogName_ =
FoldersLogsDelete'
{ _fldXgafv = Nothing
, _fldUploadProtocol = Nothing
, _fldAccessToken = Nothing
, _fldUploadType = Nothing
, _fldLogName = pFldLogName_
, _fldCallback = Nothing
}
-- | V1 error format.
fldXgafv :: Lens' FoldersLogsDelete (Maybe Xgafv)
fldXgafv = lens _fldXgafv (\ s a -> s{_fldXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
fldUploadProtocol :: Lens' FoldersLogsDelete (Maybe Text)
fldUploadProtocol
= lens _fldUploadProtocol
(\ s a -> s{_fldUploadProtocol = a})
-- | OAuth access token.
fldAccessToken :: Lens' FoldersLogsDelete (Maybe Text)
fldAccessToken
= lens _fldAccessToken
(\ s a -> s{_fldAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
fldUploadType :: Lens' FoldersLogsDelete (Maybe Text)
fldUploadType
= lens _fldUploadType
(\ s a -> s{_fldUploadType = a})
-- | Required. The resource name of the log to delete:
-- projects\/[PROJECT_ID]\/logs\/[LOG_ID]
-- organizations\/[ORGANIZATION_ID]\/logs\/[LOG_ID]
-- billingAccounts\/[BILLING_ACCOUNT_ID]\/logs\/[LOG_ID]
-- folders\/[FOLDER_ID]\/logs\/[LOG_ID][LOG_ID] must be URL-encoded. For
-- example, \"projects\/my-project-id\/logs\/syslog\",
-- \"organizations\/123\/logs\/cloudaudit.googleapis.com%2Factivity\".For
-- more information about log names, see LogEntry.
fldLogName :: Lens' FoldersLogsDelete Text
fldLogName
= lens _fldLogName (\ s a -> s{_fldLogName = a})
-- | JSONP
fldCallback :: Lens' FoldersLogsDelete (Maybe Text)
fldCallback
= lens _fldCallback (\ s a -> s{_fldCallback = a})
instance GoogleRequest FoldersLogsDelete where
type Rs FoldersLogsDelete = Empty
type Scopes FoldersLogsDelete =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/logging.admin"]
requestClient FoldersLogsDelete'{..}
= go _fldLogName _fldXgafv _fldUploadProtocol
_fldAccessToken
_fldUploadType
_fldCallback
(Just AltJSON)
loggingService
where go
= buildClient
(Proxy :: Proxy FoldersLogsDeleteResource)
mempty
| brendanhay/gogol | gogol-logging/gen/Network/Google/Resource/Logging/Folders/Logs/Delete.hs | mpl-2.0 | 5,451 | 0 | 15 | 1,162 | 713 | 423 | 290 | 102 | 1 |
module Graphics.Blobs.Shape
(
Shape(..)
, ShapeStyle(..)
, circle
, logicalDraw
, logicalLineSegments
) where
import Graphics.Blobs.CommonIO
import qualified Graphics.UI.WX as WX
import Graphics.UI.WXCore hiding (Colour)
--import Graphics.UI.WXCore.Draw
import Graphics.Blobs.Math
import Text.Parse
--import Text.XML.HaXml.XmlContent
--import NetworkFile
import Graphics.Blobs.Colors
import Graphics.Blobs.Constants
import Text.XML.HaXml.Types
import qualified Text.XML.HaXml.XmlContent.Haskell as XML
import Data.List(isPrefixOf)
data Shape =
Circle { shapeStyle :: ShapeStyle, shapeRadius :: Double }
| Polygon { shapeStyle :: ShapeStyle, shapePerimeter :: [DoublePoint] }
-- centred on (0,0)
| Lines { shapeStyle :: ShapeStyle, shapePerimeter :: [DoublePoint] }
-- no fill for open shape
| Composite { shapeSegments :: [Shape] } -- drawn in given order
deriving (Eq, Show, Read)
data ShapeStyle = ShapeStyle
{ styleStrokeWidth :: Int
, styleStrokeColour :: Colour
, styleFill :: Colour
}
deriving (Eq, Show, Read)
instance Parse Shape where
parse = oneOf
[ do{ isWord "Circle"
; return Circle
`discard` isWord "{" `apply` field "shapeStyle"
`discard` isWord "," `apply` field "shapeRadius"
`discard` isWord "}"
}
, do{ isWord "Polygon"
; return Polygon
`discard` isWord "{" `apply` field "shapeStyle"
`discard` isWord "," `apply` field "shapePerimeter"
`discard` isWord "}"
}
, do{ isWord "Lines"
; return Lines
`discard` isWord "{" `apply` field "shapeStyle"
`discard` isWord "," `apply` field "shapePerimeter"
`discard` isWord "}"
}
, do{ isWord "Composite"
; return Composite
`discard` isWord "{" `apply` field "shapeSegments"
`discard` isWord "}"
}
] `adjustErr` (++"\nexpected a Shape (Circle,Polygon,Lines,Composite)")
instance Parse ShapeStyle where
parse = do{ isWord "ShapeStyle"
; return ShapeStyle
`discard` isWord "{" `apply` field "styleStrokeWidth"
`discard` isWord "," `apply` field "styleStrokeColour"
`discard` isWord "," `apply` field "styleFill"
`discard` isWord "}"
}
{-
instance HTypeable Shape where
toHType s = Defined "Shape" [] [ Constr "Circle" [] []
, Constr "Polygon" [] []
, Constr "Lines" [] []
, Constr "Composite" [] []
]
instance XmlContent Shape where
toContents s@(Circle{}) =
[ mkElemC "Circle" (toContents (shapeStyle s)
++ [mkElemC "radius" (toContents (shapeRadius s))]) ]
toContents s@(Polygon{}) =
[ mkElemC "Polygon" (toContents (shapeStyle s)
++ [mkElemC "perimeter" (concatMap toContents
(shapePerimeter s))]) ]
toContents s@(Lines{}) =
[ mkElemC "Lines" (toContents (shapeStyle s)
++ [mkElemC "perimeter" (concatMap toContents
(shapePerimeter s))]) ]
toContents s@(Composite{}) =
[ mkElemC "Composite" (concatMap toContents (shapeSegments s)) ]
parseContents = do
{ e@(Elem t _ _) <- element ["Circle","Polygon","Lines","Composite"]
; case t of
"Circle" -> interior e $
do{ style <- parseContents
; r <- inElement "radius" parseContents
; return (Circle {shapeStyle=style, shapeRadius=r})
}
"Polygon" -> interior e $
do{ style <- parseContents
; p <- inElement "perimeter" $ many1 parseContents
; return (Polygon {shapeStyle=style, shapePerimeter=p})
}
"Lines" -> interior e $
do{ style <- parseContents
; p <- inElement "perimeter" $ many1 parseContents
; return (Lines {shapeStyle=style, shapePerimeter=p})
}
"Composite" -> interior e $ do{ ss <- many1 parseContents
; return (Composite {shapeSegments=ss})
}
}
instance HTypeable ShapeStyle where
toHType s = Defined "ShapeStyle" [] [Constr "ShapeStyle" [] []]
instance XmlContent ShapeStyle where
toContents s =
[ mkElemC "ShapeStyle"
[ mkElemC "StrokeWidth" (toContents (styleStrokeWidth s))
, mkElemC "StrokeColour" (toContents (styleStrokeColour s))
, mkElemC "Fill" (toContents (styleFill s))
]
]
parseContents = inElement "ShapeStyle" $ do
{ w <- inElement "StrokeWidth" parseContents
; c <- inElement "StrokeColour" parseContents
; f <- inElement "Fill" parseContents
; return (ShapeStyle { styleStrokeWidth=w, styleStrokeColour=c
, styleFill=f })
}
-}
logicalDraw :: Size -> DC () -> DoublePoint -> Shape -> [WX.Prop (DC ())] -> IO ()
logicalDraw ppi dc centrePoint shape options =
case shape of
Circle {} -> WX.circle dc (logicalToScreenPoint ppi centrePoint)
(logicalToScreenX ppi (shapeRadius shape))
(style2options (shapeStyle shape)++options)
Polygon {} -> WX.polygon dc (map (logicalToScreenPoint ppi
. translate centrePoint)
(shapePerimeter shape))
(style2options (shapeStyle shape)++options)
Lines {} -> logicalLineSegments ppi dc (map (translate centrePoint)
(shapePerimeter shape))
(style2options (shapeStyle shape)++options)
Composite {}-> mapM_ (\s-> logicalDraw ppi dc centrePoint s options)
(shapeSegments shape)
logicalLineSegments :: Size -> DC () -> [DoublePoint] -> [WX.Prop (DC ())] -> IO ()
logicalLineSegments _ _ [_p] _options = return ()
logicalLineSegments _ _ [ ] _options = return ()
logicalLineSegments ppi dc (fromPoint:toPoint:ps) options =
do{ WX.line dc (logicalToScreenPoint ppi fromPoint)
(logicalToScreenPoint ppi toPoint) options
; logicalLineSegments ppi dc (toPoint:ps) options
}
circle :: Shape
circle = Circle { shapeStyle = defaultShapeStyle
, shapeRadius = kNODE_RADIUS }
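-- A minimal call-site sketch for 'logicalDraw' (illustrative only; it assumes
-- that 'DoublePoint' from Graphics.Blobs.Math exposes a plain two-Double
-- constructor, which is not shown in this file):
--
-- > paintNode :: Size -> DC () -> IO ()
-- > paintNode ppi dc = logicalDraw ppi dc (DoublePoint 0 0) circle []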
style2options :: ShapeStyle -> [WX.Prop (DC ())]
style2options sty =
[ WX.penWidth WX.:= styleStrokeWidth sty
, WX.penColor WX.:= wxcolor (styleStrokeColour sty)
, WX.brushKind WX.:= BrushSolid
, WX.brushColor WX.:= wxcolor (styleFill sty)
]
defaultShapeStyle :: ShapeStyle
defaultShapeStyle =
ShapeStyle { styleStrokeWidth = 1
, styleStrokeColour = licorice
, styleFill = nodeColor }
-- ---------------------------------------------------------------------
-- Orphan instances coming home
{- derived by DrIFT -}
instance XML.HTypeable Shape where
toHType v = XML.Defined "Shape" []
[XML.Constr "Circle" [] [XML.toHType aa,XML.toHType ab]
,XML.Constr "Polygon" [] [XML.toHType ac,XML.toHType ad]
,XML.Constr "Lines" [] [XML.toHType ae,XML.toHType af]
,XML.Constr "Composite" [] [XML.toHType ag]]
where
(Circle aa ab) = v
(Polygon ac ad) = v
(Lines ae af) = v
(Composite ag) = v
instance XML.XmlContent Shape where
parseContents = do
{ e@(Elem (N t) _ _) <- XML.element ["Circle","Polygon","Lines","Composite"]
; case t of
_ | "Polygon" `isPrefixOf` t -> XML.interior e $
do { ac <- XML.parseContents
; ad <- XML.parseContents
; return (Polygon ac ad)
}
| "Lines" `isPrefixOf` t -> XML.interior e $
do { ae <- XML.parseContents
; af <- XML.parseContents
; return (Lines ae af)
}
| "Composite" `isPrefixOf` t -> XML.interior e $
fmap Composite XML.parseContents
| "Circle" `isPrefixOf` t -> XML.interior e $
do { aa <- XML.parseContents
; ab <- XML.parseContents
; return (Circle aa ab)
}
}
toContents v@(Circle aa ab) =
[XML.mkElemC (XML.showConstr 0 (XML.toHType v)) (concat [XML.toContents aa,
XML.toContents ab])]
toContents v@(Polygon ac ad) =
[XML.mkElemC (XML.showConstr 1 (XML.toHType v)) (concat [XML.toContents ac,
XML.toContents ad])]
toContents v@(Lines ae af) =
[XML.mkElemC (XML.showConstr 2 (XML.toHType v)) (concat [XML.toContents ae,
XML.toContents af])]
toContents v@(Composite ag) =
[XML.mkElemC (XML.showConstr 3 (XML.toHType v)) (XML.toContents ag)]
{- derived by DrIFT -}
instance XML.HTypeable ShapeStyle where
toHType v = XML.Defined "ShapeStyle" []
[XML.Constr "ShapeStyle" [] [XML.toHType aa,XML.toHType ab,XML.toHType ac]]
where (ShapeStyle aa ab ac) = v
instance XML.XmlContent ShapeStyle where
parseContents = do
{ XML.inElement "ShapeStyle" $ do
{ aa <- XML.parseContents
; ab <- XML.parseContents
; ac <- XML.parseContents
; return (ShapeStyle aa ab ac)
}
}
toContents v@(ShapeStyle aa ab ac) =
[XML.mkElemC (XML.showConstr 0 (XML.toHType v))
(concat [XML.toContents aa, XML.toContents ab, XML.toContents ac])]
| alanz/Blobs | src/Graphics/Blobs/Shape.hs | lgpl-2.1 | 10,268 | 4 | 17 | 3,648 | 2,197 | 1,164 | 1,033 | 146 | 4 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Kubernetes.V1.PersistentVolumeAccessMode where
import GHC.Generics
import qualified Data.Aeson
-- |
data PersistentVolumeAccessMode = PersistentVolumeAccessMode
{
} deriving (Show, Eq, Generic)
instance Data.Aeson.FromJSON PersistentVolumeAccessMode
instance Data.Aeson.ToJSON PersistentVolumeAccessMode
| minhdoboi/deprecated-openshift-haskell-api | kubernetes/lib/Kubernetes/V1/PersistentVolumeAccessMode.hs | apache-2.0 | 493 | 1 | 6 | 60 | 69 | 42 | 27 | 12 | 0 |
module CostasLikeArrays.A320576Spec (main, spec) where
import Test.Hspec
import CostasLikeArrays.A320576 (a320576)
main :: IO ()
main = hspec spec
spec :: Spec
spec = describe "A320576" $
it "correctly computes the first 6 elements" $
map a320576 [1..6] `shouldBe` expectedValue where
expectedValue = [1, 1, 2, 1, 1, 10]
| peterokagey/haskellOEIS | test/CostasLikeArrays/A320576Spec.hs | apache-2.0 | 335 | 0 | 8 | 62 | 112 | 64 | 48 | 10 | 1 |
-- This file is part of "Loopless Functional Algorithms".
-- Copyright (c) 2005 Jamie Snape, Oxford University Computing Laboratory.
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- https://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
module LooplessKoda where
import Data.List (unfoldr)
import RealTimeQueue
data Rose a = Node a [Rose a]
data Rose' a = Node' a (Queue (Rose' a))
koda = unfoldr step . prolog
prolog = wrapQueue . wrapTree . fst . foldr kmix (Nothing,Nothing)
kmix (Node x ts) (myt,mty) = (myt,mty) `ox` (mxt,mxt) `ox` (foldr kmix (mty,myt) ts)
where mxt = Just (Node' x empty)
(mxt,mtx) `ox` (myt,mty) = (mxt `o` myt,mty `o` mtx)
Nothing `o` myt = myt
mxt `o` Nothing = mxt
Just (Node' x xtq) `o` Just yt = Just (Node' x (insert xtq yt))
wrapTree Nothing = empty
wrapTree (Just xt) = insert empty xt
wrapQueue xtq = consQueue xtq []
consQueue xtq xtqs = if isEmpty xtq then xtqs
else xtq:xtqs
step [] = Nothing
step (xtq:xtqs) = Just (x,consQueue ytq (consQueue ztq xtqs))
where (Node' x ytq,ztq) = remove xtq
| snape/LooplessFunctionalAlgorithms | LooplessKoda.hs | apache-2.0 | 1,644 | 0 | 10 | 422 | 444 | 246 | 198 | 21 | 2 |
module Levels where
import Control.Arrow((***))
import Physics.Motion
import Constants
--import Resources
initialLevel :: Int
initialLevel = 0
numLevels :: Int
numLevels = length levels
data LevelSpec = LevelSpec
{ horizontalWallPos :: [Position]
, verticalWallPos :: [Position]
, positiveInfo :: [(Position, Char)]
, negativeInfo :: [(Position, Char)]
}
levels :: [LevelSpec]
levels = levelTest
levelTest :: [LevelSpec]
levelTest = [
LevelSpec { horizontalWallPos = hWallPosLeveled 0
, verticalWallPos = vWallPosLeveled 0
, positiveInfo = pInfoLeveled 0
, negativeInfo = nInfoLeveled 0
}
]
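-- For example, the wall layout of the starting level can be read off the
-- level table like this (an illustrative sketch only):
--
-- > startWalls :: [Position]
-- > startWalls = horizontalWallPos start ++ verticalWallPos start
-- >   where start = levels !! initialLevel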
hWallPosLeveled :: Int -> [Position]
hWallPosLeveled _ = map (\x -> Pos (x,0)) rangeF
++ map (\x -> Pos (x,19)) rangeF
vWallPosLeveled :: Int -> [Position]
vWallPosLeveled _ = map (\x -> Pos (0,x)) rangeF
                 ++ map (\x -> Pos (19,x)) rangeF
rangeF = [0..19]
pInfoLeveled :: Int -> [(Position, Char)]
pInfoLeveled _ = [(Pos (5,5), '死')]
nInfoLeveled :: Int -> [(Position, Char)]
nInfoLeveled _ = []
| no-moree-ria10/utsuEater | src/Levels.hs | apache-2.0 | 1,174 | 0 | 10 | 335 | 390 | 228 | 162 | 32 | 1 |
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Module : QCheckBox_h.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:22
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Gui.QCheckBox_h where
import Foreign.C.Types
import Qtc.Enums.Base
import Qtc.Enums.Gui.QPaintDevice
import Qtc.Enums.Core.Qt
import Qtc.Classes.Base
import Qtc.Classes.Qccs_h
import Qtc.Classes.Core_h
import Qtc.ClassTypes.Core
import Qth.ClassTypes.Core
import Qtc.Classes.Gui_h
import Qtc.ClassTypes.Gui
import Foreign.Marshal.Array
instance QunSetUserMethod (QCheckBox ()) where
unSetUserMethod qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
qtc_QCheckBox_unSetUserMethod cobj_qobj (toCInt 0) (toCInt evid)
foreign import ccall "qtc_QCheckBox_unSetUserMethod" qtc_QCheckBox_unSetUserMethod :: Ptr (TQCheckBox a) -> CInt -> CInt -> IO (CBool)
instance QunSetUserMethod (QCheckBoxSc a) where
unSetUserMethod qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
qtc_QCheckBox_unSetUserMethod cobj_qobj (toCInt 0) (toCInt evid)
instance QunSetUserMethodVariant (QCheckBox ()) where
unSetUserMethodVariant qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
qtc_QCheckBox_unSetUserMethod cobj_qobj (toCInt 1) (toCInt evid)
instance QunSetUserMethodVariant (QCheckBoxSc a) where
unSetUserMethodVariant qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
qtc_QCheckBox_unSetUserMethod cobj_qobj (toCInt 1) (toCInt evid)
instance QunSetUserMethodVariantList (QCheckBox ()) where
unSetUserMethodVariantList qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
qtc_QCheckBox_unSetUserMethod cobj_qobj (toCInt 2) (toCInt evid)
instance QunSetUserMethodVariantList (QCheckBoxSc a) where
unSetUserMethodVariantList qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
qtc_QCheckBox_unSetUserMethod cobj_qobj (toCInt 2) (toCInt evid)
instance QsetUserMethod (QCheckBox ()) (QCheckBox x0 -> IO ()) where
setUserMethod _eobj _eid _handler
= do
funptr <- wrapSetUserMethod_QCheckBox setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetUserMethod_QCheckBox_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
qtc_QCheckBox_setUserMethod cobj_eobj (toCInt _eid) (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return ()
where
setHandlerWrapper :: Ptr (TQCheckBox x0) -> IO ()
setHandlerWrapper x0
= do
x0obj <- objectFromPtr_nf x0
if (objectIsNull x0obj)
then return ()
else _handler x0obj
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QCheckBox_setUserMethod" qtc_QCheckBox_setUserMethod :: Ptr (TQCheckBox a) -> CInt -> Ptr (Ptr (TQCheckBox x0) -> IO ()) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetUserMethod_QCheckBox :: (Ptr (TQCheckBox x0) -> IO ()) -> IO (FunPtr (Ptr (TQCheckBox x0) -> IO ()))
foreign import ccall "wrapper" wrapSetUserMethod_QCheckBox_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetUserMethod (QCheckBoxSc a) (QCheckBox x0 -> IO ()) where
setUserMethod _eobj _eid _handler
= do
funptr <- wrapSetUserMethod_QCheckBox setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetUserMethod_QCheckBox_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
qtc_QCheckBox_setUserMethod cobj_eobj (toCInt _eid) (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return ()
where
setHandlerWrapper :: Ptr (TQCheckBox x0) -> IO ()
setHandlerWrapper x0
= do
x0obj <- objectFromPtr_nf x0
if (objectIsNull x0obj)
then return ()
else _handler x0obj
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
instance QsetUserMethod (QCheckBox ()) (QCheckBox x0 -> QVariant () -> IO (QVariant ())) where
setUserMethod _eobj _eid _handler
= do
funptr <- wrapSetUserMethodVariant_QCheckBox setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetUserMethodVariant_QCheckBox_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
qtc_QCheckBox_setUserMethodVariant cobj_eobj (toCInt _eid) (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return ()
where
setHandlerWrapper :: Ptr (TQCheckBox x0) -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))
setHandlerWrapper x0 x1
= do
x0obj <- objectFromPtr_nf x0
x1obj <- objectFromPtr_nf x1
rv <- if (objectIsNull x0obj)
then return $ objectCast x0obj
else _handler x0obj x1obj
withObjectPtr rv $ \cobj_rv -> return cobj_rv
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QCheckBox_setUserMethodVariant" qtc_QCheckBox_setUserMethodVariant :: Ptr (TQCheckBox a) -> CInt -> Ptr (Ptr (TQCheckBox x0) -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetUserMethodVariant_QCheckBox :: (Ptr (TQCheckBox x0) -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))) -> IO (FunPtr (Ptr (TQCheckBox x0) -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))))
foreign import ccall "wrapper" wrapSetUserMethodVariant_QCheckBox_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetUserMethod (QCheckBoxSc a) (QCheckBox x0 -> QVariant () -> IO (QVariant ())) where
setUserMethod _eobj _eid _handler
= do
funptr <- wrapSetUserMethodVariant_QCheckBox setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetUserMethodVariant_QCheckBox_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
qtc_QCheckBox_setUserMethodVariant cobj_eobj (toCInt _eid) (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return ()
where
setHandlerWrapper :: Ptr (TQCheckBox x0) -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))
setHandlerWrapper x0 x1
= do
x0obj <- objectFromPtr_nf x0
x1obj <- objectFromPtr_nf x1
rv <- if (objectIsNull x0obj)
then return $ objectCast x0obj
else _handler x0obj x1obj
withObjectPtr rv $ \cobj_rv -> return cobj_rv
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
instance QunSetHandler (QCheckBox ()) where
unSetHandler qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
withCWString evid $ \cstr_evid ->
qtc_QCheckBox_unSetHandler cobj_qobj cstr_evid
foreign import ccall "qtc_QCheckBox_unSetHandler" qtc_QCheckBox_unSetHandler :: Ptr (TQCheckBox a) -> CWString -> IO (CBool)
instance QunSetHandler (QCheckBoxSc a) where
unSetHandler qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
withCWString evid $ \cstr_evid ->
qtc_QCheckBox_unSetHandler cobj_qobj cstr_evid
instance QsetHandler (QCheckBox ()) (QCheckBox x0 -> QEvent t1 -> IO (Bool)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QCheckBox1 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QCheckBox1_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QCheckBox_setHandler1 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQCheckBox x0) -> Ptr (TQEvent t1) -> IO (CBool)
setHandlerWrapper x0 x1
= do x0obj <- qCheckBoxFromPtr x0
x1obj <- objectFromPtr_nf x1
let rv =
if (objectIsNull x0obj)
then return False
else _handler x0obj x1obj
rvf <- rv
return (toCBool rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QCheckBox_setHandler1" qtc_QCheckBox_setHandler1 :: Ptr (TQCheckBox a) -> CWString -> Ptr (Ptr (TQCheckBox x0) -> Ptr (TQEvent t1) -> IO (CBool)) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetHandler_QCheckBox1 :: (Ptr (TQCheckBox x0) -> Ptr (TQEvent t1) -> IO (CBool)) -> IO (FunPtr (Ptr (TQCheckBox x0) -> Ptr (TQEvent t1) -> IO (CBool)))
foreign import ccall "wrapper" wrapSetHandler_QCheckBox1_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetHandler (QCheckBoxSc a) (QCheckBox x0 -> QEvent t1 -> IO (Bool)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QCheckBox1 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QCheckBox1_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QCheckBox_setHandler1 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQCheckBox x0) -> Ptr (TQEvent t1) -> IO (CBool)
setHandlerWrapper x0 x1
= do x0obj <- qCheckBoxFromPtr x0
x1obj <- objectFromPtr_nf x1
let rv =
if (objectIsNull x0obj)
then return False
else _handler x0obj x1obj
rvf <- rv
return (toCBool rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
instance Qevent_h (QCheckBox ()) ((QEvent t1)) where
event_h x0 (x1)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QCheckBox_event cobj_x0 cobj_x1
foreign import ccall "qtc_QCheckBox_event" qtc_QCheckBox_event :: Ptr (TQCheckBox a) -> Ptr (TQEvent t1) -> IO CBool
instance Qevent_h (QCheckBoxSc a) ((QEvent t1)) where
event_h x0 (x1)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QCheckBox_event cobj_x0 cobj_x1
instance QsetHandler (QCheckBox ()) (QCheckBox x0 -> QEvent t1 -> IO ()) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QCheckBox2 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QCheckBox2_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QCheckBox_setHandler2 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQCheckBox x0) -> Ptr (TQEvent t1) -> IO ()
setHandlerWrapper x0 x1
= do x0obj <- qCheckBoxFromPtr x0
x1obj <- objectFromPtr_nf x1
if (objectIsNull x0obj)
then return ()
else _handler x0obj x1obj
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QCheckBox_setHandler2" qtc_QCheckBox_setHandler2 :: Ptr (TQCheckBox a) -> CWString -> Ptr (Ptr (TQCheckBox x0) -> Ptr (TQEvent t1) -> IO ()) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetHandler_QCheckBox2 :: (Ptr (TQCheckBox x0) -> Ptr (TQEvent t1) -> IO ()) -> IO (FunPtr (Ptr (TQCheckBox x0) -> Ptr (TQEvent t1) -> IO ()))
foreign import ccall "wrapper" wrapSetHandler_QCheckBox2_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetHandler (QCheckBoxSc a) (QCheckBox x0 -> QEvent t1 -> IO ()) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QCheckBox2 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QCheckBox2_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QCheckBox_setHandler2 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQCheckBox x0) -> Ptr (TQEvent t1) -> IO ()
setHandlerWrapper x0 x1
= do x0obj <- qCheckBoxFromPtr x0
x1obj <- objectFromPtr_nf x1
if (objectIsNull x0obj)
then return ()
else _handler x0obj x1obj
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
instance QmouseMoveEvent_h (QCheckBox ()) ((QMouseEvent t1)) where
mouseMoveEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QCheckBox_mouseMoveEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QCheckBox_mouseMoveEvent" qtc_QCheckBox_mouseMoveEvent :: Ptr (TQCheckBox a) -> Ptr (TQMouseEvent t1) -> IO ()
instance QmouseMoveEvent_h (QCheckBoxSc a) ((QMouseEvent t1)) where
mouseMoveEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QCheckBox_mouseMoveEvent cobj_x0 cobj_x1
instance QpaintEvent_h (QCheckBox ()) ((QPaintEvent t1)) where
paintEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QCheckBox_paintEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QCheckBox_paintEvent" qtc_QCheckBox_paintEvent :: Ptr (TQCheckBox a) -> Ptr (TQPaintEvent t1) -> IO ()
instance QpaintEvent_h (QCheckBoxSc a) ((QPaintEvent t1)) where
paintEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QCheckBox_paintEvent cobj_x0 cobj_x1
instance QsetHandler (QCheckBox ()) (QCheckBox x0 -> IO (QSize t0)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QCheckBox3 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QCheckBox3_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QCheckBox_setHandler3 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQCheckBox x0) -> IO (Ptr (TQSize t0))
setHandlerWrapper x0
= do x0obj <- qCheckBoxFromPtr x0
let rv =
if (objectIsNull x0obj)
then return $ objectCast x0obj
else _handler x0obj
rvf <- rv
withObjectPtr rvf $ \cobj_rvf -> return (cobj_rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QCheckBox_setHandler3" qtc_QCheckBox_setHandler3 :: Ptr (TQCheckBox a) -> CWString -> Ptr (Ptr (TQCheckBox x0) -> IO (Ptr (TQSize t0))) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetHandler_QCheckBox3 :: (Ptr (TQCheckBox x0) -> IO (Ptr (TQSize t0))) -> IO (FunPtr (Ptr (TQCheckBox x0) -> IO (Ptr (TQSize t0))))
foreign import ccall "wrapper" wrapSetHandler_QCheckBox3_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetHandler (QCheckBoxSc a) (QCheckBox x0 -> IO (QSize t0)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QCheckBox3 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QCheckBox3_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QCheckBox_setHandler3 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQCheckBox x0) -> IO (Ptr (TQSize t0))
setHandlerWrapper x0
= do x0obj <- qCheckBoxFromPtr x0
let rv =
if (objectIsNull x0obj)
then return $ objectCast x0obj
else _handler x0obj
rvf <- rv
withObjectPtr rvf $ \cobj_rvf -> return (cobj_rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
instance QqsizeHint_h (QCheckBox ()) (()) where
qsizeHint_h x0 ()
= withQSizeResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QCheckBox_sizeHint cobj_x0
foreign import ccall "qtc_QCheckBox_sizeHint" qtc_QCheckBox_sizeHint :: Ptr (TQCheckBox a) -> IO (Ptr (TQSize ()))
instance QqsizeHint_h (QCheckBoxSc a) (()) where
qsizeHint_h x0 ()
= withQSizeResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QCheckBox_sizeHint cobj_x0
instance QsizeHint_h (QCheckBox ()) (()) where
sizeHint_h x0 ()
= withSizeResult $ \csize_ret_w csize_ret_h ->
withObjectPtr x0 $ \cobj_x0 ->
qtc_QCheckBox_sizeHint_qth cobj_x0 csize_ret_w csize_ret_h
foreign import ccall "qtc_QCheckBox_sizeHint_qth" qtc_QCheckBox_sizeHint_qth :: Ptr (TQCheckBox a) -> Ptr CInt -> Ptr CInt -> IO ()
instance QsizeHint_h (QCheckBoxSc a) (()) where
sizeHint_h x0 ()
= withSizeResult $ \csize_ret_w csize_ret_h ->
withObjectPtr x0 $ \cobj_x0 ->
qtc_QCheckBox_sizeHint_qth cobj_x0 csize_ret_w csize_ret_h
instance QchangeEvent_h (QCheckBox ()) ((QEvent t1)) where
changeEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QCheckBox_changeEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QCheckBox_changeEvent" qtc_QCheckBox_changeEvent :: Ptr (TQCheckBox a) -> Ptr (TQEvent t1) -> IO ()
instance QchangeEvent_h (QCheckBoxSc a) ((QEvent t1)) where
changeEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QCheckBox_changeEvent cobj_x0 cobj_x1
instance QfocusInEvent_h (QCheckBox ()) ((QFocusEvent t1)) where
focusInEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QCheckBox_focusInEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QCheckBox_focusInEvent" qtc_QCheckBox_focusInEvent :: Ptr (TQCheckBox a) -> Ptr (TQFocusEvent t1) -> IO ()
instance QfocusInEvent_h (QCheckBoxSc a) ((QFocusEvent t1)) where
focusInEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QCheckBox_focusInEvent cobj_x0 cobj_x1
instance QfocusOutEvent_h (QCheckBox ()) ((QFocusEvent t1)) where
focusOutEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QCheckBox_focusOutEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QCheckBox_focusOutEvent" qtc_QCheckBox_focusOutEvent :: Ptr (TQCheckBox a) -> Ptr (TQFocusEvent t1) -> IO ()
instance QfocusOutEvent_h (QCheckBoxSc a) ((QFocusEvent t1)) where
focusOutEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QCheckBox_focusOutEvent cobj_x0 cobj_x1
instance QkeyPressEvent_h (QCheckBox ()) ((QKeyEvent t1)) where
keyPressEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QCheckBox_keyPressEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QCheckBox_keyPressEvent" qtc_QCheckBox_keyPressEvent :: Ptr (TQCheckBox a) -> Ptr (TQKeyEvent t1) -> IO ()
instance QkeyPressEvent_h (QCheckBoxSc a) ((QKeyEvent t1)) where
keyPressEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QCheckBox_keyPressEvent cobj_x0 cobj_x1
instance QkeyReleaseEvent_h (QCheckBox ()) ((QKeyEvent t1)) where
keyReleaseEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QCheckBox_keyReleaseEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QCheckBox_keyReleaseEvent" qtc_QCheckBox_keyReleaseEvent :: Ptr (TQCheckBox a) -> Ptr (TQKeyEvent t1) -> IO ()
instance QkeyReleaseEvent_h (QCheckBoxSc a) ((QKeyEvent t1)) where
keyReleaseEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QCheckBox_keyReleaseEvent cobj_x0 cobj_x1
instance QmousePressEvent_h (QCheckBox ()) ((QMouseEvent t1)) where
mousePressEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QCheckBox_mousePressEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QCheckBox_mousePressEvent" qtc_QCheckBox_mousePressEvent :: Ptr (TQCheckBox a) -> Ptr (TQMouseEvent t1) -> IO ()
instance QmousePressEvent_h (QCheckBoxSc a) ((QMouseEvent t1)) where
mousePressEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QCheckBox_mousePressEvent cobj_x0 cobj_x1
instance QmouseReleaseEvent_h (QCheckBox ()) ((QMouseEvent t1)) where
mouseReleaseEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QCheckBox_mouseReleaseEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QCheckBox_mouseReleaseEvent" qtc_QCheckBox_mouseReleaseEvent :: Ptr (TQCheckBox a) -> Ptr (TQMouseEvent t1) -> IO ()
instance QmouseReleaseEvent_h (QCheckBoxSc a) ((QMouseEvent t1)) where
mouseReleaseEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QCheckBox_mouseReleaseEvent cobj_x0 cobj_x1
instance QactionEvent_h (QCheckBox ()) ((QActionEvent t1)) where
actionEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QCheckBox_actionEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QCheckBox_actionEvent" qtc_QCheckBox_actionEvent :: Ptr (TQCheckBox a) -> Ptr (TQActionEvent t1) -> IO ()
instance QactionEvent_h (QCheckBoxSc a) ((QActionEvent t1)) where
actionEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QCheckBox_actionEvent cobj_x0 cobj_x1
instance QcloseEvent_h (QCheckBox ()) ((QCloseEvent t1)) where
closeEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QCheckBox_closeEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QCheckBox_closeEvent" qtc_QCheckBox_closeEvent :: Ptr (TQCheckBox a) -> Ptr (TQCloseEvent t1) -> IO ()
instance QcloseEvent_h (QCheckBoxSc a) ((QCloseEvent t1)) where
closeEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QCheckBox_closeEvent cobj_x0 cobj_x1
instance QcontextMenuEvent_h (QCheckBox ()) ((QContextMenuEvent t1)) where
contextMenuEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QCheckBox_contextMenuEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QCheckBox_contextMenuEvent" qtc_QCheckBox_contextMenuEvent :: Ptr (TQCheckBox a) -> Ptr (TQContextMenuEvent t1) -> IO ()
instance QcontextMenuEvent_h (QCheckBoxSc a) ((QContextMenuEvent t1)) where
contextMenuEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QCheckBox_contextMenuEvent cobj_x0 cobj_x1
instance QsetHandler (QCheckBox ()) (QCheckBox x0 -> IO (Int)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QCheckBox4 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QCheckBox4_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QCheckBox_setHandler4 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQCheckBox x0) -> IO (CInt)
setHandlerWrapper x0
= do x0obj <- qCheckBoxFromPtr x0
let rv =
if (objectIsNull x0obj)
then return 0
else _handler x0obj
rvf <- rv
return (toCInt rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QCheckBox_setHandler4" qtc_QCheckBox_setHandler4 :: Ptr (TQCheckBox a) -> CWString -> Ptr (Ptr (TQCheckBox x0) -> IO (CInt)) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetHandler_QCheckBox4 :: (Ptr (TQCheckBox x0) -> IO (CInt)) -> IO (FunPtr (Ptr (TQCheckBox x0) -> IO (CInt)))
foreign import ccall "wrapper" wrapSetHandler_QCheckBox4_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetHandler (QCheckBoxSc a) (QCheckBox x0 -> IO (Int)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QCheckBox4 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QCheckBox4_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QCheckBox_setHandler4 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQCheckBox x0) -> IO (CInt)
setHandlerWrapper x0
= do x0obj <- qCheckBoxFromPtr x0
let rv =
if (objectIsNull x0obj)
then return 0
else _handler x0obj
rvf <- rv
return (toCInt rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
instance QdevType_h (QCheckBox ()) (()) where
devType_h x0 ()
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QCheckBox_devType cobj_x0
foreign import ccall "qtc_QCheckBox_devType" qtc_QCheckBox_devType :: Ptr (TQCheckBox a) -> IO CInt
instance QdevType_h (QCheckBoxSc a) (()) where
devType_h x0 ()
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QCheckBox_devType cobj_x0
instance QdragEnterEvent_h (QCheckBox ()) ((QDragEnterEvent t1)) where
dragEnterEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QCheckBox_dragEnterEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QCheckBox_dragEnterEvent" qtc_QCheckBox_dragEnterEvent :: Ptr (TQCheckBox a) -> Ptr (TQDragEnterEvent t1) -> IO ()
instance QdragEnterEvent_h (QCheckBoxSc a) ((QDragEnterEvent t1)) where
dragEnterEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QCheckBox_dragEnterEvent cobj_x0 cobj_x1
instance QdragLeaveEvent_h (QCheckBox ()) ((QDragLeaveEvent t1)) where
dragLeaveEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QCheckBox_dragLeaveEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QCheckBox_dragLeaveEvent" qtc_QCheckBox_dragLeaveEvent :: Ptr (TQCheckBox a) -> Ptr (TQDragLeaveEvent t1) -> IO ()
instance QdragLeaveEvent_h (QCheckBoxSc a) ((QDragLeaveEvent t1)) where
dragLeaveEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QCheckBox_dragLeaveEvent cobj_x0 cobj_x1
instance QdragMoveEvent_h (QCheckBox ()) ((QDragMoveEvent t1)) where
dragMoveEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QCheckBox_dragMoveEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QCheckBox_dragMoveEvent" qtc_QCheckBox_dragMoveEvent :: Ptr (TQCheckBox a) -> Ptr (TQDragMoveEvent t1) -> IO ()
instance QdragMoveEvent_h (QCheckBoxSc a) ((QDragMoveEvent t1)) where
dragMoveEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QCheckBox_dragMoveEvent cobj_x0 cobj_x1
instance QdropEvent_h (QCheckBox ()) ((QDropEvent t1)) where
dropEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QCheckBox_dropEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QCheckBox_dropEvent" qtc_QCheckBox_dropEvent :: Ptr (TQCheckBox a) -> Ptr (TQDropEvent t1) -> IO ()
instance QdropEvent_h (QCheckBoxSc a) ((QDropEvent t1)) where
dropEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QCheckBox_dropEvent cobj_x0 cobj_x1
instance QenterEvent_h (QCheckBox ()) ((QEvent t1)) where
enterEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QCheckBox_enterEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QCheckBox_enterEvent" qtc_QCheckBox_enterEvent :: Ptr (TQCheckBox a) -> Ptr (TQEvent t1) -> IO ()
instance QenterEvent_h (QCheckBoxSc a) ((QEvent t1)) where
enterEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QCheckBox_enterEvent cobj_x0 cobj_x1
instance QsetHandler (QCheckBox ()) (QCheckBox x0 -> Int -> IO (Int)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QCheckBox5 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QCheckBox5_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QCheckBox_setHandler5 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQCheckBox x0) -> CInt -> IO (CInt)
setHandlerWrapper x0 x1
= do x0obj <- qCheckBoxFromPtr x0
let x1int = fromCInt x1
let rv =
if (objectIsNull x0obj)
then return 0
else _handler x0obj x1int
rvf <- rv
return (toCInt rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QCheckBox_setHandler5" qtc_QCheckBox_setHandler5 :: Ptr (TQCheckBox a) -> CWString -> Ptr (Ptr (TQCheckBox x0) -> CInt -> IO (CInt)) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetHandler_QCheckBox5 :: (Ptr (TQCheckBox x0) -> CInt -> IO (CInt)) -> IO (FunPtr (Ptr (TQCheckBox x0) -> CInt -> IO (CInt)))
foreign import ccall "wrapper" wrapSetHandler_QCheckBox5_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetHandler (QCheckBoxSc a) (QCheckBox x0 -> Int -> IO (Int)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QCheckBox5 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QCheckBox5_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QCheckBox_setHandler5 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQCheckBox x0) -> CInt -> IO (CInt)
setHandlerWrapper x0 x1
= do x0obj <- qCheckBoxFromPtr x0
let x1int = fromCInt x1
let rv =
if (objectIsNull x0obj)
then return 0
else _handler x0obj x1int
rvf <- rv
return (toCInt rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
instance QheightForWidth_h (QCheckBox ()) ((Int)) where
heightForWidth_h x0 (x1)
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QCheckBox_heightForWidth cobj_x0 (toCInt x1)
foreign import ccall "qtc_QCheckBox_heightForWidth" qtc_QCheckBox_heightForWidth :: Ptr (TQCheckBox a) -> CInt -> IO CInt
instance QheightForWidth_h (QCheckBoxSc a) ((Int)) where
heightForWidth_h x0 (x1)
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QCheckBox_heightForWidth cobj_x0 (toCInt x1)
instance QhideEvent_h (QCheckBox ()) ((QHideEvent t1)) where
hideEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QCheckBox_hideEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QCheckBox_hideEvent" qtc_QCheckBox_hideEvent :: Ptr (TQCheckBox a) -> Ptr (TQHideEvent t1) -> IO ()
instance QhideEvent_h (QCheckBoxSc a) ((QHideEvent t1)) where
hideEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QCheckBox_hideEvent cobj_x0 cobj_x1
instance QsetHandler (QCheckBox ()) (QCheckBox x0 -> InputMethodQuery -> IO (QVariant t0)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QCheckBox6 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QCheckBox6_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QCheckBox_setHandler6 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQCheckBox x0) -> CLong -> IO (Ptr (TQVariant t0))
setHandlerWrapper x0 x1
= do x0obj <- qCheckBoxFromPtr x0
let x1enum = qEnum_fromInt $ fromCLong x1
let rv =
if (objectIsNull x0obj)
then return $ objectCast x0obj
else _handler x0obj x1enum
rvf <- rv
withObjectPtr rvf $ \cobj_rvf -> return (cobj_rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QCheckBox_setHandler6" qtc_QCheckBox_setHandler6 :: Ptr (TQCheckBox a) -> CWString -> Ptr (Ptr (TQCheckBox x0) -> CLong -> IO (Ptr (TQVariant t0))) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetHandler_QCheckBox6 :: (Ptr (TQCheckBox x0) -> CLong -> IO (Ptr (TQVariant t0))) -> IO (FunPtr (Ptr (TQCheckBox x0) -> CLong -> IO (Ptr (TQVariant t0))))
foreign import ccall "wrapper" wrapSetHandler_QCheckBox6_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetHandler (QCheckBoxSc a) (QCheckBox x0 -> InputMethodQuery -> IO (QVariant t0)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QCheckBox6 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QCheckBox6_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QCheckBox_setHandler6 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQCheckBox x0) -> CLong -> IO (Ptr (TQVariant t0))
setHandlerWrapper x0 x1
= do x0obj <- qCheckBoxFromPtr x0
let x1enum = qEnum_fromInt $ fromCLong x1
let rv =
if (objectIsNull x0obj)
then return $ objectCast x0obj
else _handler x0obj x1enum
rvf <- rv
withObjectPtr rvf $ \cobj_rvf -> return (cobj_rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
instance QinputMethodQuery_h (QCheckBox ()) ((InputMethodQuery)) where
inputMethodQuery_h x0 (x1)
= withQVariantResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QCheckBox_inputMethodQuery cobj_x0 (toCLong $ qEnum_toInt x1)
foreign import ccall "qtc_QCheckBox_inputMethodQuery" qtc_QCheckBox_inputMethodQuery :: Ptr (TQCheckBox a) -> CLong -> IO (Ptr (TQVariant ()))
instance QinputMethodQuery_h (QCheckBoxSc a) ((InputMethodQuery)) where
inputMethodQuery_h x0 (x1)
= withQVariantResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QCheckBox_inputMethodQuery cobj_x0 (toCLong $ qEnum_toInt x1)
instance QleaveEvent_h (QCheckBox ()) ((QEvent t1)) where
leaveEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QCheckBox_leaveEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QCheckBox_leaveEvent" qtc_QCheckBox_leaveEvent :: Ptr (TQCheckBox a) -> Ptr (TQEvent t1) -> IO ()
instance QleaveEvent_h (QCheckBoxSc a) ((QEvent t1)) where
leaveEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QCheckBox_leaveEvent cobj_x0 cobj_x1
instance QqminimumSizeHint_h (QCheckBox ()) (()) where
qminimumSizeHint_h x0 ()
= withQSizeResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QCheckBox_minimumSizeHint cobj_x0
foreign import ccall "qtc_QCheckBox_minimumSizeHint" qtc_QCheckBox_minimumSizeHint :: Ptr (TQCheckBox a) -> IO (Ptr (TQSize ()))
instance QqminimumSizeHint_h (QCheckBoxSc a) (()) where
qminimumSizeHint_h x0 ()
= withQSizeResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QCheckBox_minimumSizeHint cobj_x0
instance QminimumSizeHint_h (QCheckBox ()) (()) where
minimumSizeHint_h x0 ()
= withSizeResult $ \csize_ret_w csize_ret_h ->
withObjectPtr x0 $ \cobj_x0 ->
qtc_QCheckBox_minimumSizeHint_qth cobj_x0 csize_ret_w csize_ret_h
foreign import ccall "qtc_QCheckBox_minimumSizeHint_qth" qtc_QCheckBox_minimumSizeHint_qth :: Ptr (TQCheckBox a) -> Ptr CInt -> Ptr CInt -> IO ()
instance QminimumSizeHint_h (QCheckBoxSc a) (()) where
minimumSizeHint_h x0 ()
= withSizeResult $ \csize_ret_w csize_ret_h ->
withObjectPtr x0 $ \cobj_x0 ->
qtc_QCheckBox_minimumSizeHint_qth cobj_x0 csize_ret_w csize_ret_h
instance QmouseDoubleClickEvent_h (QCheckBox ()) ((QMouseEvent t1)) where
mouseDoubleClickEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QCheckBox_mouseDoubleClickEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QCheckBox_mouseDoubleClickEvent" qtc_QCheckBox_mouseDoubleClickEvent :: Ptr (TQCheckBox a) -> Ptr (TQMouseEvent t1) -> IO ()
instance QmouseDoubleClickEvent_h (QCheckBoxSc a) ((QMouseEvent t1)) where
mouseDoubleClickEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QCheckBox_mouseDoubleClickEvent cobj_x0 cobj_x1
instance QmoveEvent_h (QCheckBox ()) ((QMoveEvent t1)) where
moveEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QCheckBox_moveEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QCheckBox_moveEvent" qtc_QCheckBox_moveEvent :: Ptr (TQCheckBox a) -> Ptr (TQMoveEvent t1) -> IO ()
instance QmoveEvent_h (QCheckBoxSc a) ((QMoveEvent t1)) where
moveEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QCheckBox_moveEvent cobj_x0 cobj_x1
instance QsetHandler (QCheckBox ()) (QCheckBox x0 -> IO (QPaintEngine t0)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QCheckBox7 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QCheckBox7_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QCheckBox_setHandler7 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQCheckBox x0) -> IO (Ptr (TQPaintEngine t0))
setHandlerWrapper x0
= do x0obj <- qCheckBoxFromPtr x0
let rv =
if (objectIsNull x0obj)
then return $ objectCast x0obj
else _handler x0obj
rvf <- rv
withObjectPtr rvf $ \cobj_rvf -> return (cobj_rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QCheckBox_setHandler7" qtc_QCheckBox_setHandler7 :: Ptr (TQCheckBox a) -> CWString -> Ptr (Ptr (TQCheckBox x0) -> IO (Ptr (TQPaintEngine t0))) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetHandler_QCheckBox7 :: (Ptr (TQCheckBox x0) -> IO (Ptr (TQPaintEngine t0))) -> IO (FunPtr (Ptr (TQCheckBox x0) -> IO (Ptr (TQPaintEngine t0))))
foreign import ccall "wrapper" wrapSetHandler_QCheckBox7_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetHandler (QCheckBoxSc a) (QCheckBox x0 -> IO (QPaintEngine t0)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QCheckBox7 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QCheckBox7_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QCheckBox_setHandler7 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQCheckBox x0) -> IO (Ptr (TQPaintEngine t0))
setHandlerWrapper x0
= do x0obj <- qCheckBoxFromPtr x0
let rv =
if (objectIsNull x0obj)
then return $ objectCast x0obj
else _handler x0obj
rvf <- rv
withObjectPtr rvf $ \cobj_rvf -> return (cobj_rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
instance QpaintEngine_h (QCheckBox ()) (()) where
paintEngine_h x0 ()
= withObjectRefResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QCheckBox_paintEngine cobj_x0
foreign import ccall "qtc_QCheckBox_paintEngine" qtc_QCheckBox_paintEngine :: Ptr (TQCheckBox a) -> IO (Ptr (TQPaintEngine ()))
instance QpaintEngine_h (QCheckBoxSc a) (()) where
paintEngine_h x0 ()
= withObjectRefResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QCheckBox_paintEngine cobj_x0
instance QresizeEvent_h (QCheckBox ()) ((QResizeEvent t1)) where
resizeEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QCheckBox_resizeEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QCheckBox_resizeEvent" qtc_QCheckBox_resizeEvent :: Ptr (TQCheckBox a) -> Ptr (TQResizeEvent t1) -> IO ()
instance QresizeEvent_h (QCheckBoxSc a) ((QResizeEvent t1)) where
resizeEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QCheckBox_resizeEvent cobj_x0 cobj_x1
instance QsetHandler (QCheckBox ()) (QCheckBox x0 -> Bool -> IO ()) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QCheckBox8 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QCheckBox8_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QCheckBox_setHandler8 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQCheckBox x0) -> CBool -> IO ()
setHandlerWrapper x0 x1
= do x0obj <- qCheckBoxFromPtr x0
let x1bool = fromCBool x1
if (objectIsNull x0obj)
then return ()
else _handler x0obj x1bool
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QCheckBox_setHandler8" qtc_QCheckBox_setHandler8 :: Ptr (TQCheckBox a) -> CWString -> Ptr (Ptr (TQCheckBox x0) -> CBool -> IO ()) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetHandler_QCheckBox8 :: (Ptr (TQCheckBox x0) -> CBool -> IO ()) -> IO (FunPtr (Ptr (TQCheckBox x0) -> CBool -> IO ()))
foreign import ccall "wrapper" wrapSetHandler_QCheckBox8_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetHandler (QCheckBoxSc a) (QCheckBox x0 -> Bool -> IO ()) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QCheckBox8 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QCheckBox8_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QCheckBox_setHandler8 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQCheckBox x0) -> CBool -> IO ()
setHandlerWrapper x0 x1
= do x0obj <- qCheckBoxFromPtr x0
let x1bool = fromCBool x1
if (objectIsNull x0obj)
then return ()
else _handler x0obj x1bool
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
instance QsetVisible_h (QCheckBox ()) ((Bool)) where
setVisible_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QCheckBox_setVisible cobj_x0 (toCBool x1)
foreign import ccall "qtc_QCheckBox_setVisible" qtc_QCheckBox_setVisible :: Ptr (TQCheckBox a) -> CBool -> IO ()
instance QsetVisible_h (QCheckBoxSc a) ((Bool)) where
setVisible_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QCheckBox_setVisible cobj_x0 (toCBool x1)
instance QshowEvent_h (QCheckBox ()) ((QShowEvent t1)) where
showEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QCheckBox_showEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QCheckBox_showEvent" qtc_QCheckBox_showEvent :: Ptr (TQCheckBox a) -> Ptr (TQShowEvent t1) -> IO ()
instance QshowEvent_h (QCheckBoxSc a) ((QShowEvent t1)) where
showEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QCheckBox_showEvent cobj_x0 cobj_x1
instance QtabletEvent_h (QCheckBox ()) ((QTabletEvent t1)) where
tabletEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QCheckBox_tabletEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QCheckBox_tabletEvent" qtc_QCheckBox_tabletEvent :: Ptr (TQCheckBox a) -> Ptr (TQTabletEvent t1) -> IO ()
instance QtabletEvent_h (QCheckBoxSc a) ((QTabletEvent t1)) where
tabletEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QCheckBox_tabletEvent cobj_x0 cobj_x1
instance QwheelEvent_h (QCheckBox ()) ((QWheelEvent t1)) where
wheelEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QCheckBox_wheelEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QCheckBox_wheelEvent" qtc_QCheckBox_wheelEvent :: Ptr (TQCheckBox a) -> Ptr (TQWheelEvent t1) -> IO ()
instance QwheelEvent_h (QCheckBoxSc a) ((QWheelEvent t1)) where
wheelEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QCheckBox_wheelEvent cobj_x0 cobj_x1
instance QsetHandler (QCheckBox ()) (QCheckBox x0 -> QObject t1 -> QEvent t2 -> IO (Bool)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QCheckBox9 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QCheckBox9_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QCheckBox_setHandler9 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQCheckBox x0) -> Ptr (TQObject t1) -> Ptr (TQEvent t2) -> IO (CBool)
setHandlerWrapper x0 x1 x2
= do x0obj <- qCheckBoxFromPtr x0
x1obj <- qObjectFromPtr x1
x2obj <- objectFromPtr_nf x2
let rv =
if (objectIsNull x0obj)
then return False
else _handler x0obj x1obj x2obj
rvf <- rv
return (toCBool rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QCheckBox_setHandler9" qtc_QCheckBox_setHandler9 :: Ptr (TQCheckBox a) -> CWString -> Ptr (Ptr (TQCheckBox x0) -> Ptr (TQObject t1) -> Ptr (TQEvent t2) -> IO (CBool)) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetHandler_QCheckBox9 :: (Ptr (TQCheckBox x0) -> Ptr (TQObject t1) -> Ptr (TQEvent t2) -> IO (CBool)) -> IO (FunPtr (Ptr (TQCheckBox x0) -> Ptr (TQObject t1) -> Ptr (TQEvent t2) -> IO (CBool)))
foreign import ccall "wrapper" wrapSetHandler_QCheckBox9_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetHandler (QCheckBoxSc a) (QCheckBox x0 -> QObject t1 -> QEvent t2 -> IO (Bool)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QCheckBox9 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QCheckBox9_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QCheckBox_setHandler9 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQCheckBox x0) -> Ptr (TQObject t1) -> Ptr (TQEvent t2) -> IO (CBool)
setHandlerWrapper x0 x1 x2
= do x0obj <- qCheckBoxFromPtr x0
x1obj <- qObjectFromPtr x1
x2obj <- objectFromPtr_nf x2
let rv =
if (objectIsNull x0obj)
then return False
else _handler x0obj x1obj x2obj
rvf <- rv
return (toCBool rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
instance QeventFilter_h (QCheckBox ()) ((QObject t1, QEvent t2)) where
eventFilter_h x0 (x1, x2)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QCheckBox_eventFilter cobj_x0 cobj_x1 cobj_x2
foreign import ccall "qtc_QCheckBox_eventFilter" qtc_QCheckBox_eventFilter :: Ptr (TQCheckBox a) -> Ptr (TQObject t1) -> Ptr (TQEvent t2) -> IO CBool
instance QeventFilter_h (QCheckBoxSc a) ((QObject t1, QEvent t2)) where
eventFilter_h x0 (x1, x2)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QCheckBox_eventFilter cobj_x0 cobj_x1 cobj_x2
| keera-studios/hsQt | Qtc/Gui/QCheckBox_h.hs | bsd-2-clause | 56,487 | 0 | 18 | 12,271 | 18,825 | 9,082 | 9,743 | -1 | -1 |
-- PaperReader.hs
--
{-# LANGUAGE TypeSynonymInstances, FlexibleInstances, TemplateHaskell #-}
module Parser.PaperReader (
parseHtml
, readerFromUrl
, readersFromUrl
, readerFromHtml
, readersFromHtml
, readersFromHtmlDoc
) where
import Parser.Import
import Control.Applicative
import Control.Lens hiding ((.=))
import qualified Parser.Lens as L
import Data.Text (Text)
import qualified Data.Text as T
import Data.Text.Lazy (fromStrict)
import Data.Text.Lazy.Encoding (encodeUtf8)
import qualified Text.HTML.DOM as H
import Text.XML.Cursor
import Text.XML (Document)
import Data.Tree
import Text.HTML.SanitizeXSS
import Parser.Publisher.ACS
import Parser.Publisher.NatureL
import Parser.Publisher.NatureA
import Parser.Publisher.Nature2
import Parser.Publisher.Nature3
import Parser.Publisher.Nature4
import Parser.Publisher.NatureRev
import Parser.Publisher.Elsevier
import Parser.Publisher.PLoSONE
import Parser.Publisher.Science
import Parser.Publisher.Wiley
import Parser.Publisher.PNAS
import Parser.Publisher.AnnualRev
import Parser.Publisher.Rockfeller
import Parser.Publisher.Generic
-- Some functions in this module return a list of possible readers.
-- Some just choose the first one, so the order matters in some cases.
-- ToDo: Organize this system.
-- The Nature journals in particular are a bit complicated.
readerList :: [PaperReader]
readerList = [acsAReader,acsLReader,
natureAReader,natureLReader,
nature2AReader,nature2LReader, nature2OtherReader,
natureRevReader,nature3Reader,nature4Reader,
elsevier1Reader,elsevier2Reader,
scienceReader,stkeReader,
wileyReaderC,
pnasReader,pnasLikeReader,
annualRevReader,
rupressReader,
plosReader
, genericReader]
-- This is the primary function.
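-- It tries the readers in order, takes the first one that supports the page,
-- runs its 'parsePaper' and sanitizes the resulting 'Paper'.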
parseHtml :: Url -> Text -> IO (Maybe Paper)
parseHtml url html = do
let
doc = H.parseLBS $ encodeUtf8 $ fromStrict html
rs = readersFromHtmlDoc url doc
mr = headMay rs
case mr of
Just r -> fmap (Just . sanitizePaper) $ (parsePaper r) r url html doc
Nothing -> return Nothing
-- Stub: sanitize other things as well.
-- Or maybe I can just sanitize after HTML generation for viewing.
-- And I can keep all my added JS in a .js file.
sanitizePaper :: Paper -> Paper
sanitizePaper p =
L.citation %~ sanitizeCit $
L.references %~ (map sanitizeRef) $
L.figures %~ (map sanitizeFig) $
L.abstract %~ (fmap sanitize) $
L.mainHtml %~ (fmap sanitizeMainHtml) $ p
sanitizeMainHtml :: PaperMainText -> PaperMainText
sanitizeMainHtml (FlatHtml html) = FlatHtml (sanitize html)
sanitizeMainHtml (Structured n) = Structured $ sanitizeNode n
where
sanitizeNode (Node (tag,txt) ns) = Node (sanitize tag, sanitize txt) (map sanitizeNode ns)
sanitizeRef :: Reference -> Reference
sanitizeRef (Reference id name mcit mtxt murl)
= Reference (sanitize id) (sanitize name) (sanitizeCit <$> mcit) (sanitize <$> mtxt) (sanitize <$> murl)
sanitizeCit :: Citation -> Citation
sanitizeCit (Citation doi url title journal year vol from to auth pub typ)
= Citation (sanitize <$> doi) (sanitize <$> url) (sanitize <$> title)
(sanitize <$> journal) year (sanitize <$> vol) (sanitize <$> from) (sanitize <$> to) (map sanitize auth)
(sanitize <$> pub) (sanitize <$> typ)
sanitizeFig :: Figure -> Figure
sanitizeFig (Figure id name annot img) = (Figure (sanitize id) (sanitize name) (sanitize annot) (sanitize img))
readerFromUrl :: Url -> Maybe PaperReader
readerFromUrl url = headMay $ readersFromUrl url
readersFromUrl :: Url -> [PaperReader]
readersFromUrl url = filter (\r -> isJust $ (supportedUrl r) r url) readerList
readerFromHtml :: Url -> T.Text -> Maybe PaperReader
readerFromHtml url html = headMay $ readersFromHtml url html
readersFromHtml :: Url -> T.Text -> [PaperReader]
readersFromHtml url html = filter (\r -> isJust $ (supported r) r url cur) readerList
where
doc = H.parseLBS $ encodeUtf8 $ fromStrict html
cur = fromDocument doc
readersFromHtmlDoc :: Url -> Document -> [PaperReader]
readersFromHtmlDoc url doc = filter (\r -> isJust $ (supported r) r url cur) readerList
where
cur = fromDocument doc
| hirokai/PaperServer | Parser/PaperReader.hs | bsd-2-clause | 4,270 | 0 | 15 | 802 | 1,171 | 646 | 525 | 92 | 2 |
-- vim: sw=2: ts=2: set expandtab:
{-# LANGUAGE TemplateHaskell,
ScopedTypeVariables,
FlexibleInstances,
MultiParamTypeClasses,
FlexibleContexts,
UndecidableInstances,
OverloadedStrings,
CPP #-}
-----------------------------------------------------------------------------
--
-- Module : Syntax
-- Copyright : BSD
-- License : AllRightsReserved
--
-- Maintainer : Ki Yung Ahn
-- Stability :
-- Portability :
--
-- |
--
-----------------------------------------------------------------------------
module Syntax ( PSUT(..)
, TmName, Tm, isTm -- (..)
, TyName, Ty, isTy -- (..)
, KiName, Ki, isKi -- (..)
, KArg, TArg, TArgName, IxMap
) where
import Unbound.LocallyNameless hiding (Con)
import GHC.Exts( IsString(..) )
type KiName = Name Ki
type TyName = Name Ty
type TmName = Name Tm
data PSUT
-- data Ki
-- = KVar KiName -- shared with Var
= Star
| KArr KArg Ki
-- data Ty
-- = TVar TyName -- shared with Var
| TCon TyName
| TArr Ty Ty
| TApp Ty TArg
| TFix Ty -- Ty must be TCon or application of TCon to other arguments
-- data Tm
| Var TmName
| Con TmName
| In Integer Tm
| MIt (Bind TmName Tm) -- Tm must be Alt
| MPr (Bind (TmName,TmName) Tm) -- Tm must be Alt
| Lam (Bind TmName Tm)
| App Tm Tm
| Let (Bind (TmName, Embed Tm) Tm)
| Alt (Maybe IxMap) [(TmName,(Bind [TmName] Tm))]
type Ki = PSUT
type Ty = PSUT
type Tm = PSUT
isKi (Var _) = True
isKi Star = True
isKi (KArr _ _) = True
isKi _ = False
isTy (Var _) = True
isTy (TCon _) = True
isTy (TArr _ _) = True
isTy (TApp _ _) = True
isTy (TFix _) = True
isTy _ = False
isTm (Var _) = True
isTm (Con _) = True
isTm (In _ _) = True
isTm (MIt _) = True
isTm (MPr _) = True
isTm (Lam _) = True
isTm (App _ _) = True
isTm (Let _) = True
isTm (Alt _ _) = True
isTm _ = False
type KArg = Either Ty Ki -- Right is Ki, Left is Ty
type TArg = Either Tm Ty -- Right is Ty, Left is Tm
-- assuming only variable form of indices in IxMap
type IxMap = Bind [TArgName] Ty
type TArgName = Either TmName TyName -- Right is TyName, Left is TmName
-- $(derive [''Ki, ''Ty, ''Tm])
$(derive [''PSUT])
-- names as string literals
instance Rep a => IsString (Name a) where
fromString = string2Name
-- Alpha and Subst instances are in the Parser module
-- in order to avoid mutually recursive module imports,
-- since the Show class instances for Ki, Ty, Tm depend on LBNF functions
| kyagrd/mininax | src/Syntax.hs | bsd-2-clause | 2,651 | 0 | 11 | 774 | 675 | 385 | 290 | 64 | 1 |
{-# LANGUAGE LambdaCase, TupleSections, ViewPatterns #-}
module Transformations.Optimising.ConstantFolding where
import Text.Printf
import Transformations.Util
import Data.Functor.Foldable
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
import Grin.Grin
{-
HINT:
Constant folding is not part of the official grin optimization pipeline because it causes problems with confluence.
However, it could be useful for debugging purposes.
IDEA: fold everything unconditionally
-}
type Env = (Map Name Name, Map Val Val)
constantFolding :: Exp -> Exp
constantFolding e = ana builder (mempty, e) where
builder :: (Env, Exp) -> ExpF (Env, Exp)
builder (env@(nameEnv, valEnv), exp) = let e = substVals valEnv . substVarRefExp nameEnv $ exp in case e of
EBind (SReturn val) lpat rightExp -> EBindF (env, SReturn $ subst valEnv val) lpat (newEnv, rightExp) where
newEnv = env `mappend` unify env val lpat
_ -> (env,) <$> project e
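-- | Environment bindings induced by matching @val@ against @lpat@:
-- node patterns with the same tag are unified field by field, a variable
-- matched against a variable extends both the name and the value map, a
-- variable matched against any other value extends only the value map, and
-- the remaining patterns (unit, literal, tag) contribute nothing.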
unify :: Env -> Val -> LPat -> Env
unify env@(nameEnv, valEnv) (subst valEnv -> val) lpat = case (lpat, val) of
(ConstTagNode lpatTag lpatArgs, ConstTagNode valTag valArgs)
| lpatTag == valTag -> mconcat $ zipWith (unify env) valArgs lpatArgs
(Var lpatVar, Var valVar) -> (Map.singleton lpatVar valVar, Map.singleton lpat val) -- update val + name env
(Var{}, _) -> (mempty, Map.singleton lpat val) -- update val env
_ -> mempty -- LPat: unit, lit, tag
| andorp/grin | grin/src/Transformations/Optimising/ConstantFolding.hs | bsd-3-clause | 1,514 | 0 | 15 | 337 | 445 | 243 | 202 | 23 | 5 |
{-| This module contains some useful utilities copy-and-pasted from the @lens@
library to avoid a dependency which are used internally and also re-exported
for convenience
-}
module Dhall.Optics
( Optic
, Optic'
-- * Utilities
, rewriteOf
, transformOf
, rewriteMOf
, transformMOf
, mapMOf
, cosmosOf
, to
, foldOf
) where
import Control.Applicative (Const (..), WrappedMonad (..))
import Data.Functor.Contravariant (Contravariant (contramap))
import Data.Profunctor (Profunctor (dimap))
import Data.Profunctor.Unsafe ((#.))
import Lens.Family (ASetter, LensLike, LensLike', over)
-- | Identical to @"Control.Lens".`Control.Lens.rewriteOf`@
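-- (the rule @f@ is applied repeatedly, bottom-up, until it no longer matches
-- anywhere, so the result is a fixed point)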
rewriteOf :: ASetter a b a b -> (b -> Maybe a) -> a -> b
rewriteOf l f = go
where
go = transformOf l (\x -> maybe x go (f x))
{-# INLINE rewriteOf #-}
-- | Identical to @"Control.Lens".`Control.Lens.transformOf`@
transformOf :: ASetter a b a b -> (b -> b) -> a -> b
transformOf l f = go
where
go = f . over l go
{-# INLINE transformOf #-}
-- | Identical to @"Control.Lens".`Control.Lens.rewriteMOf`@
rewriteMOf
:: Monad m
=> LensLike (WrappedMonad m) a b a b -> (b -> m (Maybe a)) -> a -> m b
rewriteMOf l f = go
where
go = transformMOf l (\x -> f x >>= maybe (return x) go)
{-# INLINE rewriteMOf #-}
-- | Identical to @"Control.Lens".`Control.Lens.transformMOf`@
transformMOf
:: Monad m => LensLike (WrappedMonad m) a b a b -> (b -> m b) -> a -> m b
transformMOf l f = go
where
go t = mapMOf l go t >>= f
{-# INLINE transformMOf #-}
-- | Identical to @"Control.Lens".`Control.Lens.mapMOf`@
mapMOf :: LensLike (WrappedMonad m) s t a b -> (a -> m b) -> s -> m t
mapMOf l cmd = unwrapMonad #. l (WrapMonad #. cmd)
{-# INLINE mapMOf #-}
-- | Identical to @"Control.Lens.Plated".`Control.Lens.Plated.cosmosOf`@
cosmosOf :: (Applicative f, Contravariant f) => LensLike' f a a -> LensLike' f a a
cosmosOf d f s = f s *> d (cosmosOf d f) s
{-# INLINE cosmosOf #-}
-- | Identical to @"Control.Lens.Type".`Control.Lens.Type.Optic`@
type Optic p f s t a b = p a (f b) -> p s (f t)
-- | Identical to @"Control.Lens.Type".`Control.Lens.Type.Optic'`@
type Optic' p f s a = Optic p f s s a a
-- | Identical to @"Control.Lens.Getter".`Control.Lens.Getter.to`@
to :: (Profunctor p, Contravariant f) => (s -> a) -> Optic' p f s a
to k = dimap k (contramap k)
{-# INLINE to #-}
-- | Identical to @"Control.Lens.Getter".`Control.Lens.Getter.Getting`@
type Getting r s a = (a -> Const r a) -> s -> Const r s
-- | Identical to @"Control.Lens.Fold".`Control.Lens.Fold.foldOf`@
foldOf :: Getting a s a -> s -> a
foldOf l = getConst #. l Const
{-# INLINE foldOf #-}
| Gabriel439/Haskell-Dhall-Library | dhall/src/Dhall/Optics.hs | bsd-3-clause | 2,728 | 0 | 13 | 595 | 802 | 435 | 367 | 50 | 1 |
import Control.Concurrent.Chan (Chan, readChan, writeChan)
import Control.Monad
import Control.ThreadPool (threadPoolIO)
import Data.List
import System.Directory
import System.Environment
import Text.HandsomeSoup
import Text.XML.HXT.Core
main :: IO ()
main = do
galleries <- parseTopPage
runPool 5 $ map printImageUrlsIfNotDownloaded galleries
parseTopPage :: IO [String]
parseTopPage = do
doc <- fromUrl "http://www.s-cute.com/"
runX $ doc >>> css ".newcontent h5 a" ! "href"
printImageUrlsIfNotDownloaded :: String -> IO ()
printImageUrlsIfNotDownloaded url = do
urls <- extractGalleryPage url
mapM_ putStrLn =<< filterM (liftM not . downloaded) urls
downloaded :: String -> IO Bool
downloaded = doesFileExist . filename
filename :: String -> FilePath
filename = reverse . takeWhile (/= '/') . reverse
extractGalleryPage :: String -> IO [String]
extractGalleryPage url = do
doc <- fromUrl url
images <- runX $ doc >>> css "#gallery a" ! "href"
movies <- runX $ doc >>> css "source" ! "src"
return $ images ++ movies
-- http://stackoverflow.com/questions/9193349/how-do-i-create-a-thread-pool
runPool :: Int -> [IO a] -> IO [a]
runPool n as = do
(input, output) <- threadPoolIO n id
forM_ as $ writeChan input
sequence (take (length as) . repeat $ readChan output)
| masaedw/scdownloader | scdownloader.hs | bsd-3-clause | 1,285 | 0 | 13 | 210 | 422 | 210 | 212 | 34 | 1 |
module BottomUpMergeSort where
import Control.Monad.Writer
import Data.Foldable (foldlM)
type Less a = a -> a -> Bool
data Sortable a = Sortable { _less :: Less a
, _size :: Int
, _segments :: [[a]] }
instance Show a => Show (Sortable a) where
show (Sortable _ s segs) = "Sortable { _size = " ++ show s
++ ", _segments = " ++ show segs
type ComputationLog a = Writer [String] a
merge :: Less a -> [a] -> [a] -> [a]
merge less = mrg
where
mrg [] ys = ys
mrg xs [] = xs
mrg (x:xs) (y:ys) = if less x y
then x : mrg xs (y:ys)
else y : mrg (x:xs) ys
new :: Less a -> ComputationLog (Sortable a)
new less = return $ Sortable { _less = less
, _size = 0
, _segments = [] }
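-- The segment list mirrors the binary representation of '_size': there is one
-- sorted segment per set bit, of length 2^i for bit i, kept in increasing
-- order of length; adding an element merges segments like a binary-counter
-- carry.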
add :: (Show a) => a -> Sortable a -> ComputationLog (Sortable a)
add x (Sortable l size segs) = do
let res = Sortable l (size + 1) (addSeg [x] segs size)
tell $ ["Adding " ++ show x]
tell $ [show res]
return res
where
addSeg seg segs' size' =
if size' `mod` 2 == 0
then seg : segs'
else addSeg (merge l seg (head segs')) (tail segs') (size' `div` 2)
sort :: Sortable a -> [a]
sort (Sortable l _ segs) = mergeAll [] segs
where
mergeAll xs [] = xs
mergeAll xs (seg:segs') = mergeAll (merge l xs seg) segs'
fromList :: (Ord a, Show a)
=> [a] -> ComputationLog (Sortable a)
fromList xs = (new (<)) >>= \initial -> foldlM (flip add) initial xs
runFromList :: (Ord a, Show a)
=> [a] -> IO ()
runFromList xs = do
let (sortable, compLog) = runWriter $ fromList xs
putStrLn "Sortable:"
print sortable
putStrLn "Log:"
forM_ compLog $ \l -> do
putStrLn l
| k-bx/bottom-up-merge-sort | src/BottomUpMergeSort.hs | bsd-3-clause | 1,843 | 0 | 13 | 644 | 793 | 410 | 383 | 48 | 4 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE UndecidableInstances #-}
-----------------------------------------------------------------------------
-- |
-- Module : Numeric.ProductOrd
-- Copyright : (c) Artem Chirkin
-- License : BSD3
--
--
-- Compare product types -- partial order.
--
-----------------------------------------------------------------------------
module Numeric.ProductOrd (ProductOrder (..), PartialOrdering (..), fromOrdering) where
import Data.Data
import Data.Kind (Type)
import Data.Monoid as Mon (Monoid (..))
import Data.Semigroup as Sem (Semigroup (..), stimesIdempotentMonoid)
import GHC.Generics
import Numeric.TypedList
-- | Partial order for comparing product types --
-- [product order](https://en.wikipedia.org/wiki/Product_order).
class ProductOrder a where
-- | Same as `compare`, but may return @Incomparable@.
cmp :: a -> a -> PartialOrdering
-- | Similar to `Ordering`, but may be @Incomparable@.
data PartialOrdering = PLT | PEQ | PGT | Incomparable
deriving ( Eq, Ord, Show, Read, Data, Typeable, Generic, Enum, Bounded )
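-- For example, @cmp (1, 2) (1, 3) == PLT@, whereas @cmp (1, 2) (2, 1) ==
-- Incomparable@ because the component-wise comparisons disagree.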
-- | Extend `Ordering` with an @Incomparable@ option.
fromOrdering :: Ordering -> PartialOrdering
fromOrdering LT = PLT
fromOrdering EQ = PEQ
fromOrdering GT = PGT
{-# INLINE fromOrdering #-}
instance Sem.Semigroup PartialOrdering where
Incomparable <> _ = Incomparable
_ <> Incomparable = Incomparable
PLT <> PGT = Incomparable
PGT <> PLT = Incomparable
PLT <> _ = PLT
PGT <> _ = PGT
PEQ <> y = y
stimes = stimesIdempotentMonoid
instance Mon.Monoid PartialOrdering where
mempty = PEQ
#if !(MIN_VERSION_base(4,11,0))
mappend = (<>)
#endif
instance All Ord (Map f xs)
=> ProductOrder (TypedList (f :: k -> Type) (xs :: [k])) where
cmp U U = PEQ
cmp (a :* as) (b :* bs) = fromOrdering (compare a b) <> cmp as bs
cmp' :: Ord a => a -> a -> PartialOrdering
cmp' a b = fromOrdering (compare a b)
{-# INLINE cmp' #-}
instance (Ord a1, Ord a2)
=> ProductOrder (a1, a2) where
cmp (a1, a2)
(b1, b2)
= cmp' a1 b1 <> cmp' a2 b2
instance (Ord a1, Ord a2, Ord a3)
=> ProductOrder (a1, a2, a3) where
cmp (a1, a2, a3)
(b1, b2, b3)
= cmp' a1 b1 <> cmp' a2 b2 <> cmp' a3 b3
instance (Ord a1, Ord a2, Ord a3, Ord a4)
=> ProductOrder (a1, a2, a3, a4) where
cmp (a1, a2, a3, a4)
(b1, b2, b3, b4)
= cmp' a1 b1 <> cmp' a2 b2 <> cmp' a3 b3
<> cmp' a4 b4
instance (Ord a1, Ord a2, Ord a3, Ord a4, Ord a5)
=> ProductOrder (a1, a2, a3, a4, a5) where
cmp (a1, a2, a3, a4, a5)
(b1, b2, b3, b4, b5)
= cmp' a1 b1 <> cmp' a2 b2 <> cmp' a3 b3
<> cmp' a4 b4 <> cmp' a5 b5
instance (Ord a1, Ord a2, Ord a3, Ord a4, Ord a5, Ord a6)
=> ProductOrder (a1, a2, a3, a4, a5, a6) where
cmp (a1, a2, a3, a4, a5, a6)
(b1, b2, b3, b4, b5, b6)
= cmp' a1 b1 <> cmp' a2 b2 <> cmp' a3 b3
<> cmp' a4 b4 <> cmp' a5 b5 <> cmp' a6 b6
instance (Ord a1, Ord a2, Ord a3, Ord a4, Ord a5, Ord a6, Ord a7)
=> ProductOrder (a1, a2, a3, a4, a5, a6, a7) where
cmp (a1, a2, a3, a4, a5, a6, a7)
(b1, b2, b3, b4, b5, b6, b7)
= cmp' a1 b1 <> cmp' a2 b2 <> cmp' a3 b3
<> cmp' a4 b4 <> cmp' a5 b5 <> cmp' a6 b6
<> cmp' a7 b7
instance (Ord a1, Ord a2, Ord a3, Ord a4, Ord a5, Ord a6, Ord a7, Ord a8)
=> ProductOrder (a1, a2, a3, a4, a5, a6, a7, a8) where
cmp (a1, a2, a3, a4, a5, a6, a7, a8)
(b1, b2, b3, b4, b5, b6, b7, b8)
= cmp' a1 b1 <> cmp' a2 b2 <> cmp' a3 b3
<> cmp' a4 b4 <> cmp' a5 b5 <> cmp' a6 b6
<> cmp' a7 b7 <> cmp' a8 b8
instance (Ord a1, Ord a2, Ord a3, Ord a4, Ord a5, Ord a6, Ord a7, Ord a8, Ord a9)
=> ProductOrder (a1, a2, a3, a4, a5, a6, a7, a8, a9) where
cmp (a1, a2, a3, a4, a5, a6, a7, a8, a9)
(b1, b2, b3, b4, b5, b6, b7, b8, b9)
= cmp' a1 b1 <> cmp' a2 b2 <> cmp' a3 b3
<> cmp' a4 b4 <> cmp' a5 b5 <> cmp' a6 b6
<> cmp' a7 b7 <> cmp' a8 b8 <> cmp' a9 b9
| achirkin/easytensor | easytensor/src/Numeric/ProductOrd.hs | bsd-3-clause | 4,245 | 0 | 14 | 1,161 | 1,674 | 915 | 759 | 91 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : Numeric.LinearAlgebra.Vector.Statistics
-- Copyright : Copyright (c) 2010, Patrick Perry <[email protected]>
-- License : BSD3
-- Maintainer : Patrick Perry <[email protected]>
-- Stability : experimental
--
-- Basic multivariate statistics.
--
module Numeric.LinearAlgebra.Vector.Statistics (
-- * Immutable interface
sum,
mean,
weightedSum,
weightedMean,
-- * Mutable interface
addSumTo,
meanTo,
addWeightedSumTo,
weightedMeanTo,
) where
import Prelude hiding ( sum )
import Control.Monad( forM_ )
import Control.Monad.ST( ST )
import Numeric.LinearAlgebra.Types
import Numeric.LinearAlgebra.Vector.Base( Vector )
import Numeric.LinearAlgebra.Vector.STBase( RVector, STVector )
import qualified Numeric.LinearAlgebra.Vector.STBase as V
-- | Returns the sum of the vectors. The first argument gives the dimension
-- of the vectors.
sum :: (BLAS1 e) => Int -> [Vector e] -> Vector e
sum p xs = V.create $ do
s <- V.new_ p
V.clear s
addSumTo s xs
return s
-- | Returns the mean of the vectors. The first argument gives the dimension
-- of the vectors.
mean :: (BLAS1 e) => Int -> [Vector e] -> Vector e
mean p xs = V.create $ do
m <- V.new_ p
meanTo m xs
return m
-- | Returns the weighted sum of the vectors. The first argument gives the
-- dimension of the vectors.
weightedSum :: (BLAS1 e) => Int -> [(e, Vector e)] -> Vector e
weightedSum p wxs = V.create $ do
s <- V.new_ p
V.clear s
addWeightedSumTo s wxs
return s
-- | Returns the weighted mean of the vectors. The first argument gives the
-- dimension of the vectors.
weightedMean :: (BLAS1 e)
=> Int -> [(Double, Vector e)] -> Vector e
weightedMean p wxs = V.create $ do
m <- V.new_ p
weightedMeanTo m wxs
return m
-- | Adds the sum of the vectors to the target vector.
addSumTo :: (RVector v, BLAS1 e) => STVector s e -> [v e] -> ST s ()
addSumTo dst = addWeightedSumTo dst . zip (repeat 1)
-- | Sets the target vector to the mean of the vectors.
meanTo :: (RVector v, BLAS1 e)
       => STVector s e -> [v e] -> ST s ()
meanTo dst = weightedMeanTo dst . zip (repeat 1)
-- | Adds the weighted sum of the vectors to the target vector.
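-- The accumulation is compensated (Kahan-style): the @err@ vector carries the
-- low-order bits lost by each addition so they are folded back in on later
-- iterations.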
addWeightedSumTo :: (RVector v, BLAS1 e)
=> STVector s e -> [(e, v e)] -> ST s ()
addWeightedSumTo s wxs = do
n <- V.getDim s
err <- V.new n 0
old_s <- V.new_ n
diff <- V.new_ n
val <- V.new_ n
forM_ wxs $ \(w,x) -> do
V.unsafeCopyTo old_s s -- old_s := s
V.unsafeCopyTo val x -- val := w * x
V.scaleM_ w val
V.addTo err err val -- err := err + val
V.addTo s s err -- s := s + err
V.subTo diff old_s s -- diff := old_s - s
V.addTo err diff val -- err := diff + val
-- | Sets the target vector to the weighted mean of the vectors.
weightedMeanTo :: (RVector v, BLAS1 e)
=> STVector s e -> [(Double, v e)] -> ST s ()
weightedMeanTo m wxs = let
go _ _ [] = return ()
go diff w_sum ((w,x):wxs') | w == 0 = go diff w_sum wxs'
| otherwise = let w_sum' = w_sum + w
in do
V.subTo diff x m
V.addWithScaleM_
(realToFrac $ w/w_sum') diff m
go diff w_sum' wxs'
in do
n <- V.getDim m
diff <- V.new_ n
V.clear m
go diff 0 wxs
| patperry/hs-linear-algebra | lib/Numeric/LinearAlgebra/Vector/Statistics.hs | bsd-3-clause | 3,675 | 0 | 17 | 1,154 | 1,061 | 534 | 527 | 76 | 2 |
{-# LANGUAGE FlexibleInstances, MonadComprehensions, MultiParamTypeClasses,
TupleSections, TypeFamilies #-}
{- |
Module : Data.Keyless.Map.Lazy
Description : Lazy Map-based lookup tables.
Copyright : (c) Ivan Lazar Miljenovic
License : 3-Clause BSD-style
Maintainer : [email protected]
-}
module Data.Keyless.Map.Lazy (KeylessMap) where
import Control.DeepSeq (NFData (..))
import Data.Keyless
import qualified Data.Map.Lazy as M
import Prelude hiding (lookup, map)
-- -----------------------------------------------------------------------------
data KeylessMap a = KM { table :: !(M.Map Key a)
, nextKey :: {-# UNPACK #-} !Key
}
deriving (Eq, Ord, Show, Read)
instance Functor KeylessMap where
fmap = mapKM
{-# INLINE fmap #-}
instance (NFData a) => NFData (KeylessMap a) where
rnf (KM tbl nk) = rnf tbl `seq` rnf nk
initKM :: KeylessMap a
initKM = KM M.empty initKey
insertKM :: a -> KeylessMap a -> (Key, KeylessMap a)
insertKM v (KM tbl k) = (k, KM tbl' k')
where
k' = succ k
tbl' = M.insert k v tbl
insertBulkKM :: [a] -> KeylessMap a -> ([Key], KeylessMap a)
insertBulkKM as (KM tbl nk) = (ks, KM tbl' $ nk + sz')
where
kas = M.fromAscList $ zip [nk..] as
sz' = M.size kas
ks = M.keys kas
tbl' = tbl `M.union` kas
deleteKM :: Key -> KeylessMap a -> KeylessMap a
deleteKM k km = km { table = M.delete k $ table km }
deleteBulkKM :: [Key] -> KeylessMap a -> KeylessMap a
deleteBulkKM ks km = km { table = table km `M.difference` ks' }
where
ks' = M.fromList $ fmap (,()) ks
lookupKM :: Key -> KeylessMap a -> Maybe a
lookupKM k = M.lookup k . table
unsafeLookupKM :: Key -> KeylessMap a -> a
unsafeLookupKM k = (M.! k) . table
hasEntryKM :: Key -> KeylessMap a -> Bool
hasEntryKM k = M.member k . table
adjustKM :: (a -> a) -> Key -> KeylessMap a -> KeylessMap a
adjustKM f k km = km { table = M.adjust f k $ table km }
sizeKM :: KeylessMap a -> Int
sizeKM = M.size . table
minKeyKM :: KeylessMap a -> Maybe Key
minKeyKM = fmap (fst . fst) . M.minViewWithKey . table
maxKeyKM :: KeylessMap a -> Maybe Key
maxKeyKM = fmap (fst . fst) . M.maxViewWithKey . table
-- Can use this for Map as we only store values we want.
isNullKM :: KeylessMap a -> Bool
isNullKM = M.null . table
keysKM :: KeylessMap a -> [Key]
keysKM = M.keys . table
valuesKM :: KeylessMap a -> [a]
valuesKM = M.elems . table
assocsKM :: KeylessMap a -> [(Key, a)]
assocsKM = M.assocs . table
fromListKM :: [a] -> KeylessMap a
fromListKM vs = KM tbl nxtK
where
tbl = M.fromAscList $ zip [initKey..] vs
nxtK = maybe initKey (succ . fst . fst) $ M.maxViewWithKey tbl
unsafeFromListWithKeysKM :: [(Key, a)] -> KeylessMap a
unsafeFromListWithKeysKM kvs = KM tbl nxtK
where
tbl = M.fromList kvs -- Don't know if sorted
nxtK = maybe initKey (succ . fst . fst) $ M.maxViewWithKey tbl
mergeKM :: KeylessMap a -> KeylessMap a
-> ((Key -> Key), KeylessMap a)
mergeKM (KM tbl1 n1) (KM tbl2 n2) = (kf, KM tbl nxt)
where
kf = (+n1)
tbl2' = M.mapKeysMonotonic kf tbl2
tbl = M.union tbl1 tbl2'
nxt = kf n2
mergeAllKM :: [KeylessMap a] -> ([MergeTranslation Key], KeylessMap a)
mergeAllKM [] = ([], initKM)
mergeAllKM kvs = (mts, KM { table = tbl, nextKey = nk })
where
szs = fmap nextKey kvs
fs = fmap (+) . scanl (+) 0 $ init szs
tbl = M.unions . zipWith M.mapKeysMonotonic fs $ fmap table kvs
mts = zipWith toMT fs szs
toMT f nxtKey = MT { newBounds = [ (f initKey, f $ pred nxtKey)
| nxtKey > initKey
]
, oldToNew = f
}
nk = last $ zipWith ($) fs szs
differenceKM :: KeylessMap a -> KeylessMap a
-> KeylessMap a
differenceKM km1 km2 = km1 { table = table km1 `M.difference` table km2 }
mapKM :: (a -> b) -> KeylessMap a -> KeylessMap b
mapKM f km = km { table = fmap f $ table km }
mapWithKeyKM :: (Key -> a -> b) -> KeylessMap a
-> KeylessMap b
mapWithKeyKM f km = km { table = M.mapWithKey f $ table km }
-- -----------------------------------------------------------------------------
instance Keyless (KeylessMap a) where
type Elem (KeylessMap a) = a
empty = initKM
{-# INLINE empty #-}
insert = insertKM
{-# INLINE insert #-}
insertBulk = insertBulkKM
{-# INLINE insertBulk #-}
delete = deleteKM
{-# INLINE delete #-}
deleteBulk = deleteBulkKM
{-# INLINE deleteBulk #-}
lookup = lookupKM
{-# INLINE lookup #-}
unsafeLookup = unsafeLookupKM
{-# INLINE unsafeLookup #-}
hasEntry = hasEntryKM
{-# INLINE hasEntry #-}
adjust = adjustKM
{-# INLINE adjust #-}
size = sizeKM
{-# INLINE size #-}
minKey = minKeyKM
{-# INLINE minKey #-}
maxKey = maxKeyKM
{-# INLINE maxKey #-}
isNull = isNullKM
{-# INLINE isNull #-}
keys = keysKM
{-# INLINE keys #-}
values = valuesKM
{-# INLINE values #-}
assocs = assocsKM
{-# INLINE assocs #-}
fromList = fromListKM
{-# INLINE fromList #-}
unsafeFromListWithKeys = unsafeFromListWithKeysKM
{-# INLINE unsafeFromListWithKeys #-}
merge = mergeKM
{-# INLINE merge #-}
mergeAll = mergeAllKM
{-# INLINE mergeAll #-}
difference = differenceKM
{-# INLINE difference #-}
instance FKeyless KeylessMap a where
mapWithKey = mapWithKeyKM
{-# INLINE mapWithKey #-}
| ivan-m/keyless-entry | Data/Keyless/Map/Lazy.hs | bsd-3-clause | 5,544 | 0 | 12 | 1,468 | 1,730 | 929 | 801 | 137 | 1 |
module Main
( main -- :: IO ()
) where
import Criterion.Main (bgroup, defaultMain)
-- Import our benchmark suites
import qualified Mini as Mini
import qualified Macro as Macro
import qualified Micro as Micro
import qualified Instances as Inst
import Utils (prepBenchmarkFiles)
-- A simple driver, for running every set of benchmarks.
main :: IO ()
main = prepBenchmarkFiles >> defaultMain
[ bgroup "instance" Inst.benchmarks
, bgroup "micro" Micro.benchmarks
, bgroup "mini" Mini.benchmarks
, bgroup "macro" Macro.benchmarks
]
| arianvp/binary-serialise-cbor | bench/Main.hs | bsd-3-clause | 597 | 0 | 9 | 148 | 119 | 73 | 46 | 14 | 1 |
module Y21.D02 where
import Imports
import Util
data Op = Down | Up | Forward deriving Show
data Command = Command { op :: Op, value :: Int } deriving Show
data PosDepth = PosDepth { pos, depth :: Int } deriving Show
data PosDepthAim = PosDepthAim { pos, depth, aim :: Int } deriving Show
-- down 5
-- up 3
-- forward 5
commands :: Parser [Command]
commands =
command `endBy` eol
where
command :: Parser Command
command = Command <$> op <* pad <*> decimal
op :: Parser Op
op = try (string "forward") $> Forward
<|> try (string "down") $> Down
<|> string "up" $> Up
solve1 :: String -> Int
solve1 =
(\(PosDepth p d) -> p * d)
. foldl' move (PosDepth 0 0)
. parseOrDie commands
where
move :: PosDepth -> Command -> PosDepth
move pd@(PosDepth p d) (Command op v) = case op of
Down -> pd {depth = d + v}
Up -> pd {depth = d - v}
Forward -> pd {pos = p + v}
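-- Part 2: Down/Up adjust the aim instead of the depth; Forward advances the
-- position by the value and increases the depth by aim * value.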
solve2 :: String -> Int
solve2 =
(\(PosDepthAim p d _) -> p * d)
. foldl' move (PosDepthAim 0 0 0)
. parseOrDie commands
where
move :: PosDepthAim -> Command -> PosDepthAim
move pda@(PosDepthAim p d a) (Command op v) = case op of
Down -> pda {aim = a + v}
Up -> pda {aim = a - v}
Forward -> pda {pos = p + v, depth = d + (a * v)}
| oshyshko/adventofcode | src/Y21/D02.hs | bsd-3-clause | 1,425 | 0 | 14 | 494 | 543 | 300 | 243 | 36 | 3 |
-- | Internal module.
module Network.Hawk.Internal.Client.HeaderParser
( parseWwwAuthenticateHeader
, parseServerAuthorizationHeader
, WwwAuthenticateHeader(..)
, ServerAuthorizationHeader(..)
) where
import Data.ByteString (ByteString)
import Data.Time.Clock.POSIX (POSIXTime)
import Network.Hawk.Types
import Control.Monad (join)
import Network.Hawk.Util
parseWwwAuthenticateHeader :: ByteString -> Either String WwwAuthenticateHeader
parseWwwAuthenticateHeader = fmap snd . parseHeader wwwKeys wwwAuthHeader
parseServerAuthorizationHeader :: ByteString -> Either String ServerAuthorizationHeader
parseServerAuthorizationHeader = fmap snd . parseHeader serverKeys serverAuthReplyHeader
wwwKeys = ["error", "tsm", "ts"]
serverKeys = ["mac", "ext", "hash"]
wwwAuthHeader :: AuthAttrs -> Either String WwwAuthenticateHeader
wwwAuthHeader m = do
err <- authAttr m "error"
case authAttrMaybe m "ts" of
Just ts' -> do
ts <- readTs ts'
tsm <- authAttr m "tsm"
return $ WwwAuthenticateHeader err (Just ts) (Just tsm)
Nothing -> return $ WwwAuthenticateHeader err Nothing Nothing
serverAuthReplyHeader :: AuthAttrs -> Either String ServerAuthorizationHeader
serverAuthReplyHeader m = do
mac <- authAttr m "mac"
let hash = authAttrMaybe m "hash"
let ext = authAttrMaybe m "ext"
return $ ServerAuthorizationHeader mac hash ext
| rvl/hsoz | src/Network/Hawk/Internal/Client/HeaderParser.hs | bsd-3-clause | 1,378 | 0 | 15 | 213 | 362 | 187 | 175 | 31 | 2 |
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE TypeApplications #-}
module Builder (builder) where
import Criterion.Main
import qualified Data.ByteString as BS
import qualified Data.ByteString.Builder as BB
import qualified Data.ByteString.Lazy as BL
import qualified Data.Builder as B
import qualified "stdio" Data.Binary as B
import qualified "stdio" Data.Vector as V
import Control.DeepSeq
import Control.Monad
import Control.Exception (evaluate)
import Data.Monoid ((<>))
import Data.Word
bytestring1000 :: BS.ByteString
bytestring1000 = BS.replicate 1000 0
bytes1000 :: V.Bytes
bytes1000 = V.replicate 1000 0
bytestring20000 :: BS.ByteString
bytestring20000 = BS.replicate 20000 0
bytes20000 :: V.Bytes
bytes20000 = V.replicate 20000 0
builder :: [Benchmark]
builder =
[ bgroup "word8 100000000" word8_100000000
, bgroup "word8 10000" word8_10000
, bgroup "word8 32" word8_32
, bgroup "bytestring/bytes 32 * 1000" bytes_32_1000
, bgroup "bytestring/bytes 32 * 20000" bytes_32_20000
]
word8_100000000 :: [Benchmark]
word8_100000000 =
[ bench "bytestring/toLazyByteString" $ nf BB.toLazyByteString (mconcat (replicate 100000000 (BB.word8 123)))
, bench "bytestring/toStrict . toLazyByteString" $ nf (BL.toStrict . BB.toLazyByteString) (mconcat (replicate 100000000 (BB.word8 123)))
, bench "stdio/buildBytesList" $ nf B.buildBytesList (mconcat (replicate 100000000 (B.binary @Word8 123)))
, bench "stdio/buildBytes" $ nf B.buildBytes (mconcat (replicate 100000000 (B.binary @Word8 123)))
, bench "stdio/buildAndRun" $ nfIO (B.buildAndRun (void . evaluate) (mconcat (replicate 100000000 (B.binary @Word8 123))))
]
word8_10000 :: [Benchmark]
word8_10000 =
[ bench "bytestring/toLazyByteString" $ nf BB.toLazyByteString (mconcat (replicate 10000 (BB.word8 123)))
, bench "bytestring/toStrict . toLazyByteString" $ nf (BL.toStrict . BB.toLazyByteString) (mconcat (replicate 10000 (BB.word8 123)))
, bench "stdio/buildBytesList" $ nf B.buildBytesList (mconcat (replicate 10000 (B.binary @Word8 123)))
, bench "stdio/buildBytes" $ nf B.buildBytes (mconcat (replicate 10000 (B.binary @Word8 123)))
, bench "stdio/buildAndRun" $ nfIO (B.buildAndRun (void . evaluate) (mconcat (replicate 10000 (B.binary @Word8 123))))
]
word8_32 :: [Benchmark]
word8_32 =
[ bench "bytestring/toLazyByteString" $ nf BB.toLazyByteString (mconcat (replicate 32 (BB.word8 123)))
, bench "bytestring/toStrict . toLazyByteString" $ nf (BL.toStrict . BB.toLazyByteString) (mconcat (replicate 32 (BB.word8 123)))
, bench "stdio/buildBytesList" $ nf B.buildBytesList (mconcat (replicate 32 (B.binary @Word8 123)))
, bench "stdio/buildBytes" $ nf B.buildBytes (mconcat (replicate 32 (B.binary @Word8 123)))
, bench "stdio/buildAndRun" $ nfIO (B.buildAndRun (void . evaluate) (mconcat (replicate 32 (B.binary @Word8 123))))
]
bytes_32_1000 :: [Benchmark]
bytes_32_1000 =
[ bench "bytestring/toLazyByteString" $ nf BB.toLazyByteString
(mconcat (replicate 32 $ BB.byteString bytestring1000))
, bench "stdio/buildBytesList" $ nf B.buildBytesList (mconcat (replicate 32 (B.binary bytes1000)))
, bench "stdio/buildAndRun" $ nfIO (B.buildAndRun (void . evaluate) (mconcat (replicate 32 (B.binary bytes1000))))
]
bytes_32_20000 :: [Benchmark]
bytes_32_20000 =
[ bench "bytestring/toLazyByteString" $ nf BB.toLazyByteString
(mconcat (replicate 32 $ BB.byteString bytestring20000))
, bench "stdio/buildBytesList" $ nf B.buildBytesList (mconcat (replicate 32 (B.binary bytes20000)))
, bench "stdio/buildAndRun" $ nfIO (B.buildAndRun (void . evaluate) (mconcat (replicate 32 (B.binary bytes20000))))
]
| winterland1989/stdio | bench/Builder.hs | bsd-3-clause | 3,855 | 0 | 16 | 645 | 1,247 | 650 | 597 | 66 | 1 |
-- | Get time zones.
module Data.Time.Zone where
import Data.Time.Format
import Data.Time.LocalTime
import System.Locale
-- | Get the time zone by name.
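-- The lookup is delegated to 'parseTime': the zone name is appended to a
-- dummy timestamp and the @%Z@ directive extracts the resulting 'TimeZone'.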
getZone :: String -> Maybe TimeZone
getZone zone =
case parseTime defaultTimeLocale "%F%T%Z" ("2000-01-0100:00:00" ++ zone) of
Just (ZonedTime _ timeZone) -> Just timeZone
_ ->
Nothing
| chrisdone/chrisdone-xmonad | src/Data/Time/Zone.hs | bsd-3-clause | 357 | 0 | 10 | 68 | 90 | 49 | 41 | 10 | 2 |
{-# LANGUAGE GADTs #-}
-- Untyped arithmetic expressions (the "arith" language from TAPL).
module Language.Arith.Syntax where
import Prelude hiding (True, False)
-- Terms.
data T where
  True  :: T
  False :: T
  If    :: T -> T -> T -> T
  O     :: T
  Succ  :: T -> T
  Pred  :: T -> T
  IsO   :: T -> T
-- Numeric values. The constructors carry a 'V' prefix because 'O' and
-- 'Succ' are already used by 'T' in this module.
data V where
  VO    :: V
  VSucc :: V -> V
-- Evidence that an evaluation step happened under a congruence rule
-- (an assumed completion of the unfinished original declaration).
data Eval where
  ESucc :: Eval -> Eval
  EPred :: Eval -> Eval
-- Evaluate a term to normal form, leaving stuck terms untouched.
eval :: T -> T
eval (If c t e) = case eval c of { True -> eval t; False -> eval e; c' -> If c' t e }
eval (Succ t)   = Succ (eval t)
eval (Pred t)   = case eval t of { O -> O; Succ n -> n; t' -> Pred t' }
eval (IsO t)    = case eval t of { O -> True; Succ _ -> False; t' -> IsO t' }
eval t          = t
| utky/lambda-cute | src/Language/Arith/Syntax.hs | bsd-3-clause | 326 | 1 | 8 | 111 | 156 | 87 | 69 | -1 | -1 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Data.HashPSQ.Internal
( -- * Type
Bucket (..)
, mkBucket
, HashPSQ (..)
-- * Query
, null
, size
, member
, lookup
, findMin
-- * Construction
, empty
, singleton
-- * Insertion
, insert
-- * Delete/update
, delete
, deleteMin
, alter
, alterMin
-- * Lists
, fromList
, toList
, keys
-- * Views
, insertView
, deleteView
, minView
, atMostView
-- * Traversal
, map
, unsafeMapMonotonic
, fold'
-- * Unsafe operations
, unsafeLookupIncreasePriority
, unsafeInsertIncreasePriority
, unsafeInsertIncreasePriorityView
-- * Validity check
, valid
) where
import Control.DeepSeq (NFData (..))
import Data.Foldable (Foldable)
import Data.Hashable
import qualified Data.List as List
import Data.Maybe (isJust)
import Data.Traversable
import Prelude hiding (foldr, lookup, map, null)
import qualified Data.IntPSQ.Internal as IntPSQ
import qualified Data.OrdPSQ as OrdPSQ
------------------------------------------------------------------------------
-- Types
------------------------------------------------------------------------------
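-- | A bucket collects all entries whose keys hash to the same 'Int'. The
-- entry with the smallest priority is stored directly in the 'B' constructor
-- (its priority lives in the enclosing 'IntPSQ.IntPSQ'); the remaining
-- colliding entries are kept in the nested 'OrdPSQ.OrdPSQ'.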
data Bucket k p v = B !k !v !(OrdPSQ.OrdPSQ k p v)
deriving (Foldable, Functor, Show, Traversable)
-- | Smart constructor which takes care of placing the minimum element directly
-- in the 'Bucket'.
{-# INLINABLE mkBucket #-}
mkBucket
:: (Ord k, Ord p)
=> k -> p -> v -> OrdPSQ.OrdPSQ k p v -> (p, Bucket k p v)
mkBucket k p x opsq =
-- TODO (jaspervdj): We could do an 'unsafeInsertNew' here for all call
-- sites.
case toBucket (OrdPSQ.insert k p x opsq) of
Just bucket -> bucket
Nothing -> error $ "mkBucket: internal error"
toBucket :: (Ord k, Ord p) => OrdPSQ.OrdPSQ k p v -> Maybe (p, Bucket k p v)
toBucket opsq = case OrdPSQ.minView opsq of
Just (k, p, x, opsq') -> Just (p, B k x opsq')
Nothing -> Nothing
instance (NFData k, NFData p, NFData v) => NFData (Bucket k p v) where
rnf (B k v x) = rnf k `seq` rnf v `seq` rnf x
-- | A priority search queue with keys of type @k@ and priorities of type @p@
-- and values of type @v@. It is strict in keys, priorities and values.
newtype HashPSQ k p v = HashPSQ (IntPSQ.IntPSQ p (Bucket k p v))
deriving (Foldable, Functor, NFData, Show, Traversable)
instance (Eq k, Eq p, Eq v, Hashable k, Ord k, Ord p) =>
Eq (HashPSQ k p v) where
x == y = case (minView x, minView y) of
(Nothing , Nothing ) -> True
(Just (xk, xp, xv, x'), (Just (yk, yp, yv, y'))) ->
xk == yk && xp == yp && xv == yv && x' == y'
(Just _ , Nothing ) -> False
(Nothing , Just _ ) -> False
------------------------------------------------------------------------------
-- Query
------------------------------------------------------------------------------
-- | /O(1)/ True if the queue is empty.
{-# INLINABLE null #-}
null :: HashPSQ k p v -> Bool
null (HashPSQ ipsq) = IntPSQ.null ipsq
-- | /O(n)/ The number of elements stored in the PSQ.
{-# INLINABLE size #-}
size :: HashPSQ k p v -> Int
size (HashPSQ ipsq) = IntPSQ.fold'
(\_ _ (B _ _ opsq) acc -> 1 + OrdPSQ.size opsq + acc)
0
ipsq
-- | /O(min(n,W))/ Check if a key is present in the queue.
{-# INLINABLE member #-}
member :: (Hashable k, Ord k, Ord p) => k -> HashPSQ k p v -> Bool
member k = isJust . lookup k
-- | /O(min(n,W))/ The priority and value of a given key, or 'Nothing' if the
-- key is not bound.
{-# INLINABLE lookup #-}
lookup :: (Ord k, Hashable k, Ord p) => k -> HashPSQ k p v -> Maybe (p, v)
lookup k (HashPSQ ipsq) = do
(p0, B k0 v0 os) <- IntPSQ.lookup (hash k) ipsq
if k0 == k
then return (p0, v0)
else OrdPSQ.lookup k os
-- | /O(1)/ The element with the lowest priority.
findMin :: (Hashable k, Ord k, Ord p) => HashPSQ k p v -> Maybe (k, p, v)
findMin (HashPSQ ipsq) = case IntPSQ.findMin ipsq of
Nothing -> Nothing
Just (_, p, B k x _) -> Just (k, p, x)
--------------------------------------------------------------------------------
-- Construction
--------------------------------------------------------------------------------
-- | /O(1)/ The empty queue.
empty :: HashPSQ k p v
empty = HashPSQ IntPSQ.empty
-- | /O(1)/ Build a queue with one element.
singleton :: (Hashable k, Ord k, Ord p) => k -> p -> v -> HashPSQ k p v
singleton k p v = insert k p v empty
--------------------------------------------------------------------------------
-- Insertion
--------------------------------------------------------------------------------
-- | /O(min(n,W))/ Insert a new key, priority and value into the queue. If the key
-- is already present in the queue, the associated priority and value are
-- replaced with the supplied priority and value.
{-# INLINABLE insert #-}
insert
:: (Ord k, Hashable k, Ord p)
=> k -> p -> v -> HashPSQ k p v -> HashPSQ k p v
insert k p v (HashPSQ ipsq) =
case IntPSQ.alter (\x -> ((), ins x)) (hash k) ipsq of
((), ipsq') -> HashPSQ ipsq'
where
ins Nothing = Just (p, B k v (OrdPSQ.empty))
ins (Just (p', B k' v' os))
| k' == k =
-- Tricky: p might have less priority than an item in 'os'.
Just (mkBucket k p v os)
| p' < p || (p == p' && k' < k) =
Just (p', B k' v' (OrdPSQ.insert k p v os))
| OrdPSQ.member k os =
-- This is a bit tricky: k might already be present in 'os' and we
-- don't want to end up with duplicate keys.
Just (p, B k v (OrdPSQ.insert k' p' v' (OrdPSQ.delete k os)))
| otherwise =
Just (p , B k v (OrdPSQ.insert k' p' v' os))
--------------------------------------------------------------------------------
-- Delete/update
--------------------------------------------------------------------------------
-- | /O(min(n,W))/ Delete a key and its priority and value from the queue. When
-- the key is not a member of the queue, the original queue is returned.
{-# INLINE delete #-}
delete
:: (Hashable k, Ord k, Ord p) => k -> HashPSQ k p v -> HashPSQ k p v
delete k t = case deleteView k t of
Nothing -> t
Just (_, _, t') -> t'
-- | /O(min(n,W))/ Delete the binding with the least priority, and return the
-- rest of the queue stripped of that binding. In case the queue is empty, the
-- empty queue is returned again.
{-# INLINE deleteMin #-}
deleteMin
:: (Hashable k, Ord k, Ord p) => HashPSQ k p v -> HashPSQ k p v
deleteMin t = case minView t of
Nothing -> t
Just (_, _, _, t') -> t'
-- | /O(min(n,W))/ The expression @alter f k queue@ alters the value @x@ at @k@,
-- or absence thereof. 'alter' can be used to insert, delete, or update a value
-- in a queue. It also allows you to calculate an additional value @b@.
{-# INLINABLE alter #-}
alter :: (Hashable k, Ord k, Ord p)
=> (Maybe (p, v) -> (b, Maybe (p, v)))
-> k -> HashPSQ k p v -> (b, HashPSQ k p v)
alter f k (HashPSQ ipsq) = case IntPSQ.deleteView h ipsq of
Nothing -> case f Nothing of
(b, Nothing) -> (b, HashPSQ ipsq)
(b, Just (p, x)) ->
(b, HashPSQ $ IntPSQ.unsafeInsertNew h p (B k x OrdPSQ.empty) ipsq)
Just (bp, B bk bx opsq, ipsq')
| k == bk -> case f (Just (bp, bx)) of
(b, Nothing) -> case toBucket opsq of
Nothing -> (b, HashPSQ ipsq')
Just (bp', bucket') ->
(b, HashPSQ $ IntPSQ.unsafeInsertNew h bp' bucket' ipsq')
(b, Just (p, x)) -> case mkBucket k p x opsq of
(bp', bucket') ->
(b, HashPSQ $ IntPSQ.unsafeInsertNew h bp' bucket' ipsq')
| otherwise -> case OrdPSQ.alter f k opsq of
(b, opsq') -> case mkBucket bk bp bx opsq' of
(bp', bucket') ->
(b, HashPSQ $ IntPSQ.unsafeInsertNew h bp' bucket' ipsq')
where
h = hash k
-- | /O(min(n,W))/ A variant of 'alter' which works on the element with the
-- minimum priority. Unlike 'alter', this variant also allows you to change the
-- key of the element.
{-# INLINABLE alterMin #-}
alterMin
:: (Hashable k, Ord k, Ord p)
=> (Maybe (k, p, v) -> (b, Maybe (k, p, v)))
-> HashPSQ k p v
-> (b, HashPSQ k p v)
alterMin f t0 =
let (t, mbX) = case minView t0 of
Nothing -> (t0, Nothing)
Just (k, p, x, t0') -> (t0', Just (k, p, x))
in case f mbX of
(b, mbX') ->
(b, maybe t (\(k, p, x) -> insert k p x t) mbX')
--------------------------------------------------------------------------------
-- Lists
--------------------------------------------------------------------------------
-- | /O(n*min(n,W))/ Build a queue from a list of (key, priority, value) tuples.
-- If the list contains more than one priority and value for the same key, the
-- last priority and value for the key is retained.
{-# INLINABLE fromList #-}
fromList :: (Hashable k, Ord k, Ord p) => [(k, p, v)] -> HashPSQ k p v
fromList = List.foldl' (\psq (k, p, x) -> insert k p x psq) empty
-- | /O(n)/ Convert a queue to a list of (key, priority, value) tuples. The
-- order of the list is not specified.
{-# INLINABLE toList #-}
toList :: (Hashable k, Ord k, Ord p) => HashPSQ k p v -> [(k, p, v)]
toList (HashPSQ ipsq) =
[ (k', p', x')
| (_, p, (B k x opsq)) <- IntPSQ.toList ipsq
, (k', p', x') <- (k, p, x) : OrdPSQ.toList opsq
]
-- | /O(n)/ Obtain the list of present keys in the queue.
{-# INLINABLE keys #-}
keys :: (Hashable k, Ord k, Ord p) => HashPSQ k p v -> [k]
keys t = [k | (k, _, _) <- toList t]
--------------------------------------------------------------------------------
-- Views
--------------------------------------------------------------------------------
-- | /O(min(n,W))/ Insert a new key, priority and value into the queue. If the key
-- is already present in the queue, then the evicted priority and value can be
-- found in the first element of the returned tuple.
{-# INLINABLE insertView #-}
insertView
:: (Hashable k, Ord k, Ord p)
=> k -> p -> v -> HashPSQ k p v -> (Maybe (p, v), HashPSQ k p v)
insertView k p x t =
-- TODO (jaspervdj): Can be optimized easily
case deleteView k t of
Nothing -> (Nothing, insert k p x t)
Just (p', x', _) -> (Just (p', x'), insert k p x t)
-- | /O(min(n,W))/ Delete a key and its priority and value from the queue. If
-- the key was present, the associated priority and value are returned in
-- addition to the updated queue.
{-# INLINABLE deleteView #-}
deleteView
:: forall k p v. (Hashable k, Ord k, Ord p)
=> k -> HashPSQ k p v -> Maybe (p, v, HashPSQ k p v)
deleteView k (HashPSQ ipsq) = case IntPSQ.alter f (hash k) ipsq of
(Nothing, _ ) -> Nothing
(Just (p, x), ipsq') -> Just (p, x, HashPSQ ipsq')
where
f :: Maybe (p, Bucket k p v) -> (Maybe (p, v), Maybe (p, Bucket k p v))
f Nothing = (Nothing, Nothing)
f (Just (p, B bk bx opsq))
| k == bk = case OrdPSQ.minView opsq of
Nothing -> (Just (p, bx), Nothing)
Just (k', p', x', opsq') -> (Just (p, bx), Just (p', B k' x' opsq'))
| otherwise = case OrdPSQ.deleteView k opsq of
Nothing -> (Nothing, Nothing)
Just (p', x', opsq') -> (Just (p', x'), Just (p, B bk bx opsq'))
-- | /O(min(n,W))/ Retrieve the binding with the least priority, and the
-- rest of the queue stripped of that binding.
{-# INLINABLE minView #-}
minView
:: (Hashable k, Ord k, Ord p)
=> HashPSQ k p v -> Maybe (k, p, v, HashPSQ k p v)
minView (HashPSQ ipsq ) =
case IntPSQ.alterMin f ipsq of
(Nothing , _ ) -> Nothing
(Just (k, p, x), ipsq') -> Just (k, p, x, HashPSQ ipsq')
where
f Nothing = (Nothing, Nothing)
f (Just (h, p, B k x os)) = case OrdPSQ.minView os of
Nothing ->
(Just (k, p, x), Nothing)
Just (k', p', x', os') ->
(Just (k, p, x), Just (h, p', B k' x' os'))
-- | Return a list of elements whose priorities are at most @pt@, and the
-- rest of the queue stripped of these elements. The returned list of
-- elements can be in any order: no guarantees there.
{-# INLINABLE atMostView #-}
atMostView
:: (Hashable k, Ord k, Ord p)
=> p -> HashPSQ k p v -> ([(k, p, v)], HashPSQ k p v)
atMostView pt (HashPSQ t0) =
(returns, HashPSQ t2)
where
-- First we use 'IntPSQ.atMostView' to get a collection of buckets that have
-- /AT LEAST/ one element with a low priority. Buckets will usually only
-- contain a single element.
(buckets, t1) = IntPSQ.atMostView pt t0
-- We now need to run through the buckets. This will give us a list of
-- elements to return and a bunch of buckets to re-insert.
(returns, reinserts) = go [] [] buckets
where
-- We use two accumulators, for returns and re-inserts.
go rets reins [] = (rets, reins)
go rets reins ((_, p, B k v opsq) : bs) =
-- Note that 'elems' should be very small, ideally a null list.
let (elems, opsq') = OrdPSQ.atMostView pt opsq
rets' = (k, p, v) : elems ++ rets
reins' = case toBucket opsq' of
Nothing -> reins
Just (p', b) -> ((p', b) : reins)
in go rets' reins' bs
-- Now we can do the re-insertion pass.
t2 = List.foldl'
(\t (p, b@(B k _ _)) -> IntPSQ.unsafeInsertNew (hash k) p b t)
t1
reinserts
--------------------------------------------------------------------------------
-- Traversals
--------------------------------------------------------------------------------
-- | /O(n)/ Modify every value in the queue.
{-# INLINABLE map #-}
map :: (k -> p -> v -> w) -> HashPSQ k p v -> HashPSQ k p w
map f (HashPSQ ipsq) = HashPSQ (IntPSQ.map (\_ p v -> mapBucket p v) ipsq)
where
mapBucket p (B k v opsq) = B k (f k p v) (OrdPSQ.map f opsq)
-- | /O(n)/ Maps a function over the values and priorities of the queue.
-- The function @f@ must be monotonic with respect to the priorities. I.e. if
-- @x < y@, then @fst (f k x v) < fst (f k y v)@.
-- /The precondition is not checked./ If @f@ is not monotonic, then the result
-- will be invalid.
{-# INLINABLE unsafeMapMonotonic #-}
unsafeMapMonotonic
:: (k -> p -> v -> (q, w))
-> HashPSQ k p v
-> HashPSQ k q w
unsafeMapMonotonic f (HashPSQ ipsq) =
HashPSQ (IntPSQ.unsafeMapMonotonic (\_ p v -> mapBucket p v) ipsq)
where
mapBucket p (B k v opsq) =
let (p', v') = f k p v
in (p', B k v' (OrdPSQ.unsafeMapMonotonic f opsq))
-- | /O(n)/ Strict fold over every key, priority and value in the queue. The order
-- in which the fold is performed is not specified.
{-# INLINABLE fold' #-}
fold' :: (k -> p -> v -> a -> a) -> a -> HashPSQ k p v -> a
fold' f acc0 (HashPSQ ipsq) = IntPSQ.fold' goBucket acc0 ipsq
where
goBucket _ p (B k v opsq) acc =
let !acc1 = f k p v acc
!acc2 = OrdPSQ.fold' f acc1 opsq
in acc2
--------------------------------------------------------------------------------
-- Unsafe operations
--------------------------------------------------------------------------------
{-# INLINABLE unsafeLookupIncreasePriority #-}
unsafeLookupIncreasePriority
:: (Hashable k, Ord k, Ord p)
=> k -> p -> HashPSQ k p v -> (Maybe (p, v), HashPSQ k p v)
unsafeLookupIncreasePriority k p (HashPSQ ipsq) =
(mbPV, HashPSQ ipsq')
where
(!mbPV, !ipsq') = IntPSQ.unsafeLookupIncreasePriority
(\bp b@(B bk bx opsq) ->
if k == bk
then let (bp', b') = mkBucket k p bx opsq
in (Just (bp, bx), bp', b')
-- TODO (jaspervdj): Still a lookup-insert here: 3 traversals?
else case OrdPSQ.lookup k opsq of
Nothing -> (Nothing, bp, b)
Just (p', x) ->
let b' = B bk bx (OrdPSQ.insert k p x opsq)
in (Just (p', x), bp, b'))
(hash k)
ipsq
{-# INLINABLE unsafeInsertIncreasePriority #-}
unsafeInsertIncreasePriority
:: (Hashable k, Ord k, Ord p)
=> k -> p -> v -> HashPSQ k p v -> HashPSQ k p v
unsafeInsertIncreasePriority k p x (HashPSQ ipsq) = HashPSQ $
IntPSQ.unsafeInsertWithIncreasePriority
(\_ _ bp (B bk bx opsq) ->
if k == bk
then mkBucket k p x opsq
else (bp, B bk bx (OrdPSQ.insert k p x opsq)))
(hash k)
p
(B k x OrdPSQ.empty)
ipsq
{-# INLINABLE unsafeInsertIncreasePriorityView #-}
unsafeInsertIncreasePriorityView
:: (Hashable k, Ord k, Ord p)
=> k -> p -> v -> HashPSQ k p v -> (Maybe (p, v), HashPSQ k p v)
unsafeInsertIncreasePriorityView k p x (HashPSQ ipsq) =
(mbEvicted, HashPSQ ipsq')
where
(mbBucket, ipsq') = IntPSQ.unsafeInsertWithIncreasePriorityView
(\_ _ bp (B bk bx opsq) ->
if k == bk
then mkBucket k p x opsq
else (bp, B bk bx (OrdPSQ.insert k p x opsq)))
(hash k)
p
(B k x OrdPSQ.empty)
ipsq
mbEvicted = case mbBucket of
Nothing -> Nothing
Just (bp, B bk bv opsq)
| k == bk -> Just (bp, bv)
| otherwise -> OrdPSQ.lookup k opsq
--------------------------------------------------------------------------------
-- Validity check
--------------------------------------------------------------------------------
-- | /O(n^2)/ Internal function to check if the 'HashPSQ' is valid, i.e. if all
-- invariants hold. This should always be the case.
valid :: (Hashable k, Ord k, Ord p) => HashPSQ k p v -> Bool
valid t@(HashPSQ ipsq) =
not (hasDuplicateKeys t) &&
and [validBucket k p bucket | (k, p, bucket) <- IntPSQ.toList ipsq]
hasDuplicateKeys :: (Hashable k, Ord k, Ord p) => HashPSQ k p v -> Bool
hasDuplicateKeys = any (> 1) . List.map length . List.group . List.sort . keys
validBucket :: (Hashable k, Ord k, Ord p) => Int -> p -> Bucket k p v -> Bool
validBucket h p (B k _ opsq) =
OrdPSQ.valid opsq &&
-- Check that the first element of the bucket has lower priority than all
-- the other elements.
and [(p, k) < (p', k') && hash k' == h | (k', p', _) <- OrdPSQ.toList opsq]
| bttr/psqueues | src/Data/HashPSQ/Internal.hs | bsd-3-clause | 19,205 | 0 | 21 | 5,541 | 5,720 | 3,074 | 2,646 | 302 | 5 |
{-# LANGUAGE OverloadedStrings #-}
{-|
Module : Jobutil
Description : Helper functions for Jobpack module
-}
module Jobutil
where
import Data.List
import Reader (split_scheme_loc, http_reader)
import Data.Aeson
import Data.Maybe (fromJust)
import Control.Applicative ((<$>), (<*>), empty)
import Control.Monad
import qualified Data.ByteString.Lazy.Char8 as BL
-- | Handle ddfs inputs differently
get_effective_inputs :: [String] -> IO [[String]]
get_effective_inputs inputs =
mapM ddfs_change inputs
-- | Checks the type of an input; if it is a DDFS tag, asks for the actual input locations
ddfs_change :: String -> IO [String]
ddfs_change inpt =
let (scheme, rest) = split_scheme_loc inpt in
if scheme == "tag"
then do urls <- get_urls rest
return $ map head urls --TODO only first replica location
else return [inpt]
-- | Asks Disco for inputs associated with a tag
get_urls :: String -> IO [[String]]
get_urls tag = do
let url = tag_url tag
body <- http_reader url
    -- check response status
let (_,_,urls) = tag_info body
return urls
data Tag_info = Tag_info {
version :: Maybe Int,
tag_id :: Maybe String,
last_modified :: Maybe String,
urls :: Maybe [[String]],
user_data :: Maybe [(String,String)]
} deriving (Show)
--TODO change that instance (problem: last_modified and user_data)
instance FromJSON Tag_info where
parseJSON (Object v) =
Tag_info <$>
(v .:? "version") <*>
(v .:? "id") <*>
(v .:? "last_modified") <*>
(v .:? "urls") <*>
(v .:? "user_data")
parseJSON _ = empty
tag_info :: String -> (Int, String, [[String]])
tag_info body = (fromJust $ version t_info,fromJust $ tag_id t_info,fromJust $ urls t_info)
where
t_info = fromJust (decode $ BL.pack body :: Maybe Tag_info)
--TODO hardcoded
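-- e.g. @tag_url "mydata" == "http://localhost:8989/ddfs/tag/mydata"@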
tag_url :: String -> String
tag_url tag =
--"http://" ++ Setting("DISCO_MASTER_HOST") ++ ":" ++ Setting("DISCO_PORT") ++ "/ddfs/tag/" ++ tag
"http://" ++ "localhost" ++ ":" ++ "8989" ++ "/ddfs/tag/" ++ tag
| zuzia/haskell_worker | src/Jobutil.hs | bsd-3-clause | 2,077 | 0 | 12 | 468 | 562 | 308 | 254 | 47 | 2 |
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE PatternGuards #-}
module Game
(
-- Initial position using default board representation and its type.
initialPosition
, GamePosition
-- Initial position (polymorphic).
, position0
-- Applying moves (one and many) to a position.
, doMove
, doMoves
-- Legal moves in this position.
, legalMoves
-- Checks whether round is over and gets its final value.
, isOver
, roundResult
-- Positions which may result from given position by applying
-- certain number of legal moves.
, legalPositions
-- Positions which may result from given position by applying one
-- move.
, nextPositions
) where
import Types
import Utils
import Board.VectorBased
type GamePosition = Position VBoard
position0 :: Board b => Position b
position0 = Position
{ pBoard = board0
, pPlayer = Black
, pMoves = []
, pMoveNo = 0
}
initialPosition :: GamePosition
initialPosition = position0
doMove :: Board b => Move -> Position b -> Position b
doMove m Position {..} = Position
{ pBoard = updateBoard pBoard m
, pPlayer = opponent pPlayer
, pMoves = m : pMoves
, pMoveNo = pMoveNo + 1
}
doMoves :: Board b => [Move] -> Position b -> Position b
doMoves [] p = p
doMoves (m:ms) p =
let p' = doMove m p
in if isOver p then p' else doMoves ms p'
{-# SPECIALIZE roundResult :: Position VBoard -> Result #-}
roundResult :: Board b => Position b -> Result
roundResult Position{..}
| reachedHomeRow || isDeadlock = Winner $ opponent pPlayer
| otherwise = InProgress
where
reachedHomeRow = case pMoves of
[] -> False
Move _ to :_ -> snd to == homeRow pPlayer
isDeadlock = case pMoves of
m:pm:_ -> isPass m && isPass pm
_ -> False
isOver :: Board b => Position b -> Bool
isOver p | Winner _ <- roundResult p = True
isOver _ = False
{-# SPECIALIZE legalPositions :: Int -> Position VBoard -> [Position VBoard] #-}
legalPositions :: Board b => Int -> Position b -> [Position b]
legalPositions 0 p = [p]
legalPositions d p = [ doMove m p | m <- legalMoves p ] >>= legalPositions (d - 1)
nextPositions :: Board b => Position b -> [Position b]
nextPositions = legalPositions 1
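-- A small illustrative driver (a sketch, not part of the original API):
-- repeatedly play the first legal move, up to a fuel bound. Handy for
-- smoke-testing move generation.
playoutFirst :: Int -> GamePosition -> GamePosition
playoutFirst fuel p
  | fuel <= 0 || isOver p = p
  | otherwise = case legalMoves p of
      []    -> p
      m : _ -> playoutFirst (fuel - 1) (doMove m p)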
--- Move generation.
----------------------------------------
legalMoves :: Board b => Position b -> [Move]
legalMoves r | isOver r = []
legalMoves r = if null moves then passMove r else moves where
moves = [ Move from to
| from <- requiredFroms r
, to <- possibleTos from (pPlayer r) (pBoard r)
]
possibleTos :: Board b => Coord -> Player -> b -> [Coord]
possibleTos (x,y) p b =
-- Generate moves in the nicely sorted order, so that we don't need
-- to sort them later. We put longer moves first, since they are
-- typically more forcing.
reverse $ mergeSorted (longerFirst p snd) $
case p of
Black ->
[ takeValid [ (x, y+i) | i <- [1 .. 8-y] ] -- straight up
, takeValid [ (x-i,y+i) | i <- [1 .. min (x-1) (8-y)] ] -- left up
, takeValid [ (x+i,y+i) | i <- [1 .. min (8-x) (8-y)] ] -- right up
]
White ->
[ takeValid [ (x, y-i) | i <- [1 .. y-1] ] -- straight down
, takeValid [ (x-i,y-i) | i <- [1 .. min (x-1) (y-1)] ] -- left down
, takeValid [ (x+i,y-i) | i <- [1 .. min (8-x) (y-1)] ] -- right down
]
where
takeValid = takeWhile (fieldIsEmpty b)
longerFirst :: Player -> (a -> Int) -> a -> a -> Bool
longerFirst p f y1 y2 = case p of
Black -> f y1 < f y2
White -> f y1 > f y2
requiredFrom :: Board b => Position b -> Move -> Coord
requiredFrom Position{..} (Move _ to) =
pieceCoord pBoard pPlayer (fieldColor pBoard to)
requiredFroms :: Board b => Position b -> [Coord]
requiredFroms p = case pMoves p of
[] -> initialFroms (pPlayer p)
m : _ -> [ requiredFrom p m ]
initialFroms :: Player -> [Coord]
initialFroms Black = [(x,1) | x <- [1..8]]
initialFroms White = [(x,8) | x <- [1..8]]
passMove :: Board b => Position b -> [Move]
passMove p = case pMoves p of
[] -> []
m:_ -> let from = requiredFrom p m
in [Move from from]
isPass :: Move -> Bool
isPass (Move from to) = from == to
homeRow :: Player -> Int
homeRow Black = 1
homeRow White = 8
| sphynx/hamisado | Game.hs | bsd-3-clause | 4,252 | 0 | 17 | 1,089 | 1,552 | 812 | 740 | 100 | 3 |
{-| The meat of the master node: Calls @gipeda@ and optionally deploys the
website via @rsync@.
Upon deployment, repositories are mapped to URLs via specific policies
verified in @sshSubPathTestFailures@.
-}
module FeedGipeda.Master.Finalize
( regenerateAndDeploy
) where
import Control.Logging as Logging
import Control.Monad (unless, void, when)
import Data.Aeson ((.=))
import qualified Data.Aeson as Json
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as LBS
import Data.Foldable (find)
import Data.List (elemIndex, stripPrefix)
import Data.Maybe (fromMaybe, isJust, isNothing)
import Data.Set (Set)
import qualified Data.Set as Set
import qualified Data.Text as Text
import qualified Data.Yaml as Yaml
import qualified FeedGipeda.Assets as Assets
import qualified FeedGipeda.Gipeda as Gipeda
import FeedGipeda.GitShell (SHA)
import qualified FeedGipeda.GitShell as GitShell
import FeedGipeda.Prelude
import FeedGipeda.Repo (Repo)
import qualified FeedGipeda.Repo as Repo
import FeedGipeda.Types (Deployment (..))
import Network.URI (URI, uriAuthority, uriPath, uriRegName,
uriToString)
import System.Directory (copyFile, createDirectoryIfMissing,
doesFileExist)
import System.Exit (ExitCode (..))
import System.FilePath (addTrailingPathSeparator, dropExtension,
dropFileName, takeBaseName, (</>))
import System.Process (cwd, proc, readCreateProcessWithExitCode)
executeIn :: Maybe FilePath -> FilePath -> [String] -> IO String
executeIn cwd executable args = do
(exitCode, stdout, stderr) <-
readCreateProcessWithExitCode (proc executable args) { cwd = cwd } ""
logDebug (takeBaseName executable ++ ": " ++ show exitCode)
case exitCode of
ExitFailure _ -> logDebug stderr >> logDebug stdout
_ -> return ()
-- That's too much even for debug
--logDebug "stdout:"
--logDebug stdout
--logDebug "stderr:"
--logDebug stderr
return stdout
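-- Illustrative invocations (hypothetical arguments; any executable on the
-- PATH behaves the same way):
--
-- > executeIn (Just "/tmp/project") "git" ["rev-parse", "HEAD"]
-- > executeIn Nothing "rsync" ["--version"]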
{-| @regenerateAndDeploy gipeda rsyncPath repos repo@ updates the @site/@ sub
    folder by calling @gipeda@ in @repo@'s @projectDir@. That also updates
the @backlog.txt@, which will possibly kick off other benchmarks.
After the site has been regenerated, the changes are deployed via @rsync@
    to @remoteDir@, if present. The sub directory to which the site is synced
    follows a mapping which should satisfy the tests in @sshSubPathTestFailures@.
-}
regenerateAndDeploy :: FilePath -> Deployment -> Set Repo -> Repo -> IO ()
regenerateAndDeploy gipeda deployment repos repo = do
project <- Repo.projectDir repo
logInfo ("Regenerating " ++ Repo.uri repo ++ " (" ++ Repo.uniqueName repo ++ ")")
clone <- Repo.cloneDir repo
first <- GitShell.firstCommit clone
if isJust first
then do
saveSettings repo
executeIn (Just project) gipeda ["--always-make"]
rsyncSite repos repo deployment
return ()
else
logInfo "There were no commits"
saveSettings :: Repo -> IO ()
saveSettings repo = do
settingsFile <- Repo.settingsFile repo
settings <- Gipeda.settingsForRepo repo
Yaml.encodeFile settingsFile settings
type SSHSubPathPolicy
= (Repo -> Bool, Repo -> FilePath)
sshSubPath :: Repo -> FilePath
sshSubPath repo =
maybe Repo.uniqueName snd (find (\(matches, _) -> matches repo) policies) repo
where
policies :: [SSHSubPathPolicy]
policies =
[ gitHubPolicy
, perfHaskellPolicy
]
stripWWW :: String -> String
stripWWW s =
fromMaybe s (stripPrefix "www." s)
matchRegName :: String -> Repo -> Bool
matchRegName regName =
maybe False ((== regName) . stripWWW . uriRegName) . uriAuthority . Repo.unRepo
gitHubPolicy :: SSHSubPathPolicy
gitHubPolicy =
  -- tail because the path will start with a slash
(matchRegName "github.com", dropExtension . tail . uriPath . Repo.unRepo)
perfHaskellPolicy :: SSHSubPathPolicy
perfHaskellPolicy =
(matchRegName "git.haskell.org", takeBaseName . uriPath . Repo.unRepo)
sshSubPathTestFailures :: [(Repo, String, String)]
sshSubPathTestFailures =
(map (\(r, e) -> (r, e, sshSubPath r)) . filter isFailure)
[ (Repo.unsafeFromString "https://github.com/sgraf812/benchmark-test", "sgraf812/benchmark-test")
, (Repo.unsafeFromString "https://www.github.com/sgraf812/benchmark-test", "sgraf812/benchmark-test")
, (Repo.unsafeFromString "https://git.haskell.org/sgraf812/benchmark-test", "benchmark-test")
, (Repo.unsafeFromString "https://bitbucket.org/sgraf812/benchmark-test", "benchmark-test-2921196486978765793")
]
where
isFailure (repo, expected) =
sshSubPath repo /= expected
generateMapping :: FilePath -> Set Repo -> IO ()
generateMapping file =
LBS.writeFile file . Json.encode . Json.object . map sshMapping . Set.toList
where
sshMapping repo =
Text.pack (sshSubPath repo) .= Repo.uri repo
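-- For a set containing only the GitHub repo used in 'sshSubPathTestFailures',
-- the generated file would hold JSON along these lines (illustrative):
--
-- > {"sgraf812/benchmark-test":"https://github.com/sgraf812/benchmark-test"}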
parseSSHUri :: String -> (Maybe String, FilePath)
parseSSHUri sshUri =
case elemIndex ':' (reverse sshUri) of
Nothing -> (Nothing, sshUri) -- Assume it's a local file path
Just n ->
let
n' = length sshUri - n
in
(Just (take (n' - 1) sshUri), drop n' sshUri)
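-- Examples (illustrative):
--
-- > parseSSHUri "deploy@host.example.org:/var/www/gipeda"
-- >   == (Just "deploy@host.example.org", "/var/www/gipeda")
-- > parseSSHUri "/var/www/gipeda" == (Nothing, "/var/www/gipeda")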
rsyncSite :: Set Repo -> Repo -> Deployment -> IO ()
rsyncSite repos repo NoDeployment = return ()
rsyncSite repos repo (Deploy remoteDir) = do
projectDir <- Repo.projectDir repo
-- we need the trailing path separator, otherwise it will add a site
-- sub directory.
-- The rsync-path parameter is used for a little hack that ensures the
  -- remote directory actually exists on the remote machine.
-- Otherwise, we couldn't cope with nested sshSubPaths.
-- -a: archive mode (many different flags), -v verbose, -z compress
logInfo "rsyncing"
let (sshPart, filePart) = parseSSHUri remoteDir
case sshPart of
Nothing -> createDirectoryIfMissing True (filePart </> sshSubPath repo)
Just uri ->
void $ executeIn Nothing "ssh" [uri, "mkdir -p " ++ filePart </> sshSubPath repo]
executeIn Nothing "rsync"
[ "-avz"
, addTrailingPathSeparator (projectDir </> "site")
, remoteDir </> sshSubPath repo
]
generateMapping "sites.json" repos
executeIn Nothing "rsync"
[ "-avz"
, "sites.json"
, remoteDir </> "sites.json"
]
BS.writeFile "default_index.html" Assets.defaultIndexHtml
executeIn Nothing "rsync"
[ "-avz"
, "--ignore-existing" -- so that users can provide their own index.html
, "default_index.html"
, remoteDir </> "index.html"
]
return ()
| sgraf812/feed-gipeda | src/FeedGipeda/Master/Finalize.hs | bsd-3-clause | 6,992 | 0 | 15 | 1,730 | 1,548 | 829 | 719 | 127 | 2 |
-- |
-- Module : Language.SequentCore.Arity
-- Description : Arity analysis and eta-expansion
-- Maintainer : [email protected]
-- Stability : experimental
--
-- Arity and eta expansion
{-
%
% (c) The University of Glasgow 2006
% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
%
Arity and eta expansion
-}
{-# LANGUAGE CPP, ViewPatterns #-}
{-# OPTIONS -fno-warn-tabs #-}
module Language.SequentCore.Arity (
manifestArity, termArity, termBotStrictness_maybe,
termEtaExpandArity, findRhsArity, CheapMeasure, etaExpand, etaExpandRhs
) where
import Language.SequentCore.Syntax
import Language.SequentCore.Translate
import CoreFVs ( exprFreeVars )
import CoreSyn ( tickishIsCode )
import CoreSubst
import Demand
import Var
import VarEnv
import Id
import Type
import TyCon ( initRecTc, checkRecTc )
import Coercion
import BasicTypes
import Unique
import DynFlags ( DynFlags, GeneralFlag(..), gopt )
import Outputable
import FastString
import Pair
import Control.Exception ( assert )
{-
%************************************************************************
%* *
manifestArity and termArity
%* *
%************************************************************************
termArity is a cheap-and-cheerful version of termEtaExpandArity.
It tells how many things the expression can be applied to before doing
any work. It doesn't look inside cases, lets, etc. The idea is that
termEtaExpandArity will do the hard work, leaving something that's easy
for termArity to grapple with. In particular, Simplify uses termArity to
compute the ArityInfo for the Id.
Originally I thought that it was enough just to look for top-level lambdas, but
it isn't. I've seen this
foo = PrelBase.timesInt
We want foo to get arity 2 even though the eta-expander will leave it
unchanged, in the expectation that it'll be inlined. But occasionally it
isn't, because foo is blacklisted (used in a rule).
Similarly, see the ok_note check in termEtaExpandArity. So
f = __inline_me (\x -> e)
won't be eta-expanded.
And in any case it seems more robust to have termArity be a bit more intelligent.
But note that (\x y z -> f x y z)
should have arity 3, regardless of f's arity.
-}
manifestArity :: SeqCoreTerm -> Arity
-- ^ manifestArity sees how many leading value lambdas there are
manifestArity v
= go v
where
go (Lam v e) | isId v = 1 + go e
| otherwise = go e
go (Compute _ (Eval v fs Return)) | all skip fs = go v
go _ = 0
skip (Tick ti) = not (tickishIsCode ti)
skip (Cast _) = True
skip (App _) = False
---------------
termArity :: SeqCoreTerm -> Arity
-- ^ An approximate, fast, version of 'termEtaExpandArity'
termArity e = goT e
where
goT (Var v) = idArity v
goT (Lam x e) | isId x = goT e + 1
| otherwise = goT e
goT (Compute _ (Eval v fs e)) = goF v fs e
goT _ = 0
goF v (Tick t : fs) e | not (tickishIsCode t)
= goF v fs e
goF v (Cast co : fs) e = trim_arity (goF v fs e) (pSnd (coercionKind co))
-- Note [termArity invariant]
goF v (App a : fs) e | Type {} <- a
= goF v fs e
| isTrivialTerm a
= (goF v fs e - 1) `max` 0
-- See Note [termArity for applications]
-- NB: coercions count as a value argument
goF v [] Return = goT v
goF _ _ _ = 0
trim_arity :: Arity -> Type -> Arity
trim_arity arity ty = arity `min` length (typeArity ty)
---------------
typeArity :: Type -> [OneShotInfo]
-- How many value arrows are visible in the type?
-- We look through foralls, and newtypes
-- See Note [termArity invariant]
typeArity ty
= go initRecTc ty
where
go rec_nts ty
| Just (_, ty') <- splitForAllTy_maybe ty
= go rec_nts ty'
| Just (arg,res) <- splitFunTy_maybe ty
= typeOneShot arg : go rec_nts res
| Just (tc,tys) <- splitTyConApp_maybe ty
, Just (ty', _) <- instNewTyCon_maybe tc tys
, Just rec_nts' <- checkRecTc rec_nts tc -- See Note [Expanding newtypes]
-- in TyCon
-- , not (isClassTyCon tc) -- Do not eta-expand through newtype classes
-- -- See Note [Newtype classes and eta expansion]
-- (no longer required)
= go rec_nts' ty'
-- Important to look through non-recursive newtypes, so that, eg
-- (f x) where f has arity 2, f :: Int -> IO ()
-- Here we want to get arity 1 for the result!
--
-- AND through a layer of recursive newtypes
-- e.g. newtype Stream m a b = Stream (m (Either b (a, Stream m a b)))
| otherwise
= []
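-- Illustrative results (under the rules above): @typeArity (Int -> Bool -> Char)@
-- has length 2, and @typeArity (Int -> IO ())@ also has length 2, because the
-- IO newtype unwraps to a State#-passing function.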
---------------
termBotStrictness_maybe :: SeqCoreTerm -> Maybe (Arity, StrictSig)
-- A cheap and cheerful function that identifies bottoming functions
-- and gives them a suitable strictness signature. It's used during
-- float-out
termBotStrictness_maybe e
= case getBotArity (arityType env e) of
Nothing -> Nothing
Just ar -> Just (ar, sig ar)
where
env = AE { ae_bndrs = [], ae_ped_bot = True, ae_cheap_fn = \ _ _ -> False }
sig ar = mkClosedStrictSig (replicate ar topDmd) botRes
-- For this purpose we can be very simple
{-
Note [termArity invariant]
~~~~~~~~~~~~~~~~~~~~~~~~~~
termArity has the following invariant:
(1) If typeArity (exprType e) = n,
then manifestArity (etaExpand e n) = n
That is, etaExpand can always expand as much as typeArity says
So the case analysis in etaExpand and in typeArity must match
(2) termArity e <= typeArity (exprType e)
(3) Hence if (termArity e) = n, then manifestArity (etaExpand e n) = n
That is, if termArity says "the arity is n" then etaExpand really
can get "n" manifest lambdas to the top.
Why is this important? Because
- In TidyPgm we use termArity to fix the *final arity* of
each top-level Id, and in
- In CorePrep we use etaExpand on each rhs, so that the visible lambdas
actually match that arity, which in turn means
that the StgRhs has the right number of lambdas
An alternative would be to do the eta-expansion in TidyPgm, at least
for top-level bindings, in which case we would not need the trim_arity
in termArity. That is a less local change, so I'm going to leave it for today!
Note [Newtype classes and eta expansion]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
NB: this nasty special case is no longer required, because
for newtype classes we don't use the class-op rule mechanism
at all. See Note [Single-method classes] in TcInstDcls. SLPJ May 2013
-------- Old out of date comments, just for interest -----------
We have to be careful when eta-expanding through newtypes. In general
it's a good idea, but annoyingly it interacts badly with the class-op
rule mechanism. Consider
class C a where { op :: a -> a }
instance C b => C [b] where
op x = ...
These translate to
co :: forall a. (a->a) ~ C a
$copList :: C b -> [b] -> [b]
$copList d x = ...
$dfList :: C b -> C [b]
{-# DFunUnfolding = [$copList] #-}
$dfList d = $copList d |> co@[b]
Now suppose we have:
dCInt :: C Int
blah :: [Int] -> [Int]
blah = op ($dfList dCInt)
Now we want the built-in op/$dfList rule will fire to give
blah = $copList dCInt
But with eta-expansion 'blah' might (and in Trac #3772, which is
slightly more complicated, does) turn into
blah = op (\eta. ($dfList dCInt |> sym co) eta)
and now it is *much* harder for the op/$dfList rule to fire, because
exprIsConApp_maybe won't hold of the argument to op. I considered
trying to *make* it hold, but it's tricky and I gave up.
The test simplCore/should_compile/T3722 is an excellent example.
-------- End of old out of date comments, just for interest -----------
Note [termArity for applications]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we come to an application we check that the arg is trivial.
eg f (fac x) does not have arity 2,
even if f has arity 3!
* We require that the arg is trivial rather than merely cheap. Suppose f has arity 2.
Then f (Just y)
has arity 0, because if we gave it arity 1 and then inlined f we'd get
let v = Just y in \w. <f-body>
which has arity 0. And we try to maintain the invariant that we don't
have arity decreases.
* The `max 0` is important! (\x y -> f x) has arity 2, even if f is
unknown, hence arity 0
%************************************************************************
%* *
Computing the "arity" of an expression
%* *
%************************************************************************
Note [Definition of arity]
~~~~~~~~~~~~~~~~~~~~~~~~~~
The "arity" of an expression 'e' is n if
applying 'e' to *fewer* than n *value* arguments
converges rapidly
Or, to put it another way
there is no work lost in duplicating the partial
application (e x1 .. x(n-1))
In the divergent case, no work is lost by duplicating because if the thing
is evaluated once, that's the end of the program.
Or, to put it another way, in any context C
C[ (\x1 .. xn. e x1 .. xn) ]
is as efficient as
C[ e ]
It's all a bit more subtle than it looks:
Note [One-shot lambdas]
~~~~~~~~~~~~~~~~~~~~~~~
Consider one-shot lambdas
let x = expensive in \y z -> E
We want this to have arity 1 if the \y-abstraction is a 1-shot lambda.
Note [Dealing with bottom]
~~~~~~~~~~~~~~~~~~~~~~~~~~
A Big Deal with computing arities is expressions like
f = \x -> case x of
True -> \s -> e1
False -> \s -> e2
This happens all the time when f :: Bool -> IO ()
In this case we do eta-expand, in order to get that \s to the
top, and give f arity 2.
This isn't really right in the presence of seq. Consider
(f bot) `seq` 1
This should diverge! But if we eta-expand, it won't. We ignore this
"problem" (unless -fpedantic-bottoms is on), because being scrupulous
would lose an important transformation for many programs. (See
Trac #5587 for an example.)
Consider also
f = \x -> error "foo"
Here, arity 1 is fine. But if it is
f = \x -> case x of
True -> error "foo"
False -> \y -> x+y
then we want to get arity 2. Technically, this isn't quite right, because
(f True) `seq` 1
should diverge, but it'll converge if we eta-expand f. Nevertheless, we
do so; it improves some programs significantly, and increasing convergence
isn't a bad thing. Hence the ABot/ATop in ArityType.
So these two transformations aren't always the Right Thing, and we
have several tickets reporting unexpected behaviour resulting from
this transformation. So we try to limit it as much as possible:
(1) Do NOT move a lambda outside a known-bottom case expression
case undefined of { (a,b) -> \y -> e }
This showed up in Trac #5557
(2) Do NOT move a lambda outside a case if all the branches of
the case are known to return bottom.
case x of { (a,b) -> \y -> error "urk" }
This case is less important, but the idea is that if the fn is
going to diverge eventually anyway then getting the best arity
isn't an issue, so we might as well play safe
(3) Do NOT move a lambda outside a case unless
(a) The scrutinee is ok-for-speculation, or
(b) There is an enclosing value \x, and the scrutinee is x
E.g. let x = case y of ( DEFAULT -> \v -> blah }
We don't move the \y out. This is pretty arbitrary; but it
catches the common case of doing `seq` on y.
This is the reason for the under_lam argument to arityType.
See Trac #5625
Of course both (1) and (2) are readily defeated by disguising the bottoms.
4. Note [Newtype arity]
~~~~~~~~~~~~~~~~~~~~~~~~
Non-recursive newtypes are transparent, and should not get in the way.
We do (currently) eta-expand recursive newtypes too. So if we have, say
newtype T = MkT ([T] -> Int)
Suppose we have
e = coerce T f
where f has arity 1. Then: etaExpandArity e = 1;
that is, etaExpandArity looks through the coerce.
When we eta-expand e to arity 1: eta_expand 1 e T
we want to get: coerce T (\x::[T] -> (coerce ([T]->Int) e) x)
HOWEVER, note that if you use coerce bogusly you can get
coerce Int negate
And since negate has arity 2, you might try to eta expand. But you can't
decompose Int to a function type. Hence the final case in eta_expand.
Note [The state-transformer hack]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose we have
f = e
where e has arity n. Then, if we know from the context that f has
a usage type like
t1 -> ... -> tn -1-> t(n+1) -1-> ... -1-> tm -> ...
then we can expand the arity to m. This usage type says that
any application (x e1 .. en) will be applied uniquely to (m-n) more args
Consider f = \x. let y = <expensive>
in case x of
True -> foo
False -> \(s:RealWorld) -> e
where foo has arity 1. Then we want the state hack to
apply to foo too, so we can eta expand the case.
Then we expect that if f is applied to one arg, it'll be applied to two
(that's the hack -- we don't really know, and sometimes it's false)
See also Id.isOneShotBndr.
Note [State hack and bottoming functions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It's a terrible idea to use the state hack on a bottoming function.
Here's what happens (Trac #2861):
f :: String -> IO T
f = \p. error "..."
Eta-expand, using the state hack:
f = \p. (\s. ((error "...") |> g1) s) |> g2
g1 :: IO T ~ (S -> (S,T))
g2 :: (S -> (S,T)) ~ IO T
Extrude the g2
f' = \p. \s. ((error "...") |> g1) s
f = f' |> (String -> g2)
Discard args for bottoming function
f' = \p. \s. ((error "...") |> g1 |> g3
g3 :: (S -> (S,T)) ~ (S,T)
Extrude g1.g3
f'' = \p. \s. (error "...")
f' = f'' |> (String -> S -> g1.g3)
And now we can repeat the whole loop. Aargh! The bug is in applying the
state hack to a function which then swallows the argument.
This arose in another guise in Trac #3959. Here we had
catch# (throw exn >> return ())
Note that (throw :: forall a e. Exn e => e -> a) is called with [a = IO ()].
After inlining (>>) we get
catch# (\_. throw {IO ()} exn)
We must *not* eta-expand to
catch# (\_ _. throw {...} exn)
because 'catch#' expects to get a (# _,_ #) after applying its argument to
a State#, not another function!
In short, we use the state hack to allow us to push let inside a lambda,
but not to introduce a new lambda.
Note [ArityType]
~~~~~~~~~~~~~~~~
ArityType is the result of a compositional analysis on expressions,
from which we can decide the real arity of the expression (extracted
with function termEtaExpandArity).
Here is what the fields mean. If an arbitrary expression 'f' has
ArityType 'at', then
* If at = ABot n, then (f x1..xn) definitely diverges. Partial
applications to fewer than n args may *or may not* diverge.
We allow ourselves to eta-expand bottoming functions, even
if doing so may lose some `seq` sharing,
let x = <expensive> in \y. error (g x y)
==> \y. let x = <expensive> in error (g x y)
* If at = ATop as, and n=length as,
then expanding 'f' to (\x1..xn. f x1 .. xn) loses no sharing,
    assuming the calls of f respect the one-shot-ness of
its definition.
    NB 'f' is an arbitrary expression, eg (f = g e1 e2). This 'f'
    can have ArityType as ATop, with length as > 0, only if e1 e2 are
    themselves cheap.
* In both cases, f, (f x1), ... (f x1 ... f(n-1)) are definitely
really functions, or bottom, but *not* casts from a data type, in
at least one case branch. (If it's a function in one case branch but
an unsafe cast from a data type in another, the program is bogus.)
So eta expansion is dynamically ok; see Note [State hack and
bottoming functions], the part about catch#
Example:
f = \x\y. let v = <expensive> in
\s(one-shot) \t(one-shot). blah
'f' has ArityType [ManyShot,ManyShot,OneShot,OneShot]
The one-shot-ness means we can, in effect, push that
'let' inside the \st.
Suppose f = \xy. x+y
Then f :: AT [False,False] ATop
f v :: AT [False] ATop
f <expensive> :: AT [] ATop
-------------------- Main arity code ----------------------------
-}
data ArityType = ATop | AOkSpec | AArr OneShotInfo ArityType | ABot Arity
data CoArityType = CTop | CApp CheapFlag CoArityType | CCase ArityType
| CTrunc Arity CoArityType
data CheapFlag = Cheap | NotCheap
aTop :: [OneShotInfo] -> ArityType
aTop = foldr AArr ATop
cheapFlag :: Bool -> CheapFlag
cheapFlag True = Cheap
cheapFlag False = NotCheap
-- ^ The Arity returned is the number of value args the
-- expression can be applied to without doing much work
termEtaExpandArity :: DynFlags -> SeqCoreTerm -> Arity
-- termEtaExpandArity is used when eta expanding
-- e ==> \xy -> e x y
termEtaExpandArity dflags e
= arityTypeArity (arityType env e)
where
env = AE { ae_bndrs = []
, ae_cheap_fn = mk_cheap_fn dflags isCheapApp
, ae_ped_bot = gopt Opt_PedanticBottoms dflags }
getBotArity :: ArityType -> Maybe Arity
-- Arity of a divergent function
getBotArity (ABot n) = Just n
getBotArity _ = Nothing
arityTypeArity :: ArityType -> Arity
arityTypeArity ATop = 0
arityTypeArity AOkSpec = 0
arityTypeArity (ABot a) = a
arityTypeArity (AArr _ at) = 1 + arityTypeArity at
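-- Illustrative values: @arityTypeArity (AArr OneShotLam (AArr NoOneShotInfo ATop)) == 2@
-- and @arityTypeArity (ABot 3) == 3@ (OneShotLam/NoOneShotInfo come from BasicTypes).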
mk_cheap_fn :: DynFlags -> CheapAppMeasure -> CheapMeasure
mk_cheap_fn dflags cheap_app
| not (gopt Opt_DictsCheap dflags)
= \e _ -> termIsCheapBy cheap_app e
| otherwise
= \e mb_ty -> termIsCheapBy cheap_app e
|| case mb_ty of
Nothing -> False
Just ty -> isDictLikeTy ty
----------------------
findRhsArity :: DynFlags -> Id -> SeqCoreTerm -> Arity -> Arity
-- This implements the fixpoint loop for arity analysis
-- See Note [Arity analysis]
findRhsArity dflags bndr rhs old_arity
= go (rhsEtaExpandArity dflags init_cheap_app rhs)
-- We always call termEtaExpandArity once, but usually
-- that produces a result equal to old_arity, and then
-- we stop right away (since arities should not decrease)
-- Result: the common case is that there is just one iteration
where
init_cheap_app :: CheapAppMeasure
init_cheap_app fn n_val_args
| fn == bndr = True -- On the first pass, this binder gets infinite arity
| otherwise = isCheapApp fn n_val_args
go :: Arity -> Arity
go cur_arity
| cur_arity <= old_arity = cur_arity
| new_arity == cur_arity = cur_arity
| otherwise = assert ( new_arity < cur_arity )
#ifdef DEBUG
pprTrace "Exciting arity"
(vcat [ ppr bndr <+> ppr cur_arity <+> ppr new_arity
, ppr rhs])
#endif
go new_arity
where
new_arity = rhsEtaExpandArity dflags cheap_app rhs
cheap_app :: CheapAppMeasure
cheap_app fn n_val_args
| fn == bndr = n_val_args < cur_arity
| otherwise = isCheapApp fn n_val_args
-- ^ The Arity returned is the number of value args the
-- expression can be applied to without doing much work
rhsEtaExpandArity :: DynFlags -> CheapAppMeasure -> SeqCoreTerm -> Arity
-- termEtaExpandArity is used when eta expanding
-- e ==> \xy -> e x y
rhsEtaExpandArity dflags cheap_app e
= case arityType env e of
AArr os at
| isOneShotInfo os || has_lam e -> 1 + arityTypeArity at
-- Don't expand PAPs/thunks
-- Note [Eta expanding thunks]
ABot n -> n
_ -> 0
where
env = AE { ae_bndrs = []
, ae_cheap_fn = mk_cheap_fn dflags cheap_app
, ae_ped_bot = gopt Opt_PedanticBottoms dflags }
has_lam (Lam b e) = isId b || has_lam e
has_lam (Compute _ (Eval v fs Return))
= all skip fs && has_lam v
has_lam _ = False
skip (Tick _) = True
skip _ = False
{-
Note [Arity analysis]
~~~~~~~~~~~~~~~~~~~~~
The motivating example for arity analysis is this:
f = \x. let g = f (x+1)
in \y. ...g...
What arity does f have? Really it should have arity 2, but a naive
look at the RHS won't see that. You need a fixpoint analysis which
says it has arity "infinity" the first time round.
This example happens a lot; it first showed up in Andy Gill's thesis,
fifteen years ago! It also shows up in the code for 'rnf' on lists
in Trac #4138.
The analysis is easy to achieve because termEtaExpandArity takes an
argument
type CheapMeasure = CoreExpr -> Maybe Type -> Bool
used to decide if an expression is cheap enough to push inside a
lambda. And exprIsCheap' in turn takes an argument
type CheapAppFun = Id -> Int -> Bool
which tells when an application is cheap. This makes it easy to
write the analysis loop.
The analysis is cheap-and-cheerful because it doesn't deal with
mutual recursion. But the self-recursive case is the important one.
Note [Eta expanding through dictionaries]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If the experimental -fdicts-cheap flag is on, we eta-expand through
dictionary bindings. This improves arities. Thereby, it also
means that full laziness is less prone to floating out the
application of a function to its dictionary arguments, which
can thereby lose opportunities for fusion. Example:
foo :: Ord a => a -> ...
foo = /\a \(d:Ord a). let d' = ...d... in \(x:a). ....
-- So foo has arity 1
f = \x. foo dInt $ bar x
The (foo DInt) is floated out, and makes ineffective a RULE
foo (bar x) = ...
One could go further and make exprIsCheap reply True to any
dictionary-typed expression, but that's more work.
See Note [Dictionary-like types] in TcType.lhs for why we use
isDictLikeTy here rather than isDictTy
Note [Eta expanding thunks]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
We don't eta-expand
* Trivial RHSs x = y
* PAPs x = map g
* Thunks f = case y of p -> \x -> blah
When we see
f = case y of p -> \x -> blah
should we eta-expand it? Well, if 'x' is a one-shot state token
then 'yes' because 'f' will only be applied once. But otherwise
we (conservatively) say no. My main reason is to avoid expanding
PAPs
f = g d ==> f = \x. g d x
because that might in turn make g inline (if it has an inline pragma),
which we might not want. After all, INLINE pragmas say "inline only
when saturated" so we don't want to be too gung-ho about saturating!
-}
arityLam :: Id -> ArityType -> ArityType
arityLam _ (ABot n) = ABot (n+1)
arityLam id at = AArr (idOneShotInfo id) at
floatIn :: CheapFlag -> ArityType -> ArityType
-- We have something like (let x = E in b),
-- where b has the given arity type.
floatIn Cheap at = at
floatIn NotCheap at = keepOneShots at
-- If E is not cheap, keep arity only for one-shots
keepOneShots :: ArityType -> ArityType
keepOneShots (ABot n) = ABot n
keepOneShots (AArr os at) | isOneShotInfo os = AArr os (keepOneShots at)
| otherwise = ATop
keepOneShots AOkSpec = AOkSpec
keepOneShots ATop = ATop
arityApp :: ArityType -> CheapFlag -> ArityType
-- Processing (fun arg) where at is the ArityType of fun,
-- Knock off an argument and behave like 'let'
arityApp (ABot 0) _ = ABot 0
arityApp (ABot n) _ = ABot (n-1)
arityApp ATop _ = ATop
arityApp AOkSpec _ = ATop
arityApp (AArr _ at) cheap = floatIn cheap at
andArityType :: ArityType -> ArityType -> ArityType -- Used for branches of a 'case'
andArityType AOkSpec AOkSpec = AOkSpec
andArityType AOkSpec at = at
andArityType at AOkSpec = at
andArityType (ABot n1) (ABot n2) = ABot (n1 `min` n2)
andArityType (ABot _) at = at
andArityType at (ABot _) = at
andArityType ATop at = keepOneShots at
andArityType at ATop = keepOneShots at
-- See Note [Combining case branches]
andArityType (AArr os1 at1) (AArr os2 at2) = AArr (os1 `bestOneShot` os2)
(andArityType at1 at2)
cutArityType :: ArityType -> CoArityType -> ArityType
cutArityType (ABot 0) _ = ABot 0
cutArityType at CTop = at
cutArityType at (CTrunc n ca) = cutArityType (truncArityType at n) ca
cutArityType at (CApp ch ca) = cutArityType (arityApp at ch) ca
cutArityType (ABot _) (CCase _) = ATop
cutArityType AOkSpec (CCase at) = at
cutArityType _ (CCase at)
= case at of ABot n | n > 0 -> ATop
| otherwise -> ABot 0
_ -> keepOneShots at
truncArityType :: ArityType -> Arity -> ArityType
truncArityType (ABot n) n' = ABot (n `min` n')
truncArityType (AArr os at) n | n > 0 = AArr os (truncArityType at (n-1))
| otherwise = ATop
truncArityType ATop _ = ATop
truncArityType AOkSpec _ = AOkSpec
{-
Note [Combining case branches]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
go = \x. let z = go e0
go2 = \x. case x of
True -> z
False -> \s(one-shot). e1
in go2 x
We *really* want to eta-expand go and go2.
When combining the branches of the case we have
ATop [] `andAT` ATop [OneShotLam]
and we want to get ATop [OneShotLam]. But if the inner
lambda wasn't one-shot we don't want to do this.
(We need a proper arity analysis to justify that.)
So we combine the best of the two branches, on the (slightly dodgy)
basis that if we know one branch is one-shot, then they all must be.
-}
---------------------------
type CheapMeasure = SeqCoreTerm -> Maybe Type -> Bool
-- How to decide if an expression is cheap
-- If the Maybe is Just, the type is the type
-- of the expression; Nothing means "don't know"
data ArityEnv
= AE { ae_bndrs :: [Id] -- Enclosing value-lambda Ids
-- See Note [Dealing with bottom (3)]
, ae_cheap_fn :: CheapMeasure
, ae_ped_bot :: Bool -- True <=> be pedantic about bottoms
}
arityType :: ArityEnv -> SeqCoreTerm -> ArityType
arityType = goT
where
goT _ v | termIsBottom v = ABot 0
goT env (Var v)
| v `elem` ae_bndrs env
, not (ae_ped_bot env)
, idArity v == 0
= AOkSpec
| strict_sig <- idStrictness v
, not $ isNopSig strict_sig
, (ds, res) <- splitStrictSig strict_sig
, let arity = length ds
= if isBotRes res then ABot arity
else aTop (take arity one_shots)
| otherwise
= aTop (take (idArity v) one_shots)
where
one_shots :: [OneShotInfo] -- One-shot-ness derived from the type
one_shots = typeArity (idType v)
-- Lambdas; increase arity
goT env (Lam x e)
| isId x = arityLam x (goT env' e)
| otherwise = goT env e
where
env' = env { ae_bndrs = x : ae_bndrs env }
goT env v@(Compute _ c)
= case goC env c of
ATop | termOkForSpeculation v -> AOkSpec
| otherwise -> ATop
other -> other
goT _ _ = ATop
goC env (Eval v fs e) = let at = goT env v
ca = goF env fs e
in cutArityType at ca
goC env (Let b e)
= floatIn (cheap_bind b) (goC env e)
where
cheap_bind (NonRec pair) = cheapFlag $ is_cheap pair
cheap_bind (Rec prs) = cheapFlag $ all is_cheap prs
is_cheap (BindTerm x v) = ae_cheap_fn env v (Just (idType x))
is_cheap (BindJoin _ _) = True -- joins aren't shared anyway
goC _ (Jump {}) = ATop -- TODO
goF env (App arg : fs) e | Type _ <- arg = goF env fs e
| otherwise = CApp cheap (goF env fs e)
where cheap = cheapFlag $ ae_cheap_fn env arg Nothing
goF env (Tick ti : fs) e | not (tickishIsCode ti) = goF env fs e
| otherwise = CTop
goF env (Cast co : fs) e = CTrunc (length (typeArity toTy)) (goF env fs e)
where toTy = pSnd (coercionKind co)
goF env [] e = goE env e
goE _ (Case _ []) = CCase (ABot 0)
goE env (Case _ alts) = CCase $ foldr1 andArityType
[ goC env rhs | Alt _ _ rhs <- alts ]
goE _ Return = CTop
{-
%************************************************************************
%* *
The main eta-expander
%* *
%************************************************************************
At this point, GHC's CoreArity.lhs has a note about wanting eta-expanded code
not to have too many redexes and such, because they want to call this from
CorePrep when there isn't going to be a simplifier pass afterward. However, we
don't need to bother, so we can lean on the simplifier and do almost nothing
here.
TODO: If we ever write SequentCorePrep, we'll have to bring back much of the
complexity here.
-}
-- | @etaExpand n us e ty@ returns an expression with
-- the same meaning as @e@, but with arity @n@.
--
-- Given:
--
-- > e' = etaExpand n us e ty
--
-- We should have that:
--
-- > ty = exprType e = exprType e'
etaExpand :: Arity -- ^ Result should have this number of value args
-> SeqCoreTerm -- ^ Expression to expand
-> SeqCoreTerm
-- etaExpand deals with for-alls. For example:
-- etaExpand 1 E
-- where E :: forall a. a -> a
-- would return
-- (/\b. \y::a -> E b y)
--
-- It deals with coerces too, though they are now rare
-- so perhaps the extra code isn't worth it
etaExpand n orig_term
= go n orig_term
where
-- Strip off existing lambdas (TODO and casts)
-- Note [Eta expansion and SCCs]
go 0 term = term
go n (Lam v body) | isTyVar v = Lam v (go n body)
| otherwise = Lam v (go (n-1) body)
-- go n (Cast expr co) = Cast (go n expr) co
go n term = -- pprTrace "ee" (vcat [ppr orig_expr, ppr expr, ppr etas]) $
etaInfoAbs etas (etaInfoApp term etas bodyTy)
where
in_scope = mkInScopeSet (exprFreeVars (termToCoreExpr term))
(bodyTy, etas) = mkEtaWW n orig_term in_scope (termType term)
etaExpandRhs :: Arity -> SeqCoreBindPair -> SeqCoreBindPair
etaExpandRhs n (BindTerm bndr term) = BindTerm bndr (etaExpand n term)
etaExpandRhs _ bindJoin = bindJoin
-- Wrapper Unwrapper
--------------
data EtaInfo = EtaVar Var -- /\a. [], [] a
-- \x. [], [] x
| EtaCo Coercion -- [] |> co, [] |> (sym co)
instance Outputable EtaInfo where
ppr (EtaVar v) = ptext (sLit "EtaVar") <+> ppr v
ppr (EtaCo co) = ptext (sLit "EtaCo") <+> ppr co
--------------
etaInfoAbs :: [EtaInfo] -> SeqCoreTerm -> SeqCoreTerm
etaInfoAbs [] expr = expr
etaInfoAbs (EtaVar v : eis) expr = Lam v (etaInfoAbs eis expr)
etaInfoAbs (EtaCo co : eis) expr = mkCast (etaInfoAbs eis expr) (mkSymCo co)
--------------
etaInfoApp :: SeqCoreTerm -> [EtaInfo] -> Type -> SeqCoreTerm
-- (etaInfoApp s e eis) returns something equivalent to
-- ((substExpr s e) `appliedto` eis)
etaInfoApp v eis ty
= mkCompute ty (Eval v (map frame eis) Return)
where
frame (EtaVar v) = App (mkVarArg v)
frame (EtaCo co) = Cast co
--------------
mkEtaWW :: Arity -> SeqCoreTerm -> InScopeSet -> Type
-> (Type, [EtaInfo])
-- EtaInfo contains fresh variables,
-- not free in the incoming CoreExpr
-- Outgoing InScopeSet includes the EtaInfo vars
-- and the original free vars
mkEtaWW orig_n orig_expr in_scope orig_ty
= go orig_n empty_subst orig_ty []
where
empty_subst = TvSubst in_scope emptyTvSubstEnv
go n subst ty eis -- See Note [termArity invariant]
| n == 0
= (ty, reverse eis)
| Just (tv,ty') <- splitForAllTy_maybe ty
, let (subst', tv') = Type.substTyVarBndr subst tv
-- Avoid free vars of the original expression
= go n subst' ty' (EtaVar tv' : eis)
| Just (arg_ty, res_ty) <- splitFunTy_maybe ty
, let (subst', eta_id') = freshEtaId n subst arg_ty
-- Avoid free vars of the original expression
= go (n-1) subst' res_ty (EtaVar eta_id' : eis)
| Just (co, ty') <- topNormaliseNewType_maybe ty
= -- Given this:
-- newtype T = MkT ([T] -> Int)
-- Consider eta-expanding this
-- eta_expand 1 e T
-- We want to get
-- coerce T (\x::[T] -> (coerce ([T]->Int) e) x)
go n subst ty' (EtaCo co : eis)
| otherwise -- We have an expression of arity > 0,
-- but its type isn't a function.
= warnPprTrace True __FILE__ __LINE__ ((ppr orig_n <+> ppr orig_ty) $$ ppr orig_expr)
(ty, reverse eis)
      -- This *can* legitimately happen:
-- e.g. coerce Int (\x. x) Essentially the programmer is
-- playing fast and loose with types (Happy does this a lot).
-- So we simply decline to eta-expand. Otherwise we'd end up
-- with an explicit lambda having a non-function type
--------------
freshEtaId :: Int -> TvSubst -> Type -> (TvSubst, Id)
-- Make a fresh Id, with specified type (after applying substitution)
-- It should be "fresh" in the sense that it's not in the in-scope set
-- of the TvSubstEnv; and it should itself then be added to the in-scope
-- set of the TvSubstEnv
--
-- The Int is just a reasonable starting point for generating a unique;
-- it does not necessarily have to be unique itself.
freshEtaId n subst ty
= (subst', eta_id')
where
ty' = Type.substTy subst ty
eta_id' = uniqAway (getTvInScope subst) $
mkSysLocal (fsLit "eta") (mkBuiltinUnique n) ty'
subst' = extendTvInScope subst eta_id'
| lukemaurer/sequent-core | src/Language/SequentCore/Arity.hs | bsd-3-clause | 35,513 | 0 | 14 | 10,619 | 4,630 | 2,365 | 2,265 | 295 | 16 |
module Network.TeleHash.Hn
(
hn_fromjson
, hn_frompacket
, hn_getparts
, hn_path
, hn_get
, hn_address
) where
import Control.Exception
import Control.Monad
import Control.Monad.State
import Data.List
import Data.Maybe
import Prelude hiding (id, (.), head, either)
import System.Time
import Network.TeleHash.Crypt
import Network.TeleHash.Packet
import Network.TeleHash.Path
import Network.TeleHash.Paths
import Network.TeleHash.Types
import Network.TeleHash.Utils
import qualified Data.ByteString.Char8 as BC
import qualified Data.Map as Map
import qualified Data.Set as Set
-- ---------------------------------------------------------------------
hn_fromjson :: RxTelex -> TeleHash (Maybe HashName)
hn_fromjson p = do
-- get/gen the hashname
let mpp1 = packet_get_packet p "from"
mpp2 = if mpp1 == Nothing
then packet_get_packet p "parts"
else mpp1
case mpp2 of
Nothing -> return Nothing
Just pp -> do
let mparts = parseJsVal pp :: Maybe Parts
case mparts of
Nothing -> return Nothing
Just parts -> do
mhn <- hn_getparts parts
case mhn of
Nothing -> return Nothing
Just hn -> do
-- if any paths are stored, associate them
let mpp4 = packet_get_packet p "paths"
case mpp4 of
Nothing -> return ()
Just pp4 -> do
let mpaths = parseJsVal pp4 :: Maybe [PathJson]
case mpaths of
Nothing -> return ()
Just paths -> do
forM_ paths $ \path -> do
mpath2 <- hn_path hn path
case mpath2 of
Nothing -> return ()
Just path2 -> do
putPath hn $ path2 { pAtIn = Nothing }
-- already have crypto
hc <- getHN hn
case (hCrypto hc) of
Just _ -> return ()
Nothing -> do
if (BC.length (unBody $ paBody $ rtPacket p) /= 0)
then do
c <- crypt_new (hCsid hc) Nothing (Just (unBody $ paBody $ rtPacket p))
-- putHN $ hc { hCrypto = c}
withHN hn $ \hc -> hc { hCrypto = c}
return ()
else do
let mpp = packet_get_packet p "keys"
logT $ "hn_fromjson:pp=" ++ show mpp
case mpp of
Nothing -> return ()
Just pp1 -> do
let mkey = packet_get_str (packet_from_val pp1) (hCsid hc)
logT $ "hn_fromjson:mkey=" ++ show mkey
case mkey of
Nothing -> return ()
Just key -> do
c <- crypt_new (hCsid hc) (Just key) Nothing
-- putHN $ hc { hCrypto = c}
withHN hn $ \hc -> hc { hCrypto = c}
return ()
hcFinal <- getHN hn
if isNothing (hCrypto hcFinal)
then return Nothing
else return (Just hn)
{-
// derive a hn from json seed or connect format
hn_t hn_fromjson(xht_t index, packet_t p)
{
char *key;
hn_t hn = NULL;
packet_t pp, next;
path_t path;
if(!p) return NULL;
// get/gen the hashname
pp = packet_get_packet(p,"from");
if(!pp) pp = packet_get_packet(p,"parts");
hn = hn_getparts(index, pp); // frees pp
if(!hn) return NULL;
// if any paths are stored, associte them
pp = packet_get_packets(p, "paths");
while(pp)
{
path = hn_path(hn, path_parse((char*)pp->json, pp->json_len));
if(path) path->atIn = 0; // don't consider this path alive
next = pp->next;
packet_free(pp);
pp = next;
}
// already have crypto
if(hn->c) return hn;
if(p->body_len)
{
hn->c = crypt_new(hn->csid, p->body, p->body_len);
}else{
pp = packet_get_packet(p, "keys");
key = packet_get_str(pp,hn->hexid);
if(key) hn->c = crypt_new(hn->csid, (unsigned char*)key, strlen(key));
packet_free(pp);
}
return (hn->c) ? hn : NULL;
}
-}
-- ---------------------------------------------------------------------
hn_frompacket :: OpenizeInner -> DeOpenizeResult -> TeleHash (Maybe HashContainer)
hn_frompacket _ DeOpenizeVerifyFail = return Nothing
hn_frompacket inner deopen = do
-- get/gen the hashname
mhn <- hn_getparts (oiFrom inner)
case mhn of
Nothing -> do
logT $ "hn_frompacket:cannot get hashname " ++ show inner
return Nothing
Just hn -> do
hc <- getHN hn
-- load key from packet body
hc2 <- if isNothing (hCrypto hc)
then do
mcrypt <- crypt_new (doCsid deopen) Nothing (Just $ doKey deopen)
withHN hn $ \hc3 -> hc3 { hCrypto = mcrypt }
else return hc
return (Just hc2)
-- ---------------------------------------------------------------------
hn_getparts :: Parts -> TeleHash (Maybe HashName)
hn_getparts parts = do
logT $ "hn_getparts: must still match highest cipher set"
let hashName = parts2hn parts
mhc <- getHNMaybe hashName
case mhc of
Nothing -> do
-- putHN $ newHashContainer hashName
void $ newHN hashName
return ()
Just _ -> return ()
-- hc <- getHN hashName
-- putHN $ hc { hCsid = "1a"
-- , hParts = Just parts
-- }
hc <- withHN hashName $ \hc
-> hc { hCsid = "1a"
, hParts = Just parts
}
return (Just (hHashName hc))
{-
hn_t hn_getparts(xht_t index, packet_t p)
{
char *part, csid, csids[16], hex[3]; // max parts of 8
int i,ids,ri,len;
unsigned char *rollup, hnbin[32];
char best = 0;
hn_t hn;
if(!p) return NULL;
hex[2] = 0;
for(ids=i=0;ids<8 && p->js[i];i+=4)
{
if(p->js[i+1] != 2) continue; // csid must be 2 char only
memcpy(hex,p->json+p->js[i],2);
memcpy(csids+(ids*2),hex,2);
util_unhex((unsigned char*)hex,2,(unsigned char*)&csid);
if(csid > best && xht_get(index,hex)) best = csid; // matches if we have the same csid in index (for our own keys)
ids++;
}
if(!best) return NULL; // we must match at least one
util_sort(csids,ids,2,csidcmp,NULL);
rollup = NULL;
ri = 0;
for(i=0;i<ids;i++)
{
len = 2;
if(!(rollup = util_reallocf(rollup,ri+len))) return NULL;
memcpy(rollup+ri,csids+(i*2),len);
crypt_hash(rollup,ri+len,hnbin);
ri = 32;
if(!(rollup = util_reallocf(rollup,ri))) return NULL;
memcpy(rollup,hnbin,ri);
memcpy(hex,csids+(i*2),2);
part = packet_get_str(p, hex);
if(!part) continue; // garbage safety
len = strlen(part);
if(!(rollup = util_reallocf(rollup,ri+len))) return NULL;
memcpy(rollup+ri,part,len);
crypt_hash(rollup,ri+len,hnbin);
memcpy(rollup,hnbin,32);
}
memcpy(hnbin,rollup,32);
free(rollup);
hn = hn_get(index, hnbin);
if(!hn) return NULL;
if(!hn->parts) hn->parts = p;
else packet_free(p);
hn->csid = best;
util_hex((unsigned char*)&best,1,(unsigned char*)hn->hexid);
return hn;
}
-}
-- ---------------------------------------------------------------------
hn_path :: HashName -> PathJson -> TeleHash (Maybe Path)
hn_path hn p = do
-- logT $ "hn_path:" ++ show (hn,p,hPaths hc)
logT $ "hn_path:" ++ show hn ++ "," ++ showPathJson p
path <- path_get hn p
-- update public ip if found
case pJson path of
lp@(PIPv4 pipv4) -> do
case pjsonIp lp of
Nothing -> return ()
Just ip -> do
logT $ "hn_path:checking ip:" ++ show ip
if isLocalIP ip
then return ()
else do
hnOwn <- getOwnHN
if hn == hnOwn
then do
logR $ "hn_path:got our remote ip:" ++ show ip
sw <- get
put $ sw {swExternalIPP = Just pipv4 }
-- update our own HN to have the new path
hnSelf <- getOwnHN
putPath hnSelf (pathFromPathJson lp)
else return ()
-- update public ipv4 info
void $ withHN hn $ \hc1 -> hc1 { hExternalIPP = Just pipv4 }
_ -> return ()
logT $ "TODO:hn_path:lots more stuff"
return (Just path)
{- JS version
This is called on a successful open received, and on every line packet received
// manage network information consistently, called on all validated incoming packets
hn.pathIn = function(path)
{
path = hn.pathGet(path);
if(!path) return false;
// first time we've seen em
if(!path.recvAt && !path.sentAt)
{
debug("PATH INNEW",isLocalPath(path)?"local":"public",JSON.stringify(path.json),hn.paths.map(function(p){return JSON.stringify(p.json)}));
// update public ipv4 info
if(path.type == "ipv4" && !isLocalIP(path.ip))
{
hn.ip = path.ip;
hn.port = path.port;
}
// cull any invalid paths of the same type
hn.paths.forEach(function(other){
if(other == path) return;
if(other.type != path.type) return;
if(!pathValid(other)) return hn.pathEnd(other);
// remove any previous path on the same IP
if(path.ip && other.ip == path.ip) return hn.pathEnd(other);
// remove any previous http path entirely
if(path.type == "http") return hn.pathEnd(other);
});
// any custom non-public paths, we must bridge for
if(pathShareOrder.indexOf(path.type) == -1) hn.bridging = true;
// track overall if we trust them as local
if(isLocalPath(path) && !hn.isLocal)
{
hn.isLocal = true;
hn.pathSync();
}
}
// always update default to newest
path.recvAt = Date.now();
hn.to = path;
return path;
}
-}
-- ---------------------------------------------------------------------
-- |get a HashContainer from the index, creating it if not already present
hn_get :: HashName -> TeleHash HashContainer
hn_get hn = do
sw <- get
case Map.lookup hn (swIndex sw) of
Just hc -> return hc
Nothing -> do
-- let hc = newHashContainer hn
-- putHN hc
hc <- newHN hn
return hc
-- ---------------------------------------------------------------------
hn_address :: HashName -> HashName -> TeleHash [String]
hn_address fromHN toHN = do
hc <- getHN fromHN
to <- getHN toHN
if not (isJust (hParts hc) && isJust (hParts to))
then return []
else do
case partsMatch (fromJust $ hParts hc) (fromJust $ hParts to) of
Nothing -> return []
Just csid -> do
case hExternalIPP hc of
Nothing -> return [unHN fromHN,csid]
Just ipp -> return [unHN fromHN,csid,show (v4Ip ipp),show (v4Port ipp)]
{-
// return our address to them
hn.address = function(to)
{
if(!to) return "";
var csid = partsMatch(hn.parts,to.parts);
if(!csid) return "";
if(!hn.ip) return [hn.hashname,csid].join(",");
return [hn.hashname,csid,hn.ip,hn.port].join(",");
}
-}
-- ---------------------------------------------------------------------
partsMatch :: Parts -> Parts -> Maybe String
partsMatch parts1 parts2 = r
where
ids = sort $ map fst parts1
p2 = Set.fromList $ map fst parts2
common = filter (\k -> Set.member k p2) ids
r = if common == [] then Nothing
else Just $ head common
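-- Examples (illustrative, treating Parts as a list of (csid, key) pairs as
-- the uses of fst above suggest):
--
-- > partsMatch [("1a","k1"),("2a","k2")] [("2a","k3")] == Just "2a"
-- > partsMatch [("1a","k1"),("2a","k2")] [("1a","x"),("2a","y")] == Just "1a"
-- > partsMatch [("1a","k1")] [("3a","k3")] == Nothing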
{-
function partsMatch(parts1, parts2)
{
if(typeof parts1 != "object" || typeof parts2 != "object") return false;
var ids = Object.keys(parts1).sort();
var csid;
while(csid = ids.pop()) if(parts2[csid]) return csid;
return false;
}
-}
-- ---------------------------------------------------------------------
| alanz/htelehash | src/Network/TeleHash/Hn.hs | bsd-3-clause | 12,134 | 8 | 63 | 3,905 | 1,959 | 948 | 1,011 | 166 | 13 |
{-# LANGUAGE DisambiguateRecordFields #-}
{-# LANGUAGE DuplicateRecordFields #-}
{-|
Hoon's `map` and `set` types and conversions to/from Nouns.
-}
module Urbit.Noun.Tree
( HoonSet, setToHoonSet, setFromHoonSet
, HoonMap, mapToHoonMap, mapFromHoonMap
, mug
) where
import ClassyPrelude
import Control.Lens hiding (non)
import Urbit.Atom
import Urbit.Noun.Conversions ()
import Urbit.Noun.Convert
import Urbit.Noun.Core
import Urbit.Noun.TH
import Data.Bits (shiftR, xor)
import Data.Hash.Murmur (murmur3)
import GHC.Natural (Natural)
-- Types -----------------------------------------------------------------------
data NounVal a = NounVal
{ non ∷ Noun
, val ∷ !a
}
data HoonTreeNode a = NTN
{ n ∷ NounVal a
, l ∷ HoonTree a
, r ∷ HoonTree a
}
deriving (Eq, Ord, Show)
data HoonTree a = E | Node (HoonTreeNode a)
deriving (Eq, Ord, Show)
pattern N n l r = Node (NTN n l r)
newtype HoonSet a = HoonSet { unHoonSet ∷ HoonTree a }
deriving newtype (Eq, Ord, Show, FromNoun, ToNoun)
newtype HoonMap k v = HoonMap { unHoonMap ∷ HoonTree (k, v) }
deriving newtype (Eq, Ord, Show, FromNoun, ToNoun)
-- Instances -------------------------------------------------------------------
instance Eq (NounVal a) where
(==) = on (==) non
instance Ord (NounVal a) where
compare = comparing non
instance ToNoun (NounVal a) where
toNoun = non
instance Show a ⇒ Show (NounVal a) where
show = show . val
instance FromNoun a ⇒ FromNoun (NounVal a) where
parseNoun x = NounVal x <$> parseNoun x
instance ToNoun a ⇒ ToNoun (HoonTree a) where
toNoun E = A 0
toNoun (Node n) = toNoun n
instance FromNoun a ⇒ FromNoun (HoonTree a) where
parseNoun (A 0) = pure E
parseNoun n = Node <$> parseNoun n
deriveNoun ''HoonTreeNode
-- Mug -------------------------------------------------------------------------
type Nat = Natural
slowMug ∷ Noun → Nat
slowMug = trim 0xcafe_babe . \case
A a → a
C h t → mix (slowMug h) $ mix 0x7fff_ffff (slowMug t)
where
trim ∷ Nat → Nat → Nat
trim syd key =
if 0/=ham then ham else trim (succ syd) key
where
haz = muk syd (met 3 key) key
ham = mix (rsh 0 31 haz) (end 0 31 haz)
mix ∷ Nat → Nat → Nat
mix = xor
-- Murmur3
muk ∷ Nat → Nat → Nat → Nat
muk seed len =
fromIntegral . murmur3 (word32 seed) . resize . atomBytes
where
resize ∷ ByteString → ByteString
resize buf =
case compare (length buf) (int len) of
EQ → buf
LT → error "bad-muk"
GT → error "bad-muk"
-- LT → buf <> replicate (len - length buf) 0
-- GT → take len buf
int ∷ Integral i ⇒ i → Int
int = fromIntegral
word32 ∷ Integral i ⇒ i → Word32
word32 = fromIntegral
bex ∷ Nat → Nat
bex = (2^)
end ∷ Nat → Nat → Nat → Nat
end blockSize blocks n =
n `mod` (bex (bex blockSize * blocks))
rsh ∷ Nat → Nat → Nat → Nat
rsh blockSize blocks n =
shiftR n $ fromIntegral $ (bex blockSize * blocks)
met ∷ Nat → Nat → Nat
met bloq = go 0
where
go c 0 = c
go c n = go (succ c) (rsh bloq 1 n)
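-- Illustrative values: @met 0 5 == 3@ (5 occupies three bits), @met 3 256 == 2@
-- (256 occupies two bytes), @rsh 0 8 300 == 1@ and @end 0 8 300 == 44@
-- (300 = 1*256 + 44).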
-- XX TODO
mug ∷ Noun → Nat
mug = slowMug
-- Order -----------------------------------------------------------------------
{-
Orders in ascending double mug hash order, collisions fall back to dor.
-}
mor ∷ Noun → Noun → Bool
mor a b = if c == d then dor a b else c < d
where
c = mug $ A $ mug a
d = mug $ A $ mug b
{-
Orders in ascending tree depth.
-}
dor ∷ Noun → Noun → Bool
dor a b | a == b = True
dor (A a) (C _ _) = True
dor (C x y) (A b) = False
dor (A a) (A b) = a < b
dor (C x y) (C p q) | x == p = dor y q
dor (C x y) (C p q) = dor x p
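-- Illustrative cases: @dor (A 1) (A 2)@ is True (smaller atom first) and
-- @dor (A 1) (C (A 0) (A 0))@ is True (atoms order before cells).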
{-
Orders in ascending +mug hash order.
Collisions fall back to dor.
-}
gor ∷ Noun → Noun → Bool
gor a b = if c==d then dor a b else c<d
where (c, d) = (mug a, mug b)
morVal, gorVal ∷ NounVal a → NounVal a → Bool
morVal = on mor non
gorVal = on gor non
--------------------------------------------------------------------------------
nounVal ∷ ToNoun a ⇒ Iso' a (NounVal a)
nounVal = iso to val
where
to x = NounVal (toNoun x) x
treeToList ∷ ∀a. HoonTree a → [a]
treeToList = go []
where
go ∷ [a] → HoonTree a → [a]
go acc = \case
E → acc
Node (NTN v l r) → go (go (val v : acc) l) r
setFromHoonSet ∷ Ord a ⇒ HoonSet a → Set a
setFromHoonSet = setFromList . treeToList . unHoonSet
mapFromHoonMap ∷ Ord k ⇒ HoonMap k v → Map k v
mapFromHoonMap = mapFromList . treeToList . unHoonMap
setToHoonSet ∷ ∀a. (Ord a, ToNoun a) ⇒ Set a → HoonSet a
setToHoonSet = HoonSet . foldr put E . fmap (view nounVal) . setToList
where
put x = \case
E → N x E E
Node a | x == n a → Node a
Node a | gorVal x (n a) → lef x a
Node a → rit x a
rit x a = put x (r a) & \case
E → error "bad-put-set"
Node c | morVal (n a) (n c) → N (n a) (l a) (Node c)
Node c → N (n c) (N (n a) (l a) (l c)) (r c)
lef x a = put x (l a) & \case
E → error "bad-put-set"
Node c | morVal (n a) (n c) → N (n a) (Node c) (r a)
Node c → N (n c) (l c) (N (n a) (r c) (r a))
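-- Illustrative round-trip property (a sketch, not part of the original
-- module): converting a Set to a HoonSet and back preserves the elements,
-- assuming the ToNoun instance for the element type (here Atom) is in scope.
propSetRoundTrip ∷ Set Atom → Bool
propSetRoundTrip s = setFromHoonSet (setToHoonSet s) == s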
p ∷ (ToNoun a, ToNoun b) ⇒ NounVal (a,b) → NounVal a
p = view (from nounVal . to fst . nounVal)
pq ∷ (ToNoun a, ToNoun b) ⇒ NounVal (a,b) → (NounVal a, NounVal b)
pq = boof . view (from nounVal)
where
boof (x, y) = (x ^. nounVal, y ^. nounVal)
mapToHoonMap ∷ ∀k v. (ToNoun k, ToNoun v, Ord k, Ord v) ⇒ Map k v → HoonMap k v
mapToHoonMap = HoonMap . foldr put E . fmap (view nounVal) . mapToList
where
put ∷ NounVal (k, v) → HoonTree (k, v) → HoonTree (k, v)
put kv@(pq -> (b, c)) = \case
E → N kv E E
Node a | kv == n a → Node a
Node a | b == p (n a) → N kv (l a) (r a)
Node a | gorVal b (p $ n a) → lef kv a
Node a → rit kv a
lef kv@(pq -> (b, c)) a = put kv (l a) & \case
E → error "bad-put-map"
Node d | morVal (p $ n a) (p $ n d) → N (n a) (Node d) (r a)
Node d → N (n d) (l d) (N (n a) (r d) (r a))
rit kv@(pq -> (b, c)) a = put kv (r a) & \case
E → error "bad-put-map"
Node d | morVal (p $ n a) (p $ n d) → N (n a) (l a) (Node d)
Node d → N (n d) (N (n a) (l a) (l d)) (r d)
| jfranklin9000/urbit | pkg/hs/urbit-king/lib/Urbit/Noun/Tree.hs | mit | 6,880 | 0 | 17 | 2,183 | 2,998 | 1,518 | 1,480 | -1 | -1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.EC2.DescribeVPNGateways
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Describes one or more of your virtual private gateways.
--
-- For more information about virtual private gateways, see
-- <http://docs.aws.amazon.com/AmazonVPC/latest/UserGuide/VPC_VPN.html Adding an IPsec Hardware VPN to Your VPC>
-- in the /Amazon Virtual Private Cloud User Guide/.
--
-- /See:/ <http://docs.aws.amazon.com/AWSEC2/latest/APIReference/ApiReference-query-DescribeVPNGateways.html AWS API Reference> for DescribeVPNGateways.
module Network.AWS.EC2.DescribeVPNGateways
(
-- * Creating a Request
describeVPNGateways
, DescribeVPNGateways
-- * Request Lenses
, dvgsFilters
, dvgsVPNGatewayIds
, dvgsDryRun
-- * Destructuring the Response
, describeVPNGatewaysResponse
, DescribeVPNGatewaysResponse
-- * Response Lenses
, dvgrsVPNGateways
, dvgrsResponseStatus
) where
import Network.AWS.EC2.Types
import Network.AWS.EC2.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | /See:/ 'describeVPNGateways' smart constructor.
data DescribeVPNGateways = DescribeVPNGateways'
{ _dvgsFilters :: !(Maybe [Filter])
, _dvgsVPNGatewayIds :: !(Maybe [Text])
, _dvgsDryRun :: !(Maybe Bool)
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DescribeVPNGateways' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dvgsFilters'
--
-- * 'dvgsVPNGatewayIds'
--
-- * 'dvgsDryRun'
describeVPNGateways
:: DescribeVPNGateways
describeVPNGateways =
DescribeVPNGateways'
{ _dvgsFilters = Nothing
, _dvgsVPNGatewayIds = Nothing
, _dvgsDryRun = Nothing
}
-- | One or more filters.
--
-- - 'attachment.state' - The current state of the attachment between the
-- gateway and the VPC ('attaching' | 'attached' | 'detaching' |
-- 'detached').
--
-- - 'attachment.vpc-id' - The ID of an attached VPC.
--
-- - 'availability-zone' - The Availability Zone for the virtual private
-- gateway.
--
-- - 'state' - The state of the virtual private gateway ('pending' |
-- 'available' | 'deleting' | 'deleted').
--
-- - 'tag':/key/=/value/ - The key\/value combination of a tag assigned
-- to the resource.
--
-- - 'tag-key' - The key of a tag assigned to the resource. This filter
-- is independent of the 'tag-value' filter. For example, if you use
-- both the filter \"tag-key=Purpose\" and the filter \"tag-value=X\",
-- you get any resources assigned both the tag key Purpose (regardless
-- of what the tag\'s value is), and the tag value X (regardless of
-- what the tag\'s key is). If you want to list only resources where
-- Purpose is X, see the 'tag':/key/=/value/ filter.
--
-- - 'tag-value' - The value of a tag assigned to the resource. This
-- filter is independent of the 'tag-key' filter.
--
-- - 'type' - The type of virtual private gateway. Currently the only
-- supported type is 'ipsec.1'.
--
-- - 'vpn-gateway-id' - The ID of the virtual private gateway.
--
dvgsFilters :: Lens' DescribeVPNGateways [Filter]
dvgsFilters = lens _dvgsFilters (\ s a -> s{_dvgsFilters = a}) . _Default . _Coerce;
-- | One or more virtual private gateway IDs.
--
-- Default: Describes all your virtual private gateways.
dvgsVPNGatewayIds :: Lens' DescribeVPNGateways [Text]
dvgsVPNGatewayIds = lens _dvgsVPNGatewayIds (\ s a -> s{_dvgsVPNGatewayIds = a}) . _Default . _Coerce;
-- | Checks whether you have the required permissions for the action, without
-- actually making the request, and provides an error response. If you have
-- the required permissions, the error response is 'DryRunOperation'.
-- Otherwise, it is 'UnauthorizedOperation'.
dvgsDryRun :: Lens' DescribeVPNGateways (Maybe Bool)
dvgsDryRun = lens _dvgsDryRun (\ s a -> s{_dvgsDryRun = a});
instance AWSRequest DescribeVPNGateways where
type Rs DescribeVPNGateways =
DescribeVPNGatewaysResponse
request = postQuery eC2
response
= receiveXML
(\ s h x ->
DescribeVPNGatewaysResponse' <$>
(x .@? "vpnGatewaySet" .!@ mempty >>=
may (parseXMLList "item"))
<*> (pure (fromEnum s)))
instance ToHeaders DescribeVPNGateways where
toHeaders = const mempty
instance ToPath DescribeVPNGateways where
toPath = const "/"
instance ToQuery DescribeVPNGateways where
toQuery DescribeVPNGateways'{..}
= mconcat
["Action" =: ("DescribeVpnGateways" :: ByteString),
"Version" =: ("2015-04-15" :: ByteString),
toQuery (toQueryList "Filter" <$> _dvgsFilters),
toQuery
(toQueryList "VpnGatewayId" <$> _dvgsVPNGatewayIds),
"DryRun" =: _dvgsDryRun]
-- | /See:/ 'describeVPNGatewaysResponse' smart constructor.
data DescribeVPNGatewaysResponse = DescribeVPNGatewaysResponse'
{ _dvgrsVPNGateways :: !(Maybe [VPNGateway])
, _dvgrsResponseStatus :: !Int
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DescribeVPNGatewaysResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dvgrsVPNGateways'
--
-- * 'dvgrsResponseStatus'
describeVPNGatewaysResponse
:: Int -- ^ 'dvgrsResponseStatus'
-> DescribeVPNGatewaysResponse
describeVPNGatewaysResponse pResponseStatus_ =
DescribeVPNGatewaysResponse'
{ _dvgrsVPNGateways = Nothing
, _dvgrsResponseStatus = pResponseStatus_
}
-- | Information about one or more virtual private gateways.
dvgrsVPNGateways :: Lens' DescribeVPNGatewaysResponse [VPNGateway]
dvgrsVPNGateways = lens _dvgrsVPNGateways (\ s a -> s{_dvgrsVPNGateways = a}) . _Default . _Coerce;
-- | The response status code.
dvgrsResponseStatus :: Lens' DescribeVPNGatewaysResponse Int
dvgrsResponseStatus = lens _dvgrsResponseStatus (\ s a -> s{_dvgrsResponseStatus = a});
| fmapfmapfmap/amazonka | amazonka-ec2/gen/Network/AWS/EC2/DescribeVPNGateways.hs | mpl-2.0 | 6,785 | 0 | 15 | 1,419 | 805 | 491 | 314 | 91 | 1 |
-- Contains HalfsState, the "global" data structure for tracking
-- filesystem state.
module Halfs.HalfsState
(
HalfsState(..)
)
where
import Data.Map as M
import Data.Word
import Halfs.BlockMap (BlockMap)
import Halfs.Errors (HalfsError)
import Halfs.Monad (HalfsT)
import Halfs.Protection (UserID, GroupID)
import Halfs.SuperBlock (SuperBlock)
import Halfs.Types (DirHandle, FileHandle, InodeRef, LockedRscRef)
import System.Device.BlockDevice (BlockDevice)
type HalfsM b r l m a = HalfsT HalfsError (Maybe (HalfsState b r l m)) m a
type FHMapVal b r l m = (FileHandle r l, Word64, HalfsM b r l m ())
data HalfsState b r l m = HalfsState {
hsBlockDev :: BlockDevice m
, hsUserID :: UserID
, hsGroupID :: GroupID
, hsLogger :: Maybe (String -> m ())
, hsSizes :: (Word64, Word64, Word64, Word64)
-- ^ explicitly memoized Inode.computeSizes
, hsBlockMap :: BlockMap b r l
, hsSuperBlock :: r SuperBlock
, hsLock :: l
, hsNumFileNodes :: LockedRscRef l r Word64
, hsDHMap :: LockedRscRef l r (M.Map InodeRef (DirHandle r l))
-- ^ Tracks active directory handles; we probably want to add a
-- (refcounting?) expiry mechanism so that the size of the map is
-- bounded. TODO.
, hsFHMap :: LockedRscRef l r (M.Map InodeRef (FHMapVal b r l m))
-- ^ Tracks active file handles, their open counts, and an
-- on-final-close hook. This last monadic action is executed
-- whenever the open count becomes 0 (we do this because, e.g.,
-- unlinking files can cause deferred resource allocation for
-- currently-open files).
, hsInodeLockMap :: LockedRscRef l r (M.Map InodeRef (l, Word64))
-- ^ Tracks refcnt'd inode locks. For now, these are single reader/writer
-- locks.
}
| hackern/halfs | Halfs/HalfsState.hs | bsd-3-clause | 1,929 | 0 | 13 | 535 | 407 | 244 | 163 | 27 | 0 |
module Web.Heroku.Postgres (
dbConnParams
, parseDatabaseUrl
) where
import Data.Text
import Web.Heroku.Internal (dbConnParams', parseDatabaseUrl')
dbConnParams :: IO [(Text, Text)]
dbConnParams = dbConnParams' "DATABASE_URL" parseDatabaseUrl
parseDatabaseUrl :: String -> [(Text, Text)]
parseDatabaseUrl = parseDatabaseUrl' "postgres:"
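-- A usage sketch (the URL below is made up):
--
-- > params <- dbConnParams                               -- reads DATABASE_URL
-- > let params' = parseDatabaseUrl "postgres://user:pass@host:5432/mydb"
--
-- Both yield the connection settings as key/value 'Text' pairs, ready to be
-- handed to a PostgreSQL connection builder.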
| thoughtbot/haskell-heroku | Web/Heroku/Postgres.hs | bsd-3-clause | 350 | 0 | 7 | 47 | 87 | 52 | 35 | 9 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.S3.DeleteBucketTagging
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Deletes the tags from the bucket.
--
-- <http://docs.aws.amazon.com/AmazonS3/latest/API/DeleteBucketTagging.html>
module Network.AWS.S3.DeleteBucketTagging
(
-- * Request
DeleteBucketTagging
-- ** Request constructor
, deleteBucketTagging
-- ** Request lenses
, dbtBucket
-- * Response
, DeleteBucketTaggingResponse
-- ** Response constructor
, deleteBucketTaggingResponse
) where
import Network.AWS.Prelude
import Network.AWS.Request.S3
import Network.AWS.S3.Types
import qualified GHC.Exts
newtype DeleteBucketTagging = DeleteBucketTagging
{ _dbtBucket :: Text
} deriving (Eq, Ord, Read, Show, Monoid, IsString)
-- | 'DeleteBucketTagging' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'dbtBucket' @::@ 'Text'
--
deleteBucketTagging :: Text -- ^ 'dbtBucket'
-> DeleteBucketTagging
deleteBucketTagging p1 = DeleteBucketTagging
{ _dbtBucket = p1
}
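-- A usage sketch (illustrative only; the surrounding AWS monad set-up from
-- "Control.Monad.Trans.AWS" and the bucket name are assumptions):
--
-- > clearBucketTags = send (deleteBucketTagging "my-bucket")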
dbtBucket :: Lens' DeleteBucketTagging Text
dbtBucket = lens _dbtBucket (\s a -> s { _dbtBucket = a })
data DeleteBucketTaggingResponse = DeleteBucketTaggingResponse
deriving (Eq, Ord, Read, Show, Generic)
-- | 'DeleteBucketTaggingResponse' constructor.
deleteBucketTaggingResponse :: DeleteBucketTaggingResponse
deleteBucketTaggingResponse = DeleteBucketTaggingResponse
instance ToPath DeleteBucketTagging where
toPath DeleteBucketTagging{..} = mconcat
[ "/"
, toText _dbtBucket
]
instance ToQuery DeleteBucketTagging where
toQuery = const "tagging"
instance ToHeaders DeleteBucketTagging
instance ToXMLRoot DeleteBucketTagging where
toXMLRoot = const (namespaced ns "DeleteBucketTagging" [])
instance ToXML DeleteBucketTagging
instance AWSRequest DeleteBucketTagging where
type Sv DeleteBucketTagging = S3
type Rs DeleteBucketTagging = DeleteBucketTaggingResponse
request = delete
response = nullResponse DeleteBucketTaggingResponse
| romanb/amazonka | amazonka-s3/gen/Network/AWS/S3/DeleteBucketTagging.hs | mpl-2.0 | 3,016 | 0 | 9 | 646 | 362 | 218 | 144 | 49 | 1 |
module Foreign.JavaScript.Internal.Utils
( synchronously
, freeRequestAnimationFrameCallback
, js_dataView
) where
import GHCJS.DOM.Types (Callback (..), JSM, JSVal, RequestAnimationFrameCallback (..))
import Language.Javascript.JSaddle.Object (freeFunction, jsg, new)
synchronously :: JSM a -> JSM a
synchronously = id
freeRequestAnimationFrameCallback :: RequestAnimationFrameCallback -> JSM ()
freeRequestAnimationFrameCallback (RequestAnimationFrameCallback (Callback cb)) = freeFunction cb
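-- | Construct a JavaScript @DataView@ over the given buffer reference,
-- i.e. @new DataView(buffer, offset, length)@, with the byte offset and
-- length passed as the first two arguments.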
js_dataView :: Int -> Int -> JSVal -> JSM JSVal
js_dataView off len ref = new (jsg "DataView") (ref, off, len)
| reflex-frp/reflex-dom | reflex-dom-core/src-ghc/Foreign/JavaScript/Internal/Utils.hs | bsd-3-clause | 618 | 0 | 9 | 79 | 174 | 99 | 75 | 12 | 1 |
module Distribution.Simple.Test.ExeV10
( runTest
) where
import Distribution.Compat.CreatePipe ( createPipe )
import Distribution.Compat.Environment ( getEnvironment )
import qualified Distribution.PackageDescription as PD
import Distribution.Simple.Build.PathsModule ( pkgPathEnvVar )
import Distribution.Simple.BuildPaths ( exeExtension )
import Distribution.Simple.Compiler ( compilerInfo )
import Distribution.Simple.Hpc ( guessWay, markupTest, tixDir, tixFilePath )
import Distribution.Simple.InstallDirs
( fromPathTemplate, initialPathTemplateEnv, PathTemplateVariable(..)
, substPathTemplate , toPathTemplate, PathTemplate )
import qualified Distribution.Simple.LocalBuildInfo as LBI
import Distribution.Simple.Setup
( TestFlags(..), TestShowDetails(..), fromFlag, configCoverage )
import Distribution.Simple.Test.Log
import Distribution.Simple.Utils
( die, notice, rawSystemIOWithEnv, addLibraryPath )
import Distribution.System ( Platform (..) )
import Distribution.TestSuite
import Distribution.Text
import Distribution.Verbosity ( normal )
import Control.Concurrent (forkIO)
import Control.Monad ( unless, void, when )
import System.Directory
( createDirectoryIfMissing, doesDirectoryExist, doesFileExist
, getCurrentDirectory, removeDirectoryRecursive )
import System.Exit ( ExitCode(..) )
import System.FilePath ( (</>), (<.>) )
import System.IO ( hGetContents, hPutStr, stdout, stderr )
runTest :: PD.PackageDescription
-> LBI.LocalBuildInfo
-> TestFlags
-> PD.TestSuite
-> IO TestSuiteLog
runTest pkg_descr lbi flags suite = do
let isCoverageEnabled = fromFlag $ configCoverage $ LBI.configFlags lbi
way = guessWay lbi
tixDir_ = tixDir distPref way $ PD.testName suite
pwd <- getCurrentDirectory
existingEnv <- getEnvironment
let cmd = LBI.buildDir lbi </> PD.testName suite
</> PD.testName suite <.> exeExtension
-- Check that the test executable exists.
exists <- doesFileExist cmd
unless exists $ die $ "Error: Could not find test program \"" ++ cmd
++ "\". Did you build the package first?"
-- Remove old .tix files if appropriate.
unless (fromFlag $ testKeepTix flags) $ do
exists' <- doesDirectoryExist tixDir_
when exists' $ removeDirectoryRecursive tixDir_
-- Create directory for HPC files.
createDirectoryIfMissing True tixDir_
-- Write summary notices indicating start of test suite
notice verbosity $ summarizeSuiteStart $ PD.testName suite
(wOut, wErr, logText) <- case details of
Direct -> return (stdout, stderr, "")
_ -> do
(rOut, wOut) <- createPipe
-- Read test executable's output lazily (returns immediately)
logText <- hGetContents rOut
-- Force the IO manager to drain the test output pipe
void $ forkIO $ length logText `seq` return ()
-- '--show-details=streaming': print the log output in another thread
when (details == Streaming) $ void $ forkIO $ hPutStr stdout logText
return (wOut, wOut, logText)
-- Run the test executable
let opts = map (testOption pkg_descr lbi suite)
(testOptions flags)
dataDirPath = pwd </> PD.dataDir pkg_descr
tixFile = pwd </> tixFilePath distPref way (PD.testName suite)
pkgPathEnv = (pkgPathEnvVar pkg_descr "datadir", dataDirPath)
: existingEnv
shellEnv = [("HPCTIXFILE", tixFile) | isCoverageEnabled] ++ pkgPathEnv
-- Add (DY)LD_LIBRARY_PATH if needed
shellEnv' <- if LBI.withDynExe lbi
then do let (Platform _ os) = LBI.hostPlatform lbi
clbi = LBI.getComponentLocalBuildInfo lbi
(LBI.CTestName (PD.testName suite))
paths <- LBI.depLibraryPaths True False lbi clbi
return (addLibraryPath os paths shellEnv)
else return shellEnv
exit <- rawSystemIOWithEnv verbosity cmd opts Nothing (Just shellEnv')
-- these handles are automatically closed
Nothing (Just wOut) (Just wErr)
-- Generate TestSuiteLog from executable exit code and a machine-
-- readable test log.
let suiteLog = buildLog exit
-- Write summary notice to log file indicating start of test suite
appendFile (logFile suiteLog) $ summarizeSuiteStart $ PD.testName suite
-- Append contents of temporary log file to the final human-
-- readable log file
appendFile (logFile suiteLog) logText
-- Write end-of-suite summary notice to log file
appendFile (logFile suiteLog) $ summarizeSuiteFinish suiteLog
-- Show the contents of the human-readable log file on the terminal
-- if there is a failure and/or detailed output is requested
let whenPrinting = when $
( details == Always ||
details == Failures && not (suitePassed $ testLogs suiteLog))
-- verbosity overrides show-details
&& verbosity >= normal
whenPrinting $ putStr $ unlines $ lines logText
-- Write summary notice to terminal indicating end of test suite
notice verbosity $ summarizeSuiteFinish suiteLog
when isCoverageEnabled $
markupTest verbosity lbi distPref (display $ PD.package pkg_descr) suite
return suiteLog
where
distPref = fromFlag $ testDistPref flags
verbosity = fromFlag $ testVerbosity flags
details = fromFlag $ testShowDetails flags
testLogDir = distPref </> "test"
buildLog exit =
let r = case exit of
ExitSuccess -> Pass
ExitFailure c -> Fail $ "exit code: " ++ show c
n = PD.testName suite
l = TestLog
{ testName = n
, testOptionsReturned = []
, testResult = r
}
in TestSuiteLog
{ testSuiteName = n
, testLogs = l
, logFile =
testLogDir
</> testSuiteLogPath (fromFlag $ testHumanLog flags)
pkg_descr lbi n l
}
-- TODO: This is abusing the notion of a 'PathTemplate'. The result isn't
-- necessarily a path.
testOption :: PD.PackageDescription
-> LBI.LocalBuildInfo
-> PD.TestSuite
-> PathTemplate
-> String
testOption pkg_descr lbi suite template =
fromPathTemplate $ substPathTemplate env template
where
env = initialPathTemplateEnv
(PD.package pkg_descr) (LBI.localComponentId lbi)
(compilerInfo $ LBI.compiler lbi) (LBI.hostPlatform lbi) ++
[(TestSuiteNameVar, toPathTemplate $ PD.testName suite)]
| randen/cabal | Cabal/Distribution/Simple/Test/ExeV10.hs | bsd-3-clause | 6,900 | 0 | 19 | 1,954 | 1,482 | 783 | 699 | 119 | 4 |
{-# LANGUAGE TemplateHaskell, OverloadedStrings, QuasiQuotes #-}
{-# LANGUAGE CPP #-}
module Handler.Skills
( postAllSkillsR
, getAllSkillsR
, getSkillR
) where
import Import
import Handler.Admin (requireAdmin)
skillFormlet :: Form Skill
skillFormlet = renderTable $ Skill
<$> areq textField "Skill name" { fsId = Just "skill-name" } Nothing
postAllSkillsR :: Handler ()
postAllSkillsR = do
requireAdmin
((res, _), _) <- runFormPostNoToken skillFormlet
case res of
FormSuccess skill -> do
_ <- runDB $ insert skill
setMessage "Inserted new skill to skills list"
_ -> setMessage "Invalid skill entered"
redirect AllSkillsR
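-- | List every skill together with its user count, as HTML or JSON.
-- Skills with fewer than 10 users are hidden unless the @show-all@ query
-- parameter is set.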
getAllSkillsR :: Handler TypedContent
getAllSkillsR = do
mu <- maybeAuth
skills' <- runDB $ selectList [] [Asc SkillName] >>= mapM (\(Entity sid s) -> do
users <- count [UserSkillSkill ==. sid]
return ((sid, s), users)
)
showall <- fmap (maybe False id) $ runInputGet $ iopt boolField "show-all"
((_, form), _) <- runFormGet skillFormlet
let threshhold = 10
let skills =
if showall
then skills'
else filter (\(_, x) -> x >= threshhold) skills'
let hidden = length skills' - length skills
let areHidden = hidden > 0
let showAllUrl = (AllSkillsR, [("show-all", "yes")])
render <- getUrlRender
defaultLayoutJson (do
setTitle "Browse all skills"
$(widgetFile "skills")
) $ return $ object
[ "skills" .= array (flip map skills' $ \((sid, Skill name), users) ->
object
[ "id" .= toPathPiece sid
, "name" .= name
, "url" .= render (SkillR sid)
, "users" .= show users
])
]
getSkillR :: SkillId -> Handler TypedContent
getSkillR sid = do
skill <- runDB $ get404 sid
users <- runDB $ do
uids <- fmap (map $ userSkillUser . entityVal)
$ selectList [ UserSkillSkill ==. sid
] []
us <- mapM get404 uids
flip mapM (filter go $ zip uids us) $ \(uid, u) -> do
mun <- fmap (fmap entityVal) $ getBy $ UniqueUsernameUser uid
return ((uid, u), mun)
render <- getUrlRender
defaultLayoutJson (do
setTitle $ toHtml $ skillName skill
$(widgetFile "skill")
) $ return $ object
[ "users" .= array (flip map users $ \x@((uid, u), _) -> object
[ "id" .= toPathPiece uid
, "url" .= render (userR x)
, "name" .= userFullName u
])
]
where
go (_, u) = userVerifiedEmail u && userVisible u && not (userBlocked u)
| danse/haskellers | Handler/Skills.hs | bsd-2-clause | 2,736 | 0 | 20 | 899 | 903 | 448 | 455 | 69 | 2 |
--
-- Copyright (c) 2012 Citrix Systems, Inc.
--
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, write to the Free Software
-- Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
--
{-# LANGUAGE PatternGuards #-}
module Migrations.M_17 (migration) where
import UpgradeEngine
--
-- GLENN SP-1
--
migration = Migration {
sourceVersion = 17
, targetVersion = 18
, actions = act
}
act :: IO ()
act = xformPrimaryJSON $ jsSet "/display-driver-whitelist" (jsBoxString "vga,citrix,intel,RDP,netmeeting,Mirage")
| jean-edouard/manager | upgrade-db/Migrations/M_17.hs | gpl-2.0 | 1,145 | 0 | 8 | 240 | 90 | 61 | 29 | 9 | 1 |
-- | The functions here exploit matches of arity 2 that split the uncovered set
-- in two. Too many for -fmax-pmcheck-models=0!
-- As a result, these functions elicit the symptoms described in the warning
-- message, e.g.
-- - False positives on exhaustivity
-- - Turns redundant into inaccessible clauses
-- - Fails to report redundant matches
module TooManyDeltas where
data T = A | B
-- | Reports that a clause for _ _ is missing.
f :: T -> T -> ()
f A A = ()
-- | Reports that the third clause is inaccessible, when really it is
-- redundant.
g :: T -> T -> ()
g _ A = ()
g A A = () -- inaccessible, correctly flagged
g A A = () -- redundant, not inaccessible!
g _ _ = () -- (this one is not about exhaustivity)
-- | Fails to report that the second clause is redundant.
h :: T -> T -> ()
h A A = () -- covered, emits no warning
h A A = () -- redundant, not covered!
h _ _ = () -- (this one is not about exhaustivity)
| sdiehl/ghc | testsuite/tests/pmcheck/should_compile/TooManyDeltas.hs | bsd-3-clause | 930 | 0 | 7 | 206 | 172 | 98 | 74 | 13 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveGeneric #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Solver.Types.PackageIndex
-- Copyright : (c) David Himmelstrup 2005,
-- Bjorn Bringert 2007,
-- Duncan Coutts 2008
--
-- Maintainer : [email protected]
-- Portability : portable
--
-- An index of packages.
--
module Distribution.Solver.Types.PackageIndex (
-- * Package index data type
PackageIndex,
-- * Creating an index
fromList,
-- * Updates
merge,
insert,
deletePackageName,
deletePackageId,
deleteDependency,
-- * Queries
-- ** Precise lookups
elemByPackageId,
elemByPackageName,
lookupPackageName,
lookupPackageId,
lookupDependency,
-- ** Case-insensitive searches
searchByName,
SearchResult(..),
searchByNameSubstring,
-- ** Bulk queries
allPackages,
allPackagesByName,
) where
import Prelude hiding (lookup)
import Control.Exception (assert)
import qualified Data.Map as Map
import Data.Map (Map)
import Data.List (groupBy, sortBy, isInfixOf)
#if !MIN_VERSION_base(4,8,0)
import Data.Monoid (Monoid(..))
#endif
import Data.Maybe (isJust, fromMaybe)
import GHC.Generics (Generic)
import Distribution.Compat.Binary (Binary)
import Distribution.Compat.Semigroup (Semigroup((<>)))
import Distribution.Package
( PackageName(..), PackageIdentifier(..)
, Package(..), packageName, packageVersion
, Dependency(Dependency) )
import Distribution.Version
( withinRange )
import Distribution.Simple.Utils
( lowercase, comparing )
-- | The collection of information about packages from one or more 'PackageDB's.
--
-- It can be searched efficiently by package name and version.
--
newtype PackageIndex pkg = PackageIndex
-- This index package names to all the package records matching that package
-- name case-sensitively. It includes all versions.
--
-- This allows us to find all versions satisfying a dependency.
-- Most queries are a map lookup followed by a linear scan of the bucket.
--
(Map PackageName [pkg])
deriving (Eq, Show, Read, Functor, Generic)
--FIXME: the Functor instance here relies on no package id changes
instance Package pkg => Semigroup (PackageIndex pkg) where
(<>) = merge
instance Package pkg => Monoid (PackageIndex pkg) where
mempty = PackageIndex Map.empty
mappend = (<>)
--save one mappend with empty in the common case:
mconcat [] = mempty
mconcat xs = foldr1 mappend xs
instance Binary pkg => Binary (PackageIndex pkg)
invariant :: Package pkg => PackageIndex pkg -> Bool
invariant (PackageIndex m) = all (uncurry goodBucket) (Map.toList m)
where
goodBucket _ [] = False
goodBucket name (pkg0:pkgs0) = check (packageId pkg0) pkgs0
where
check pkgid [] = packageName pkgid == name
check pkgid (pkg':pkgs) = packageName pkgid == name
&& pkgid < pkgid'
&& check pkgid' pkgs
where pkgid' = packageId pkg'
--
-- * Internal helpers
--
mkPackageIndex :: Package pkg => Map PackageName [pkg] -> PackageIndex pkg
mkPackageIndex index = assert (invariant (PackageIndex index))
(PackageIndex index)
internalError :: String -> a
internalError name = error ("PackageIndex." ++ name ++ ": internal error")
-- | Lookup a name in the index to get all packages that match that name
-- case-sensitively.
--
lookup :: PackageIndex pkg -> PackageName -> [pkg]
lookup (PackageIndex m) name = fromMaybe [] $ Map.lookup name m
--
-- * Construction
--
-- | Build an index out of a bunch of packages.
--
-- If there are duplicates, later ones mask earlier ones.
--
fromList :: Package pkg => [pkg] -> PackageIndex pkg
fromList pkgs = mkPackageIndex
. Map.map fixBucket
. Map.fromListWith (++)
$ [ (packageName pkg, [pkg])
| pkg <- pkgs ]
where
fixBucket = -- out of groups of duplicates, later ones mask earlier ones
-- but Map.fromListWith (++) constructs groups in reverse order
map head
-- Eq instance for PackageIdentifier is wrong, so use Ord:
. groupBy (\a b -> EQ == comparing packageId a b)
-- relies on sortBy being a stable sort so we
-- can pick consistently among duplicates
. sortBy (comparing packageId)
--
-- * Updates
--
-- | Merge two indexes.
--
-- Packages from the second mask packages of the same exact name
-- (case-sensitively) from the first.
--
merge :: Package pkg => PackageIndex pkg -> PackageIndex pkg -> PackageIndex pkg
merge i1@(PackageIndex m1) i2@(PackageIndex m2) =
assert (invariant i1 && invariant i2) $
mkPackageIndex (Map.unionWith mergeBuckets m1 m2)
-- | Elements in the second list mask those in the first.
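--
-- For example (the package values are illustrative), if both buckets contain
-- a package with the same 'packageId':
--
-- > mergeBuckets [foo10_old] [foo10_new] == [foo10_new]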
mergeBuckets :: Package pkg => [pkg] -> [pkg] -> [pkg]
mergeBuckets [] ys = ys
mergeBuckets xs [] = xs
mergeBuckets xs@(x:xs') ys@(y:ys') =
case packageId x `compare` packageId y of
GT -> y : mergeBuckets xs ys'
EQ -> y : mergeBuckets xs' ys'
LT -> x : mergeBuckets xs' ys
-- | Inserts a single package into the index.
--
-- This is equivalent to (but slightly quicker than) using 'mappend' or
-- 'merge' with a singleton index.
--
insert :: Package pkg => pkg -> PackageIndex pkg -> PackageIndex pkg
insert pkg (PackageIndex index) = mkPackageIndex $
Map.insertWith (\_ -> insertNoDup) (packageName pkg) [pkg] index
where
pkgid = packageId pkg
insertNoDup [] = [pkg]
insertNoDup pkgs@(pkg':pkgs') = case compare pkgid (packageId pkg') of
LT -> pkg : pkgs
EQ -> pkg : pkgs'
GT -> pkg' : insertNoDup pkgs'
-- | Internal delete helper.
--
delete :: Package pkg => PackageName -> (pkg -> Bool) -> PackageIndex pkg -> PackageIndex pkg
delete name p (PackageIndex index) = mkPackageIndex $
Map.update filterBucket name index
where
filterBucket = deleteEmptyBucket
. filter (not . p)
deleteEmptyBucket [] = Nothing
deleteEmptyBucket remaining = Just remaining
-- | Removes a single package from the index.
--
deletePackageId :: Package pkg => PackageIdentifier -> PackageIndex pkg -> PackageIndex pkg
deletePackageId pkgid =
delete (packageName pkgid) (\pkg -> packageId pkg == pkgid)
-- | Removes all packages with this (case-sensitive) name from the index.
--
deletePackageName :: Package pkg => PackageName -> PackageIndex pkg -> PackageIndex pkg
deletePackageName name =
delete name (\pkg -> packageName pkg == name)
-- | Removes all packages satisfying this dependency from the index.
--
deleteDependency :: Package pkg => Dependency -> PackageIndex pkg -> PackageIndex pkg
deleteDependency (Dependency name versionRange) =
  delete name (\pkg -> packageVersion pkg `withinRange` versionRange)
--
-- * Bulk queries
--
-- | Get all the packages from the index.
--
allPackages :: PackageIndex pkg -> [pkg]
allPackages (PackageIndex m) = concat (Map.elems m)
-- | Get all the packages from the index.
--
-- They are grouped by package name, case-sensitively.
--
allPackagesByName :: PackageIndex pkg -> [[pkg]]
allPackagesByName (PackageIndex m) = Map.elems m
--
-- * Lookups
--
elemByPackageId :: Package pkg => PackageIndex pkg -> PackageIdentifier -> Bool
elemByPackageId index = isJust . lookupPackageId index
elemByPackageName :: Package pkg => PackageIndex pkg -> PackageName -> Bool
elemByPackageName index = not . null . lookupPackageName index
-- | Does a lookup by package id (name & version).
--
-- Since multiple package DBs mask each other case-sensitively by package name,
-- we get back at most one package.
--
lookupPackageId :: Package pkg => PackageIndex pkg -> PackageIdentifier -> Maybe pkg
lookupPackageId index pkgid =
case [ pkg | pkg <- lookup index (packageName pkgid)
, packageId pkg == pkgid ] of
[] -> Nothing
[pkg] -> Just pkg
_ -> internalError "lookupPackageIdentifier"
-- | Does a case-sensitive search by package name.
--
lookupPackageName :: Package pkg => PackageIndex pkg -> PackageName -> [pkg]
lookupPackageName index name =
[ pkg | pkg <- lookup index name
, packageName pkg == name ]
-- | Does a case-sensitive search by package name and a range of versions.
--
-- We get back any number of versions of the specified package name, all
-- satisfying the version range constraint.
--
lookupDependency :: Package pkg => PackageIndex pkg -> Dependency -> [pkg]
lookupDependency index (Dependency name versionRange) =
[ pkg | pkg <- lookup index name
, packageName pkg == name
, packageVersion pkg `withinRange` versionRange ]
--
-- * Case insensitive name lookups
--
-- | Does a case-insensitive search by package name.
--
-- If there is only one package that compares case-insensitively to this name
-- then the search is unambiguous and we get back all versions of that package.
-- If several match case-insensitively but one matches exactly then it is also
-- unambiguous.
--
-- If however several match case-insensitively and none match exactly then we
-- have an ambiguous result, and we get back all the versions of all the
-- packages. The list of ambiguous results is split by exact package name. So
-- it is a non-empty list of non-empty lists.
--
searchByName :: PackageIndex pkg
-> String -> [(PackageName, [pkg])]
searchByName (PackageIndex m) name =
[ pkgs
| pkgs@(PackageName name',_) <- Map.toList m
, lowercase name' == lname ]
where
lname = lowercase name
data SearchResult a = None | Unambiguous a | Ambiguous [a]
-- | Does a case-insensitive substring search by package name.
--
-- That is, all packages that contain the given string in their name.
--
searchByNameSubstring :: PackageIndex pkg
-> String -> [(PackageName, [pkg])]
searchByNameSubstring (PackageIndex m) searchterm =
[ pkgs
| pkgs@(PackageName name, _) <- Map.toList m
, lsearchterm `isInfixOf` lowercase name ]
where
lsearchterm = lowercase searchterm
| headprogrammingczar/cabal | cabal-install/Distribution/Solver/Types/PackageIndex.hs | bsd-3-clause | 10,255 | 0 | 13 | 2,289 | 2,256 | 1,230 | 1,026 | 152 | 4 |
{-# LANGUAGE ImpredicativeTypes #-}
module T12644 where
data T a = T1 Int
instance Show (T a) where
show (T1 x) = show x
t1 :: T a
t1 = T1 1
f :: String
f = show t1
| olsner/ghc | testsuite/tests/typecheck/should_compile/T12644.hs | bsd-3-clause | 172 | 0 | 8 | 46 | 75 | 40 | 35 | 9 | 1 |
module SafeRecomp01 where
f :: Int
f = 1
| ghc-android/ghc | testsuite/tests/safeHaskell/safeLanguage/SafeRecomp01.hs | bsd-3-clause | 43 | 0 | 4 | 11 | 14 | 9 | 5 | 3 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module IRTS.CodegenLua(codegenLua) where
import IRTS.CodegenCommon
import IRTS.Lang
import IRTS.Simplified
import Idris.Core.TT as TT
import Data.Bits
import qualified Data.List as DL
import Data.Maybe
import Data.Char
import Data.String(IsString, fromString)
import qualified Data.Text as T
import Language.Lua.PrettyPrinter
import Language.Lua as L
import Paths_idris_lua
codegenLua :: CodeGenerator
codegenLua ci = do let out = Block (map doCodegen (simpleDecls ci) ++ [start]) Nothing
let decls = LocalAssign ["idris"] (Just [TableConst []])
let src = [decls] `meld` out
let code = render src
dir <- getDataDir
let shebang = "#!/usr/bin/env luajit\n"
bilib <- readFile $ dir ++ "/rts/bigint.lua"
rtslib <- readFile $ dir ++ "/rts/rts.lua"
writeFile (outputFile ci) (shebang ++ bilib ++ rtslib ++ code)
render :: Block -> String
render s = displayS (renderPretty 0.4 150 (pprint s)) ""
start = funcall (qName (sMN 0 "runMain")) []
variable s = PrefixExp $ PEVar $ VarName s
pfuncall f a = PrefixExp $ PEFunCall $ NormalFunCall (PEVar (VarName f)) (Args a)
funcall f a = FunCall $ NormalFunCall (PEVar (VarName f)) (Args a)
table t n = PrefixExp $ PEVar $ Select (PEVar (VarName t)) (number (n + 1))
number n = Number $ T.pack $ show n
string s = String $ T.pack $ show s
instance IsString L.Name where
fromString = L.Name . T.pack
luaName n = L.Name $ T.pack $ mangledName n
mangledName n = "idris_" ++ concatMap alphanum (showCG n)
where alphanum x | isAlpha x || isDigit x = [x]
| otherwise = "_" ++ show (fromEnum x) ++ "_"
idrisModule = "idris"
qName s = L.Name $ T.pack $ idrisModule ++ "." ++ mangledName s
var :: TT.Name -> L.Name
var (UN s) = L.Name s
loc :: Int -> L.Name
loc i = L.Name $ T.pack $ "loc" ++ show i
getFunName :: (TT.Name, SDecl) -> Stat
getFunName (n, _) = LocalAssign [luaName n] Nothing
doCodegen :: (TT.Name, SDecl) -> Stat
doCodegen (n, SFun _ args i def) = cgFun n args def
cgFun :: TT.Name -> [TT.Name] -> SExp -> Stat
cgFun n args def =
Assign [SelectName (PEVar $ VarName "idris") (luaName n)]
[EFunDef $ FunBody (map (loc . fst) (zip [0..] args)) False body]
where
doRet bs e = Block bs (Just [e])
(locals, block) = cgBody doRet def
maxArg = length args - 1
body = map local (DL.nub $ filter (> maxArg) locals) `meld` block
concatBody :: ([Int], Block) -> ([Int], Block) -> ([Int], Block)
concatBody (x, Block b1 _) (y, Block b2 r) = (x++y, Block (b1 ++ b2) r)
pasteBlocks :: Block -> Block -> Block
pasteBlocks (Block x1 _) (Block x2 e) = Block (x1++x2) e
meld xs (Block x e) = Block (xs++x) e
local :: Int -> Stat
local n = LocalAssign [loc n] Nothing
addLocal :: Int -> ([Int], Block) -> ([Int], Block)
addLocal n (ls, b) = (n:ls, b)
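-- | Compile a simplified expression to a Lua block. The continuation @ret@
-- assembles the resulting 'Block' from any auxiliary statements plus the
-- expression that holds the value (e.g. 'cgFun' wraps it in a @return@).
-- The list of 'Int's collects the local slots referenced, so 'cgFun' can
-- declare every slot beyond the function arguments with a @local@ statement.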
cgBody :: ([Stat] -> Exp -> Block) -> SExp -> ([Int], Block)
cgBody ret (SV (Glob n)) = ([], ret [] $ variable (luaName n))
cgBody ret (SV (Loc i)) = ([i], ret [] $ variable (loc i))
cgBody ret (SApp _ f args) = ([], ret [] $ pfuncall (qName f)
(map (variable . cgVar) args))
cgBody ret (SLet (Loc i) v sc)
= concatBody
(addLocal i $ cgBody (\x y -> Block
(x ++ [Assign [VarName $ loc i] [y]]) Nothing) v)
(cgBody ret sc)
cgBody ret (SUpdate n e)
= cgBody ret e
cgBody ret (SProj e i)
= ([], ret [] $ table (cgVar e) i)
cgBody ret (SCon _ t n args)
= ([], ret [] $ TableConst ((Field $ number t):map (Field . variable . cgVar) args))
cgBody ret (SCase _ e alts) = (concat locals, Block [If clauses Nothing] Nothing)
where conCase (SConCase _ _ _ _ _) = True
conCase _ = False
scrvar = cgVar e
scr = if any conCase alts then table scrvar 0 else variable scrvar
(locals, clauses) = unzip $ map (cgAlt ret scrvar scr) alts
cgBody ret (SChkCase e alts)
= ( concat locals, Block [If clauses Nothing] Nothing)
where conCase (SConCase _ _ _ _ _) = True
conCase _ = False
scrvar = cgVar e
scr = if any conCase alts then table scrvar 0 else variable scrvar
(locals, clauses) = unzip $ map (cgAlt ret scrvar scr) alts
cgBody ret (SConst c) = ([], ret [] $ cgConst c)
cgBody ret (SOp op args) = ([], ret [] $ cgOp op (map (variable . cgVar) args))
cgBody ret SNothing = ([], ret [] Nil)
cgBody ret (SError x) = ([], ret [] $ String $ T.pack $ "error( " ++ show x ++ ")")
cgBody ret (SForeign rt name args) = ([], ret [] $ handleForeign rt name args)
cgBody ret _ = ([], ret [] $ String "error(\"NOT IMPLEMENTED!!!!\")")
cgAlt :: ([Stat] -> Exp -> Block) -> L.Name -> Exp -> SAlt -> ([Int], (Exp, Block))
cgAlt ret scr test (SConstCase t exp)
= let (ls, block) = cgBody ret exp in
(ls, (Binop L.EQ test (cgConst t), block))
cgAlt ret scr test (SDefaultCase exp) =
let (ls, block) = cgBody ret exp in (ls, (L.Bool True, block))
cgAlt ret scr test (SConCase lv t n args exp)
= (locals lv args ++ ls, (Binop L.EQ test (number t),
project 1 lv args `meld` block))
where project i v [] = []
project i v (n : ns) = Assign [VarName $ loc v] [table scr i]:project (i + 1) (v + 1) ns
locals :: Int -> [a] -> [Int]
locals v [] = []
locals v (n:ns) = v:locals (v+1) ns
(ls, block) = cgBody ret exp
meld xs (Block x e) = Block (xs++x) e
cgVar :: LVar -> L.Name
cgVar (Loc i) = loc i
cgVar (Glob n) = var n
cgConst :: Const -> Exp
cgConst (I i) = number i
cgConst (Fl f) = number f
cgConst (Ch i) = number (ord i)
cgConst (BI i) = pfuncall "bigint" [String $ T.pack $ show i]
cgConst (TT.Str s) = String $ T.pack $ show s
cgConst (B8 b) = number b
cgConst (B16 b) = number b
cgConst (B32 b) = number b
cgConst (B64 b) | b < 2^50 = pfuncall "bigint" [number b]
| otherwise = pfuncall "bigint" [string b]
cgConst TheWorld = String "0"
cgConst x | isTypeConst x = String "0"
cgConst x = error $ "Constant " ++ show x ++ " not compilable yet"
luaAbs :: Exp -> Exp
luaAbs x = pfuncall "math.abs" [x]
boolInt :: Exp -> Exp
boolInt x = pfuncall "boolint" [x]
cap :: IntTy -> Exp -> Exp
cap (ITFixed IT64) x = Binop Mod x $ pfuncall "bigint" [String $ T.pack $ show (2^64)]
cap (ITFixed b) x = Binop Mod x $ number $ 2^nativeTyWidth b
cap _ x = x
capa :: ArithTy -> Exp -> Exp
capa (ATInt i) x = cap i x
capa _ x = x
cgOp :: PrimFn -> [Exp] -> Exp
cgOp (LPlus t) [l, r]
= capa t $ Binop Add l r
cgOp (LMinus t) [l, r]
= capa t $ Binop Sub l r
cgOp (LTimes t) [l, r]
= capa t $ Binop Mul l r
cgOp (LUDiv ITBig) [l, r]
= pfuncall "big_abs" [Binop Div l r]
cgOp (LUDiv (ITFixed IT64)) [l, r]
= pfuncall "big_abs" [Binop Div l r]
cgOp (LUDiv i) [l, r]
= cap i $ luaAbs $pfuncall "math.floor" [Binop Div l r]
cgOp (LSDiv (ATInt ITBig)) [l, r]
= Binop Div l r
cgOp (LSDiv (ATInt (ITFixed IT64))) [l, r]
= Binop Div l r
cgOp (LSDiv (ATInt i)) [l, r]
= cap i $ pfuncall "math.floor" [Binop Div l r]
cgOp (LSDiv ATFloat) [l, r]
= Binop Div l r
cgOp (LURem ITBig) [l, r]
= pfuncall "big_abs" [Binop Mod l r]
cgOp (LURem (ITFixed IT64)) [l, r]
= pfuncall "big_abs" [Binop Mod l r]
cgOp (LURem i) [l, r]
= cap i $ luaAbs $ Binop Mod l r
cgOp (LSRem t) [l, r]
= capa t $ Binop Mod l r
cgOp (LAnd ITBig) [l, r] = pfuncall "big_and" [l, r]
cgOp (LAnd (ITFixed IT64)) [l, r] = pfuncall "big_and" [l, r]
cgOp (LAnd i) [l, r]
= cap i $ pfuncall "bit.band" [l, r]
cgOp (LOr ITBig) [l, r] = pfuncall "big_or" [l, r]
cgOp (LOr (ITFixed IT64)) [l, r] = pfuncall "big_or" [l, r]
cgOp (LOr i) [l, r]
= cap i $ pfuncall "bit.bor" [l, r]
cgOp (LXOr ITBig) [l, r] = pfuncall "big_xor" [l, r]
cgOp (LXOr (ITFixed IT64)) [l, r] = pfuncall "big_xor" [l, r]
cgOp (LXOr i) [l, r]
= cap i $ pfuncall "bit.bxor" [l, r]
cgOp (LCompl ITBig) [b] = pfuncall "big_not" [b]
cgOp (LCompl (ITFixed IT64)) [b] = pfuncall "big_not" [b, Number "64"]
cgOp (LCompl i) [b]
= cap i $ pfuncall "bit.bnot" [b]
cgOp (LSHL ITBig) [l, r] = pfuncall "big_lshift" [l, r]
cgOp (LSHL (ITFixed IT64)) [l, r] = pfuncall "big_lshift" [l, r]
cgOp (LSHL i) [l, r]
= cap i $ pfuncall "bit.lshift" [l, r]
cgOp (LLSHR ITBig) [l, r] = pfuncall "big_rshift" [l, r]
cgOp (LLSHR (ITFixed IT64)) [l, r] = pfuncall "big_rshift" [l, r]
cgOp (LLSHR i) [l, r]
= cap i $ pfuncall "bit.rshift" [l, r]
cgOp (LASHR ITBig) [l, r] = pfuncall "big_rshift" [l, r]
cgOp (LASHR (ITFixed IT64)) [l, r] = pfuncall "big_arshift64" [l, r]
cgOp (LASHR i) [l, r]
= cap i $ pfuncall "bit.arshift" [l, r]
cgOp (LEq _) [l, r]
= boolInt $ Binop L.EQ l r
cgOp (LLt _) [l, r]
= boolInt $ Binop L.LT l r
cgOp (LLe _) [l, r]
= boolInt $ Binop LTE l r
cgOp (LGt _) [l, r]
= boolInt $ Binop L.GT l r
cgOp (LSLt _) [l, r]
= boolInt $ Binop L.LT l r
cgOp (LSLe _) [l, r]
= boolInt $ Binop LTE l r
cgOp (LSGt _) [l, r]
= boolInt $ Binop L.GT l r
cgOp (LSGe _) [l, r]
= boolInt $ Binop GTE l r
cgOp (LSExt ITBig (ITFixed IT64)) [x] = x
cgOp (LSExt (ITFixed IT64) ITBig) [x] = x
cgOp (LSExt _ (ITFixed IT64)) [x] = pfuncall "bigint" [x]
cgOp (LSExt _ ITBig) [x] = pfuncall "bigint" [x]
cgOp (LSExt _ _) [x] = x
cgOp (LZExt ITBig (ITFixed IT64)) [x] = x
cgOp (LZExt (ITFixed IT64) ITBig) [x] = x
cgOp (LZExt _ (ITFixed IT64)) [x] = pfuncall "bigint" [x]
cgOp (LZExt _ ITBig) [x] = pfuncall "bigint" [x]
cgOp (LZExt _ _) [x] = x
cgOp (LTrunc (ITFixed IT64) ITBig) [x] = x
cgOp (LTrunc _ ITBig) [x] = pfuncall "bigint" [x]
cgOp (LTrunc ITBig it@(ITFixed IT64)) [x] = cap it x
cgOp (LTrunc _ (ITFixed IT64)) [x] = pfuncall "bigint" [x]
cgOp (LTrunc ITBig i) [x] = cap i $ pfuncall "big_trunc32" [x]
cgOp (LTrunc (ITFixed IT64) i) [x] = cap i $ pfuncall "big_trunc32" [x]
cgOp (LTrunc _ i) [x] = cap i x
cgOp LStrConcat [l,r] = Binop Concat l r
cgOp LStrLt [l,r] = boolInt $ Binop L.LT l r
cgOp LStrEq [l,r] = boolInt $ Binop L.EQ l r
cgOp LStrLen [x] = pfuncall "string.len" [x]
cgOp (LIntFloat _) [x] = x
cgOp (LFloatInt _) [x] = pfuncall "math.floor" [x]
cgOp (LIntStr _) [x] = pfuncall "tostring" [x]
cgOp (LStrInt ITBig) [x] = pfuncall "bigint" [x]
cgOp (LStrInt (ITFixed IT64)) [x] = pfuncall "bigint" [x]
cgOp (LStrInt _) [x] = pfuncall "tonumber" [x]
cgOp LFloatStr [x] = pfuncall "tostring" [x]
cgOp LStrFloat [x] = pfuncall "tonumber" [x]
cgOp (LChInt _) [x] = x
cgOp (LIntCh _) [x] = x
cgOp (LBitCast _ _) [x] = x
cgOp LFExp [x] = pfuncall "math.exp" [x]
cgOp LFLog [x] = pfuncall "math.log" [x]
cgOp LFSin [x] = pfuncall "math.sin" [x]
cgOp LFCos [x] = pfuncall "math.cos" [x]
cgOp LFTan [x] = pfuncall "math.tan" [x]
cgOp LFASin [x] = pfuncall "math.asin" [x]
cgOp LFACos [x] = pfuncall "math.acos" [x]
cgOp LFATan [x] = pfuncall "math.atan" [x]
cgOp LFSqrt [x] = pfuncall "math.sqrt" [x]
cgOp LFFloor [x] = pfuncall "math.floor" [x]
cgOp LFCeil [x] = pfuncall "math.ceil" [x]
cgOp LFNegate [x] = Unop Neg x
cgOp LStrHead [x] = pfuncall "string.byte" [x, number 1]
cgOp LStrTail [x] = pfuncall "string.sub" [x, number 2]
cgOp LStrCons [l,r] = Binop Concat (pfuncall "string.char" [l]) r
cgOp LStrIndex [x, y] = pfuncall "string.byte" [pfuncall "string.sub" [x, Binop Add y (number 1), Binop Add y (number 2)], number 1]
cgOp LStrRev [x] = pfuncall "string.reverse" [x]
cgOp LStrSubstr [x, y, z] = pfuncall "string.sub" [x, Binop Add y (number 1), Binop Add z (number 1)]
cgOp LWriteStr [_,s] = pfuncall "io.output(io.stdout):write" [s]
cgOp LReadStr [_] = pfuncall "io.input(io.stdin):read" []
cgOp LSystemInfo [x] = pfuncall "sysinfo" [x]
-- cgOp LFork
-- cgOp LPar
-- cgOp (LExternal n)
-- cgOp LNoOp
cgOp op exps = pfuncall "print" [String $ T.pack $ "error(\"OPERATOR " ++ show op ++ " NOT IMPLEMENTED!!!!\")"]
-- error("Operator " ++ show op ++ " not implemented")
handleForeign :: FDesc -> FDesc -> [(FDesc, LVar)] -> Exp
handleForeign ret name args = undefined
| melted/idris-lua | src/IRTS/CodegenLua.hs | mit | 12,063 | 0 | 18 | 2,909 | 6,377 | 3,287 | 3,090 | 275 | 5 |
module Main (main) where
import Control.Exception as E
import Text.Pandoc.JSON
import AsciiMath
main :: IO ()
main = E.catch (toJSONFilter asciimath) printAndExit
where asciimath (Math t s) = Math t (run s)
asciimath x = x
| Kerl13/AsciiMath | src/bin/Pandoc-AsciiMath.hs | mit | 236 | 0 | 9 | 49 | 90 | 49 | 41 | 8 | 2 |
{- CIS 194 HW 11
due Monday, 8 April
-}
module SExpr where
import AParser
import Control.Applicative
import Data.Char
------------------------------------------------------------
-- 1. Parsing repetitions
------------------------------------------------------------
zeroOrMore :: Parser a -> Parser [a]
zeroOrMore p = oneOrMore p <|> pure []
oneOrMore :: Parser a -> Parser [a]
oneOrMore p = (:) <$> p <*> zeroOrMore p
------------------------------------------------------------
-- 2. Utilities
------------------------------------------------------------
spaces :: Parser String
spaces = zeroOrMore (satisfy isSpace)
ident :: Parser String
ident = (:) <$> (satisfy isAlpha) <*> zeroOrMore (satisfy isAlphaNum)
------------------------------------------------------------
-- 3. Parsing S-expressions
------------------------------------------------------------
-- An "identifier" is represented as just a String; however, only
-- those Strings consisting of a letter followed by any number of
-- letters and digits are valid identifiers.
type Ident = String
-- An "atom" is either an integer value or an identifier.
data Atom = N Integer | I Ident
deriving Show
-- An S-expression is either an atom, or a list of S-expressions.
data SExpr = A Atom
| Comb [SExpr]
deriving Show
parseSExpr :: Parser SExpr
parseSExpr = Comb <$> comb <|> A <$> atom
where atom = spaces *> (N <$> posInt) <|> (I <$> ident) <* spaces
comb = openParen *> zeroOrMore parseSExpr <* closeParen
openParen = spaces *> char '(' *> spaces
        closeParen = spaces <* char ')' <* spaces
| limdauto/learning-haskell | cis194/week10-11/SExpr.hs | mit | 1,620 | 0 | 11 | 286 | 318 | 176 | 142 | 24 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module Nauva.Product.Template.App (app) where
import Nauva.App
import Nauva.View
import Language.Haskell.TH
import Language.Haskell.TH.Syntax
app :: App
app = App
{ rootElement = \appH -> constHead (headH appH) headElements $ div_ [style_ style]
[ header
, intro
]
}
where
headElements =
[ style_ [str_ "*,*::before,*::after{box-sizing:inherit}body{margin:0;box-sizing:border-box;font-family:-apple-system, BlinkMacSystemFont, \"Segoe UI\", Roboto, Helvetica, Arial, sans-serif, \"Apple Color Emoji\", \"Segoe UI Emoji\", \"Segoe UI Symbol\"}"]
]
style = mkStyle $ do
textAlign center
header :: Element
header = div_ [style_ style]
[ h1_ [str_ "Welcome to Nauva"]
]
where
style = mkStyle $ do
backgroundColor "#222"
height "150px"
padding "20px"
color "white"
intro :: Element
intro = p_ [style_ style]
[ str_ "To get started, edit "
, code_ [str_ thisFilePath]
, str_ " and save to reload."
]
where
style = mkStyle $ do
fontSize "large"
-- The path to this file. Here we use a bit of TemplateHaskell magic
-- so that we can show the exact path the user has to edit to get
-- started
thisFilePath = $(lift =<< loc_filename <$> location)
| wereHamster/nauva | product/template/shared/src/Nauva/Product/Template/App.hs | mit | 1,418 | 0 | 11 | 392 | 277 | 147 | 130 | 32 | 1 |
import Data.Array (Array, bounds, elems, listArray, (!))
import Data.List (intercalate)
import System.Random
data Point = Point Double Double
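-- Plays the chaos game: starting from a random point in the unit square,
-- repeatedly pick one of the weighted maps of the Hutchinson operator
-- (using the cumulative probabilities) and apply it to the previous point,
-- keeping the first n points generated.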
chaosGame :: RandomGen g => g -> Int -> Array Int (Double, (Point -> Point)) -> [Point]
chaosGame g n hutchinson = take n points
where
(x, g') = random g
(y, g'') = random g'
cumulProbabilities = scanl1 (+) $ map fst $ elems hutchinson
to_choice x = length $ takeWhile (x >) cumulProbabilities
picks = map to_choice $ randomRs (0, 1) g''
step = fmap snd hutchinson
points = Point x y : zipWith (step !) picks points
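-- An affine map: the first tuple holds the matrix entries (xx, xy, yx, yy),
-- the second the translation (a, b), sending (x, y) to
-- (a + xx*x + xy*y, b + yx*x + yy*y).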
affine :: (Double, Double, Double, Double) -> (Double, Double) -> Point -> Point
affine (xx, xy, yx, yy) (a, b) (Point x y) = Point (a + xx * x + xy * y) (b + yx * x + yy * y)
showPoint :: Point -> String
showPoint (Point x y) = show x ++ "\t" ++ show y
main :: IO ()
main = do
g <- newStdGen
let barnsley =
listArray
(0, 3)
[ (0.01, affine (0, 0, 0, 0.16) (0, 0)),
(0.85, affine (0.85, 0.04, -0.04, 0.85) (0, 1.6)),
(0.07, affine (0.2, -0.26, 0.23, 0.22) (0, 1.6)),
(0.07, affine (-0.15, 0.28, 0.26, 0.24) (0, 0.44))
]
points = chaosGame g 100000 barnsley
writeFile "out.dat" $ intercalate "\n" $ map showPoint points
| leios/algorithm-archive | contents/barnsley/code/haskell/Barnsley.hs | mit | 1,302 | 0 | 15 | 348 | 614 | 336 | 278 | 29 | 1 |
module Proteome.Test.TagsTest where
import Hedgehog ((===))
import Path (Abs, Dir, File, Path, Rel, parseAbsDir, relfile, (</>))
import Path.IO (doesFileExist)
import Ribosome.Config.Setting (updateSetting)
import Ribosome.Test.Run (UnitTest)
import Ribosome.Test.Unit (tempDir)
import Proteome.Data.Env (Env)
import qualified Proteome.Data.Env as Env (mainProject)
import qualified Proteome.Data.Project as Project (lang, meta)
import Proteome.Data.ProjectLang (ProjectLang (ProjectLang))
import Proteome.Data.ProjectMetadata (ProjectMetadata (DirProject))
import Proteome.Data.ProjectName (ProjectName (ProjectName))
import Proteome.Data.ProjectRoot (ProjectRoot (ProjectRoot))
import Proteome.Data.ProjectType (ProjectType (ProjectType))
import qualified Proteome.Settings as S (tagsArgs, tagsCommand, tagsFork)
import Proteome.Tags (proTags)
import Proteome.Test.Config (vars)
import Proteome.Test.Unit (ProteomeTest, testWithDef)
main :: Path Abs Dir -> ProjectMetadata
main root = DirProject (ProjectName "flagellum") (ProjectRoot root) (Just (ProjectType "haskell"))
tagsTest :: ProteomeTest ()
tagsTest = do
root <- parseAbsDir =<< tempDir "projects/haskell/flagellum"
setL @Env (Env.mainProject . Project.meta) (main root)
setL @Env (Env.mainProject . Project.lang) (Just (ProjectLang "idris"))
updateSetting S.tagsCommand "touch"
updateSetting S.tagsArgs "tags-{langsComma}"
updateSetting S.tagsFork False
proTags
let tagsFile = root </> [relfile|tags-idris|]
exists <- liftIO $ doesFileExist tagsFile
True === exists
test_simpleTags :: UnitTest
test_simpleTags =
vars >>= testWithDef tagsTest
| tek/proteome | packages/test/test/Proteome/Test/TagsTest.hs | mit | 1,631 | 0 | 11 | 189 | 496 | 286 | 210 | -1 | -1 |
{-# LANGUAGE MultiParamTypeClasses #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module System.Process.ByteString where
import Control.Applicative ((<$>))
import Control.Monad
import Data.ByteString (ByteString)
import Data.ListLike.IO (hGetContents)
import Data.Word (Word8)
import Prelude hiding (null)
import System.Process
import System.Process.Common
import System.Exit (ExitCode)
-- | Like 'System.Process.readProcessWithExitCode', but using 'ByteString'
instance ListLikeProcessIO ByteString Word8 where
forceOutput = return
readChunks h = (: []) <$> hGetContents h
-- | Specialized version for backwards compatibility.
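--
-- A usage sketch (assumes a @wc@ executable on the PATH; the stdin literal
-- relies on OverloadedStrings):
--
-- > (code, out, err) <- readProcessWithExitCode "wc" ["-c"] "hello"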
readProcessWithExitCode :: FilePath -> [String] -> ByteString -> IO (ExitCode, ByteString, ByteString)
readProcessWithExitCode = System.Process.Common.readProcessWithExitCode
readCreateProcessWithExitCode :: CreateProcess -> ByteString -> IO (ExitCode, ByteString, ByteString)
readCreateProcessWithExitCode = System.Process.Common.readCreateProcessWithExitCode
| davidlazar/process-extras | src/System/Process/ByteString.hs | mit | 996 | 0 | 9 | 106 | 200 | 121 | 79 | 19 | 1 |
module Text.Mustache.Types
where
import Data.Text (Text)
data Chunk = Var KeyPath
| UnescapedVar KeyPath
| Section KeyPath [Chunk] (Maybe Text) -- separator text
| InvertedSection KeyPath [Chunk]
| SetDelimiter String String -- a stateful operation
| Plain Text
| Partial FilePath
| Comment Text
deriving (Show, Read, Eq)
type KeyPath = [Key]
data Key = Key Text | Index Int deriving (Eq, Show, Read)
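-- A rough illustration of the representation (the actual parser lives in a
-- separate module of this package): a template such as
-- @Hello {{user.name}}@ would correspond to
--
-- > [Plain "Hello ", Var [Key "user", Key "name"]]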
| danchoi/mustache-haskell | Text/Mustache/Types.hs | mit | 480 | 0 | 8 | 143 | 136 | 80 | 56 | 13 | 0 |
module AssociateTypeSpec (spec) where
import Test.Hspec
import Dicom.Network.Associate.Types
import Data.Binary
buildTestAssociateRQ::AssociateRQPDU
buildTestAssociateRQ = AssociateRQPDU{
arqPDUHeader = PDUHeader A_ASSOCIATE_RQ 0 76
, arqReserved = 0
, arqProtocolVersion = 0
, calledAETitle = "CALLEDAETITLE "
, callingAETitle = "CALLINGAETITLE "
, arqVariableItems = [buildARQItem]
, arqReserved2 = replicate 32 0 }
buildARQItem::ARQItem
buildARQItem = ApplicationContextItem {
acnHeader = ARQItemHeader ApplicationContextItemT 0 4
, acnContextName = "abcd"}
spec::Spec
spec = describe "Test Associate Types" $ do
it "AssociateRQ Type" $
let item = buildTestAssociateRQ
packed = encode item
in decode packed `shouldBe` item
it "AssociateRQ Type" $ do
let packed = packPDU buildTestAssociateRQ
unpackPDU packed `shouldBe` buildTestAssociateRQ
| danplubell/dicom-network | test-suite/AssociateTypeSpec.hs | mit | 1,027 | 0 | 14 | 287 | 215 | 119 | 96 | 26 | 1 |
{-# LANGUAGE PatternSynonyms #-}
-- For HasCallStack compatibility
{-# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module JSDOM.Generated.VTTRegionList
(item, item_, itemUnsafe, itemUnchecked, getRegionById,
getRegionById_, getRegionByIdUnsafe, getRegionByIdUnchecked,
getLength, VTTRegionList(..), gTypeVTTRegionList)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, realToFrac, fmap, Show, Read, Eq, Ord, Maybe(..))
import qualified Prelude (error)
import Data.Typeable (Typeable)
import Data.Traversable (mapM)
import Language.Javascript.JSaddle (JSM(..), JSVal(..), JSString, strictEqual, toJSVal, valToStr, valToNumber, valToBool, js, jss, jsf, jsg, function, asyncFunction, new, array, jsUndefined, (!), (!!))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import JSDOM.Types
import Control.Applicative ((<$>))
import Control.Monad (void)
import Control.Lens.Operators ((^.))
import JSDOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import JSDOM.Enums
-- | <https://developer.mozilla.org/en-US/docs/Web/API/VTTRegionList.item Mozilla VTTRegionList.item documentation>
item ::
(MonadDOM m) => VTTRegionList -> Word -> m (Maybe VTTRegion)
item self index
= liftDOM ((self ^. jsf "item" [toJSVal index]) >>= fromJSVal)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/VTTRegionList.item Mozilla VTTRegionList.item documentation>
item_ :: (MonadDOM m) => VTTRegionList -> Word -> m ()
item_ self index
= liftDOM (void (self ^. jsf "item" [toJSVal index]))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/VTTRegionList.item Mozilla VTTRegionList.item documentation>
itemUnsafe ::
(MonadDOM m, HasCallStack) => VTTRegionList -> Word -> m VTTRegion
itemUnsafe self index
= liftDOM
(((self ^. jsf "item" [toJSVal index]) >>= fromJSVal) >>=
maybe (Prelude.error "Nothing to return") return)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/VTTRegionList.item Mozilla VTTRegionList.item documentation>
itemUnchecked ::
(MonadDOM m) => VTTRegionList -> Word -> m VTTRegion
itemUnchecked self index
= liftDOM
((self ^. jsf "item" [toJSVal index]) >>= fromJSValUnchecked)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/VTTRegionList.getRegionById Mozilla VTTRegionList.getRegionById documentation>
getRegionById ::
(MonadDOM m, ToJSString id) =>
VTTRegionList -> id -> m (Maybe VTTRegion)
getRegionById self id
= liftDOM
((self ^. jsf "getRegionById" [toJSVal id]) >>= fromJSVal)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/VTTRegionList.getRegionById Mozilla VTTRegionList.getRegionById documentation>
getRegionById_ ::
(MonadDOM m, ToJSString id) => VTTRegionList -> id -> m ()
getRegionById_ self id
= liftDOM (void (self ^. jsf "getRegionById" [toJSVal id]))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/VTTRegionList.getRegionById Mozilla VTTRegionList.getRegionById documentation>
getRegionByIdUnsafe ::
(MonadDOM m, ToJSString id, HasCallStack) =>
VTTRegionList -> id -> m VTTRegion
getRegionByIdUnsafe self id
= liftDOM
(((self ^. jsf "getRegionById" [toJSVal id]) >>= fromJSVal) >>=
maybe (Prelude.error "Nothing to return") return)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/VTTRegionList.getRegionById Mozilla VTTRegionList.getRegionById documentation>
getRegionByIdUnchecked ::
(MonadDOM m, ToJSString id) => VTTRegionList -> id -> m VTTRegion
getRegionByIdUnchecked self id
= liftDOM
((self ^. jsf "getRegionById" [toJSVal id]) >>= fromJSValUnchecked)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/VTTRegionList.length Mozilla VTTRegionList.length documentation>
getLength :: (MonadDOM m) => VTTRegionList -> m Word
getLength self
= liftDOM (round <$> ((self ^. js "length") >>= valToNumber))
| ghcjs/jsaddle-dom | src/JSDOM/Generated/VTTRegionList.hs | mit | 4,088 | 0 | 14 | 655 | 974 | 550 | 424 | -1 | -1 |
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.ErrorEvent
(js_getMessage, getMessage, js_getFilename, getFilename,
js_getLineno, getLineno, js_getColno, getColno, ErrorEvent,
castToErrorEvent, gTypeErrorEvent)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSVal(..), JSString)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSVal(..), FromJSVal(..))
import GHCJS.Marshal.Pure (PToJSVal(..), PFromJSVal(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.JSFFI.Generated.Enums
foreign import javascript unsafe "$1[\"message\"]" js_getMessage ::
ErrorEvent -> IO JSString
-- | <https://developer.mozilla.org/en-US/docs/Web/API/ErrorEvent.message Mozilla ErrorEvent.message documentation>
getMessage ::
(MonadIO m, FromJSString result) => ErrorEvent -> m result
getMessage self = liftIO (fromJSString <$> (js_getMessage (self)))
foreign import javascript unsafe "$1[\"filename\"]" js_getFilename
:: ErrorEvent -> IO JSString
-- | <https://developer.mozilla.org/en-US/docs/Web/API/ErrorEvent.filename Mozilla ErrorEvent.filename documentation>
getFilename ::
(MonadIO m, FromJSString result) => ErrorEvent -> m result
getFilename self
= liftIO (fromJSString <$> (js_getFilename (self)))
foreign import javascript unsafe "$1[\"lineno\"]" js_getLineno ::
ErrorEvent -> IO Word
-- | <https://developer.mozilla.org/en-US/docs/Web/API/ErrorEvent.lineno Mozilla ErrorEvent.lineno documentation>
getLineno :: (MonadIO m) => ErrorEvent -> m Word
getLineno self = liftIO (js_getLineno (self))
foreign import javascript unsafe "$1[\"colno\"]" js_getColno ::
ErrorEvent -> IO Word
-- | <https://developer.mozilla.org/en-US/docs/Web/API/ErrorEvent.colno Mozilla ErrorEvent.colno documentation>
getColno :: (MonadIO m) => ErrorEvent -> m Word
getColno self = liftIO (js_getColno (self)) | manyoo/ghcjs-dom | ghcjs-dom-jsffi/src/GHCJS/DOM/JSFFI/Generated/ErrorEvent.hs | mit | 2,420 | 24 | 10 | 326 | 588 | 351 | 237 | 38 | 1 |
--
-- Chapter 2, 2.1 Lists
--
module Stack where
import Prelude as P
-- According to the book, method names in the Stack type class use list
-- nomenclature (cons, head, tail) rather than stack ones (push, top, pop),
-- because it regards stacks as an instance of the general class of sequences.
class Stack t where
empty :: t a
isEmpty :: t a -> Bool
cons :: a -> t a -> t a
-- suffix with 'S' (for Stack) to not clash with Haskell Prelude
headS :: t a -> a
tailS :: t a -> t a
-- default implementation of concatenation using functions above
(+++) :: t a -> t a -> t a
xs +++ ys = if isEmpty xs then ys
else cons (headS xs) (tailS xs +++ ys)
--
-- Exercise 2.1
--
-- A rather concise implementation, showing the expressiveness of Haskell:
-- it is generic, using only the functions available in this type class.
--
-- An implementation using the built-in list type would be trivial, e.g.:
--
-- suffixes :: [a] -> [[a]]
-- suffixes [] = [[]]
-- suffixes xs@(_:ys) = xs:suffixes ys
--
suffixes :: t a -> t (t a)
suffixes xs = if isEmpty xs then empty
else cons xs (suffixes $ tailS xs)
-- Data type using built-in Haskell list type.
data List a = List [a] deriving Show
-- and its implementation of Stack
instance Stack List where
empty = List []
isEmpty (List xs) = null xs
cons x (List xs) = List (x:xs)
headS (List xs) = P.head xs
tailS (List xs) = List (P.tail xs)
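-- For example, with the List instance just defined (expected GHCi output):
--
-- >>> suffixes (List [1,2,3])
-- List [List [1,2,3],List [2,3],List [3]]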
-- More "raw" data type using tuples.
data CustomList a = Nil | Cons (a, CustomList a) deriving Show
-- and its implementation of Stack
instance Stack CustomList where
empty = Nil
isEmpty Nil = True
isEmpty _ = False
cons x xs = Cons (x, xs)
headS Nil = undefined
headS (Cons (x, _)) = x
tailS Nil = undefined
tailS (Cons (_, xs)) = xs
-- Testing methods
main :: IO ()
main = do
mainList
mainCustomList
-- Generic tester for Stack type class
-- TODO: obviously I'm doing something wrong with 'Show' constraints
mainStack :: (Stack s, Show (s (s a)), Show (s a), Show a) => s a -> s a -> s a -> IO ()
mainStack emptyL xs ys = do
putStrLn $ "isEmpty empty: " ++ show (isEmpty emptyL)
putStrLn $ "isEmpty xs: " ++ show (isEmpty xs)
putStrLn $ "headS xs: " ++ show (headS xs)
putStrLn $ "tailS xs: " ++ show (tailS xs)
putStrLn $ "empty +++ xs: " ++ show (emptyL +++ xs)
putStrLn $ "xs +++ ys : " ++ show (xs +++ ys)
putStrLn $ "suffixes xs : " ++ show (suffixes xs)
-- Tester for List data type
mainList :: IO ()
mainList =
let emptyL = List []
xs = List [1..6]
ys = List [7..12]
in mainStack emptyL xs ys
-- Tester for CustomList data type
mainCustomList :: IO ()
mainCustomList =
let emptyL = Nil
xs = Cons (1, Cons (2, Cons (3, Cons (4, Cons (5, Cons (6, Nil))))))
ys = Cons (7, Cons (8, Cons (9, Cons (10, Cons (11, Cons (12, Nil))))))
in mainStack emptyL xs ys
| mkrauskopf/okasaki-pfds-haskell | src/mk/okasaki/chapter02/Stack.hs | mit | 2,943 | 0 | 20 | 761 | 952 | 496 | 456 | 56 | 1 |
{-|
Module : Control.Monad.Bayes.Traced.Static
Description : Distributions on execution traces of full programs
Copyright : (c) Adam Scibior, 2015-2020
License : MIT
Maintainer : [email protected]
Stability : experimental
Portability : GHC
-}
module Control.Monad.Bayes.Traced.Static (
Traced,
hoistT,
marginal,
mhStep,
mh
) where
import Control.Monad.Trans
import Control.Applicative (liftA2)
import Control.Monad.Bayes.Class
import Control.Monad.Bayes.Weighted as Weighted
import Control.Monad.Bayes.Free as FreeSampler
import Control.Monad.Bayes.Traced.Common
-- | A tracing monad where only a subset of random choices are traced.
-- The random choices that are not to be traced should be lifted
-- from the transformed monad.
data Traced m a = Traced (Weighted (FreeSampler m) a) (m (Trace a))
traceDist :: Traced m a -> m (Trace a)
traceDist (Traced _ d) = d
model :: Traced m a -> Weighted (FreeSampler m) a
model (Traced m _) = m
instance Monad m => Functor (Traced m) where
fmap f (Traced m d) = Traced (fmap f m) (fmap (fmap f) d)
instance Monad m => Applicative (Traced m) where
pure x = Traced (pure x) (pure (pure x))
(Traced mf df) <*> (Traced mx dx) = Traced (mf <*> mx) (liftA2 (<*>) df dx)
instance Monad m => Monad (Traced m) where
(Traced mx dx) >>= f = Traced my dy where
my = mx >>= model . f
dy = dx `bind` (traceDist . f)
instance MonadTrans Traced where
lift m = Traced (lift $ lift m) (fmap pure m)
instance MonadSample m => MonadSample (Traced m) where
random = Traced random (fmap singleton random)
instance MonadCond m => MonadCond (Traced m) where
score w = Traced (score w) (score w >> pure (scored w))
instance MonadInfer m => MonadInfer (Traced m)
hoistT :: (forall x. m x -> m x) -> Traced m a -> Traced m a
hoistT f (Traced m d) = Traced m (f d)
marginal :: Monad m => Traced m a -> m a
marginal (Traced _ d) = fmap output d
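-- | A single step of the Trace Metropolis-Hastings algorithm: perform one
-- 'mhTrans' transition on the trace while leaving the model unchanged.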
mhStep :: MonadSample m => Traced m a -> Traced m a
mhStep (Traced m d) = Traced m d' where
d' = d >>= mhTrans m
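-- | Full run of the Trace Metropolis-Hastings algorithm with the given number
-- of steps. The returned list holds the output of the initial trace plus one
-- sample per step, newest first.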
mh :: MonadSample m => Int -> Traced m a -> m [a]
mh n (Traced m d) = fmap (map output) t where
t = f n
f 0 = fmap (:[]) d
f k = do
~(x:xs) <- f (k-1)
y <- mhTrans m x
return (y:x:xs)
| adscib/monad-bayes | src/Control/Monad/Bayes/Traced/Static.hs | mit | 2,248 | 0 | 12 | 486 | 929 | 472 | 457 | -1 | -1 |
module Irg.Lab3.Lab3 where
import Control.Monad (replicateM)
import Irg.Lab3.Callbacks
import Irg.Lab3.Initialize
import Irg.Lab3.Utility
import Irg.Lab3.Geometry
import Irg.Lab3.ParseFile
import System.Directory
import Linear
coords :: [Float]
coords = [150,250]
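-- All 3-element combinations of the two coordinate values, with a trailing 1
-- appended as the homogeneous coordinate: the 8 corners of a cube spanning
-- [150,250]^3.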
myDots :: [[Float]]
myDots = fmap (++ [1]) (replicateM 3 coords)
dotVerts :: [V4 Float]
dotVerts = map (\[a,b,c,d] -> V4 a b c d) myDots
transformedDots :: [V4 Float]
transformedDots = map (getTransformationMatrix (V4 10 10 0 1) (V4 50 50 100 1) !*) dotVerts
cube3d :: [V4 Float]
cube3d = map (rotationXZ (pi/6) !*! rotationYZ (pi/6) !*) dotVerts
-- where
oo = V4 5 5 5 1
gg = V4 0 0 0 1
myMain :: IO ()
myMain = do
print "enter path"
path <- getLine
doesExist <- doesFileExist (folderLocation ++ path)
parsedFile <- if doesExist then parseFile (folderLocation ++ path) else parseFile fileLocation
-- parsedFile <- parseFile fileLocation
let myShape = map normalisePoly parsedFile
let transformedShape = transformPolygons oo gg parsedFile
-- print "my shape"
-- mapM_ print myShape
-- print "transformed shape"
-- mapM_ print transformedShape
let gameState = GameState myShape
initialize gameState reshapeCallback displayCallback keyboardCallback mouseCallback
| DominikDitoIvosevic/Uni | IRG/src/Irg/Lab3/Lab3.hs | mit | 1,265 | 0 | 11 | 218 | 426 | 230 | 196 | 31 | 2 |
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
module Gen.Literal
( hexGenLower
, hexGenMixedCase
, octalGen
, decimalGen
, floatGen
, runeGen
, rawStringGen
, interpStringGen
, identGen
, typeGen
) where
import Gen.Core
import Language.GoLite.Lexer.Literal ( commonEscapes, escapedChars )
import qualified Data.Map.Strict as Map -- To use `escapedChars`
import Data.Char ( chr )
import Test.QuickCheck.Gen ( Gen(MkGen) )
-- | Generates hexadecimal literals with lower-case digits
hexGenLower :: Gen String
hexGenLower = ("0x" ++) <$> mkDigits where
mkDigits
= sized
. flip replicateM
. elements
$ ['0'..'9'] ++ ['a'..'f']
-- | Generates hexadecimal literals with mixed-case digits
hexGenMixedCase :: Gen String
hexGenMixedCase = (++) <$> prefixes <*> mkDigits where
prefixes = elements ["0x", "0X"]
mkDigits
= sized
. flip replicateM
. elements
$ ['0'..'9'] ++ ['a'..'f'] ++ ['A'..'F']
-- | Generates decimal literals
decimalGen :: Gen String
decimalGen = (++) <$> start <*> rest where
start = (:[]) <$> elements ['1'..'9']
rest = sized . flip replicateM . elements $ ['0'..'9']
-- | Generates octal literals
octalGen :: Gen String
octalGen = ('0':) <$> (sized . flip replicateM . elements $ ['0'..'7'])
-- | Generates floating-point literals
floatGen :: Gen String
floatGen = (\x y -> x ++ "." ++ y) <$> decimalGen <*> decimalGen
-- | Generates rune literals (including escapes)
runeGen :: Gen String
runeGen = (surroundWith "'") <$> oneof [escape, normal] where
escape = (\c -> "\\" ++ [c]) <$> elements "abfnrtv"
normal = suchThat
-- Restrict the range of characters to those that are printable.
(((:[]) . chr) <$> choose (33, 126))
-- Don't pick a character that's illegal in a rune.
(\c -> c /= "'" && c /= "\\")
-- | Generates raw string literals.
rawStringGen :: Gen String
rawStringGen = (surroundWith "`") <$> suchThat arbitrary (\s -> '`' `notElem` s)
-- | Generates interpreted string literals
-- A special generator is required to prevent generating invalid escape codes,
-- or strings containing invalid characters.
interpStringGen :: Gen String
interpStringGen
= (surroundWith "\"") . concat
<$> listOf (frequency [(1, escape), (49, normal)]) where
escape = (\c -> "\\" ++ [c]) <$> elements commonEscapes
normal = suchThat
(((:[]) . chr) <$> choose (33, 126))
(\c -> c /= "\n" && c /= "\"" && c /= "\\")
-- We don't want to introduce illegal characters or escape codes
-- | Generates identifiers.
identGen :: Gen String
identGen = suchThat identGen' notKw where
identGen' = (:) <$> start <*> rest
start = elements $ '_' : ['a'..'z'] ++ ['A'..'Z']
rest = sized . flip replicateM . oneof $ [start, elements ['0'..'9']]
notKw = \c -> c `notElem` ["break", "return", "continue", "fallthrough",
"print", "println", "read", "var", "struct", "type", "if", "else",
"for", "switch", "case", "default", "package", "func"]
-- | Generates types, which may contain other generated types.
typeGen :: Gen BasicType
typeGen = sized typeGen' where
typeGen' 0 = Fix <$> oneof [
liftM NamedType arbitrary,
-- "Constant generator" that always creates empty lists
liftM StructType (MkGen $ \_ _ -> []) ]
-- The types become impossibly long if we don't reduce size exponentially.
typeGen' n = let n' = n `div` 2 in
Fix <$> oneof [
liftM SliceType (typeGen' $ n'),
liftM2 ArrayType arbitrary (typeGen' n'),
liftM StructType (vectorOf n' (fieldGen n'))]
-- | Generates structure fields of a given size
fieldGen :: Int -> Gen (BasicIdent, BasicType)
fieldGen x = liftM2 (,) arbitrary (resize 2 arbitrary)
instance Arbitrary BasicIdent where
arbitrary = Ident <$> identGen
instance Arbitrary (Identity GoInt) where
-- We don't have negative literals, only unary-minus expressions
-- Also in order to avoid some errors, we restrict the range to 1024
arbitrary = Identity <$> ((flip mod) 1024) <$> arbitraryPositiveIntegral
instance Arbitrary BasicType where
arbitrary = typeGen
instance Arbitrary BasicLiteral where
-- Use our own generators for strings and runes to ensure we generate valid
-- escape codes. Also make sure that literals are positive.
arbitrary = oneof [ liftM IntLit arbitraryPositiveIntegral,
liftM FloatLit (abs <$> arbitrary),
liftM StringLit (unsurround interpStringGen),
liftM RuneLit (fmap runeToChar runeGen)]
where
unsurround = fmap (reverse . tail . reverse . tail)
runeToChar s = let c = s !! 1 in case c of
'\\' -> escapedChars Map.! (s !! 2)
_ -> c
-- | Generates an arbitrary positive integer.
arbitraryPositiveIntegral :: Gen GoInt
arbitraryPositiveIntegral = abs <$> arbitraryBoundedIntegral
| djeik/goto | test/Gen/Literal.hs | mit | 5,183 | 0 | 16 | 1,358 | 1,317 | 736 | 581 | 94 | 2 |
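-- Naive primality test by trial division: primo n m is True iff n has no
-- divisor in [2..m]; main reads n and tests divisors from n-1 down to 2.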
primo :: Int -> Int -> Bool
primo n 1 = True
primo n m = if (n `rem` m) == 0 then False else primo n (m-1)
main = do
n <- fmap read getLine
putStrLn $ show (primo n (n-1))
| folivetti/PI-UFABC | AULA_04/Haskell/Primo.hs | mit | 187 | 0 | 12 | 57 | 109 | 56 | 53 | 6 | 2 |
module Hage.Geometry.Types where
type Point = (Float, Float)
type Vector = (Float, Float)
type Circle = (Point, Float)
type Size = (Float, Float)
type Rect = (Point, Size)
data Clash = Horizontal | Vertical
deriving (Eq)
getWidth :: Size -> Float
getWidth = fst
getHeight :: Size -> Float
getHeight = snd
| Hinidu/Arkanoid | src/Hage/Geometry/Types.hs | mit | 317 | 0 | 6 | 64 | 112 | 71 | 41 | 12 | 1 |
-- | Implements Figure 8-1.
module Language.TaPL.TypedBoolean (eval, eval', parseString, parseFile, Term(..), typeOf) where
import Language.TaPL.TypedBoolean.Syntax (Term(..))
import Language.TaPL.TypedBoolean.Parser (parseString, parseFile)
import Language.TaPL.TypedBoolean.Eval (eval, eval')
import Language.TaPL.TypedBoolean.Types (typeOf)
| zeckalpha/TaPL | src/Language/TaPL/TypedBoolean.hs | mit | 345 | 0 | 6 | 29 | 90 | 61 | 29 | 5 | 0 |
{-# LANGUAGE OverloadedStrings #-}
module StreamMultiplexSpec (main, spec) where
import Test.Hspec
import Test.QuickCheck
import Test.QuickCheck.Monadic (PropertyM, assert, monadicIO, pick, pre, run)
import Data.Conduit as DC
import Data.Conduit.List as DC
import Prelude as P
import Control.Concurrent.Async
import Control.Concurrent.STM
import Control.Concurrent.STM.TQueue
import Control.Concurrent
import Control.Exception hiding (assert)
import Control.Monad.IO.Class
import Data.Maybe as M
import Data.ByteString as BS
import Data.Text as T
import System.Log.Logger
import Data.ByteString.Char8 as BSC
import Control.Monad
import Control.Monad.Trans.Resource
import Network.Curl
import Network.Curl.Opts
import Network.Wai.Application.Static
import Network.Wai.Handler.Warp as Warp
{-
import Control.Monad.ST
-}
import Filesystem.Path.CurrentOS
import Network.BitSmuggler.Utils
import Network.BitSmuggler.StreamMultiplex
import Network.BitSmuggler.Proxy.Client as Proxy
import Network.BitSmuggler.Proxy.Server as Proxy
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "mutiplex" $ do
it "mutiplexes 1 connection" $ do -- easiest test
P.putStrLn "todo"
quickCheckWith stdArgs { maxSuccess = 50 } $ streamsBothWays
return ()
it "mutiplexes many connections" $ do
P.putStrLn "todo"
return ()
describe "mux tcp proxy" $ do
it "proxies http requests for static files" $ P.putStrLn "wtf"
>> (testHTTPProxy `catchAny` (\e -> debugM logger $ "EXCEPTION :" ++ show e))
return ()
testHTTPProxy = void $ forM [1..10] $ \i -> runResourceT $ do
-- liftIO $ updateGlobalLogger logger (setLevel DEBUG)
let root = "test-data/test-server-root"
let serverPort = 3333
let proxyPort = 1080
let app = staticApp $ defaultWebAppSettings (fromText root)
allocAsync $ async $ Warp.run serverPort app
(clientConnData, serverConnData) <- liftIO $ initSTMConnData
allocLinkedAsync $ async
$ Proxy.proxyServer serverConnData `catchAny` (\e -> do
debugM logger $ "terminated the server thread " ++ show e
throwIO e)
allocLinkedAsync $ async
$ (Proxy.proxyClient proxyPort clientConnData) `catchAny` (\e -> do
debugM logger $ "terminated the client thread " ++ show e
throwIO e)
liftIO $ waitForServer (BSC.pack localhost) (fromIntegral serverPort)
liftIO $ waitForServer (BSC.pack localhost) (fromIntegral proxyPort)
liftIO $ debugM logger "the servers are available. continuing with testing ..."
-- run concurrent requests
results <- liftIO $ (P.flip mapConcurrently)
(P.take 10 $ P.cycle ["tinyFile.txt", "tinyFile0.txt", "tinyFile1.txt"])
$ \fileName -> do
let fullPath = (fromText root) </> (fromText fileName)
contents <- liftIO $ P.readFile (T.unpack $ fromRight $ toText fullPath)
(code, proxiedContents) <- liftIO $ curlGetString
(localhost ++ ":" ++ (show serverPort) ++ "/" ++ T.unpack fileName)
[Network.Curl.Opts.CurlProxy $ "socks4://127.0.0.1:" ++ (show proxyPort)]
debugM logger "evaluate the results"
code `shouldBe` CurlOK
proxiedContents `shouldBe` contents
liftIO $ debugM logger "DONE RUNNING TEST"
return ()
streamsBothWays arbData1 arbData2
= monadicIO $ testStream (toInputData arbData1) (toInputData arbData2)
where
toInputData = P.map BS.pack
testStream :: [ByteString] -> [ByteString] -> PropertyM IO ()
testStream clientToServer serverToClient = do
-- setting up 2 way channel
(clientConnData, serverConnData) <- liftIO $ initSTMConnData
clientResult <- liftIO $ newEmptyTMVarIO
serverResult <- liftIO $ newEmptyTMVarIO
tid <- liftIO $ forkIO $ void $ concurrently
(runClient clientConnData
$ (\initConn -> initConn
(\connData -> streamAndValidate connData serverToClient clientToServer
>>= (\r -> atomically $ putTMVar clientResult r) )))
(runServer serverConnData
(\connData -> streamAndValidate connData clientToServer serverToClient
>>= (\r -> atomically $ putTMVar serverResult r) ))
clientSendsPayload <- liftIO $ atomically $ takeTMVar clientResult
serverSendsPayload <- liftIO $ atomically $ takeTMVar serverResult
assert clientSendsPayload
assert serverSendsPayload
liftIO $ killThread tid
return ()
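-- Build a pair of in-memory connections backed by two TQueues, wired so that
-- whatever the client writes the server reads, and vice versa.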
initSTMConnData = do
toServer <- newTQueueIO
toClient <- newTQueueIO
let clientConnData = ConnData {connSource = toProducer $ sourceTQueue toClient
, connSink = sinkTQueue toServer}
let serverConnData = ConnData {connSource = toProducer $ sourceTQueue toServer
, connSink = sinkTQueue toClient}
return (clientConnData, serverConnData)
streamAndValidate connData recvData sendData
= fmap fst $ concurrently
((connSource connData $$ DC.consume)
>>= (\out -> return $ BS.concat out == BS.concat recvData))
(DC.sourceList sendData $$ (connSink connData))
| danoctavian/bit-smuggler | BitSmuggler/test/unit/StreamMultiplexSpec.hs | gpl-2.0 | 5,007 | 0 | 23 | 1,010 | 1,419 | 733 | 686 | 112 | 1 |
{-# LANGUAGE MultiParamTypeClasses, FlexibleInstances #-}
-- | Like 'Maybe', but 'Nothing' is shown as a question mark
module String_Matching.Option where
import Autolib.ToDoc
import Autolib.Reader
import Autolib.Set
import Data.Typeable
data Option a = Yes a | No
deriving ( Eq, Ord )
instance ToDoc a => ToDoc ( Option a ) where
toDoc ( Yes a ) = toDoc a
toDoc No = text "?"
instance Reader a => Reader ( Option a ) where
reader = do my_symbol "?" ; return No
<|> do x <- reader ; return $ Yes x
yes :: [ Option a ] -> Int
yes xs = sum $ do Yes x <- xs ; return 1
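-- | Consistency of a partially-known value with another: @sub x y@ holds when
-- every known part of @x@ equals the corresponding part of @y@ (a 'No' matches
-- anything); 'inject' embeds a fully-known value.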
class Sub a b where
sub :: a -> b -> Bool
inject :: b -> a
instance Eq a => Sub ( Option a ) a where
sub No _ = True
sub ( Yes x ) y = x == y
inject = Yes
instance Eq a => Sub ( Option a ) ( Option a ) where
sub No _ = True
sub ( Yes x ) ( Yes y ) = x == y
sub _ _ = False
inject = id
instance Sub a b => Sub [a] [b] where
sub [] [] = True
sub (x:xs) (y:ys) = sub x y && sub xs ys
sub _ _ = False
inject = fmap inject
instance (Ord a, Ord b, Sub a b) => Sub (Set a) (Set b) where
sub xs ys = sub ( setToList xs ) ( setToList ys )
inject = smap inject
| Erdwolf/autotool-bonn | src/String_Matching/Option.hs | gpl-2.0 | 1,218 | 0 | 10 | 371 | 552 | 278 | 274 | 36 | 1 |
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
module Sql where
import Data.List (intercalate)
import DataModel
data Select = Select [SelectExpr] TableExpr (Maybe Expr)
-- , grouping :: GroupBy
-- , ordering :: OrderBy
-- , limit :: Limit
deriving (Eq, Show)
data SelectExpr = SelectExpr Expr
| AliasedExpr SelectExpr String
deriving (Eq, Show)
data TableExpr = NoTable
| TableRefExpr TableRef
| InnerJoin TableExpr TableRef (Maybe Expr)
| LeftJoin TableExpr TableRef (Maybe Expr)
deriving (Eq, Show)
data TableRef = JustTable Table
| TableSubQuery Select
| AliasedTableRef TableRef String
deriving (Eq, Show)
data Expr = OrExpr Expr Expr
| XorExpr Expr Expr
| AndExpr Expr Expr
| NotExpr Expr
-- | IsExpr { primary :: BoolPrimary, isNot :: Bool, isVal :: TrueFalseUnknown }
| IsNull Expr
| NullSafeEq Expr Expr
| CompEq Expr Expr
| CompGE Expr Expr
| CompGT Expr Expr
| CompLE Expr Expr
| CompLT Expr Expr
| CompNE Expr Expr
| PredInSubQuery Expr Select --{ subquery :: Select, isNot :: Bool }
| PredInList Expr [Expr] -- { exprs :: [Expr], isNot :: Bool }
| PredBetween Expr Expr Expr
| PredLike Expr Expr
| PredRegEx Expr Expr
| BitExprOr Expr Expr
| BitExprAnd Expr Expr
| BitExprLeftShift Expr Expr
| BitExprRightShift Expr Expr
| BitExprAdd Expr Expr
| BitExprSub Expr Expr
| BitExprMul Expr Expr
| BitExprDiv Expr Expr
| BitExprIntDiv Expr Expr
| BitExprMod Expr Expr
| BitExprXor Expr Expr
-- | BitExprAddIntv BitExpr IntvExpr
-- | BitExprSubIntv BitExpr IntvExpr
| LiteralExpr Literal
| FieldExpr Field TableRef
| FuncExpr FunctionCall
| NegExpr Expr
| BitInvExpr Expr
| TupleExpr [Expr]
| SubqueryExpr Select
| ExistsExpr Select
-- | CaseExpr Case
deriving (Eq, Show)
data Literal = StringLit String
| NumberLit String
deriving (Eq, Show)
data FunctionCall = FunctionCall String [Expr] deriving (Eq, Show)
class Compilable a where
compile :: a -> String
instance Compilable Literal where
compile (StringLit s) = "'" ++ s ++ "'"
compile (NumberLit s) = s
instance Compilable FunctionCall where
compile (FunctionCall name args) = name ++ "(" ++
(intercalate ", " $ map compile args) ++
")"
instance Compilable Expr where
compile (CompEq left right) = (compile left) ++ " = " ++ (compile right)
compile (LiteralExpr lit) = compile lit
compile (FieldExpr f (AliasedTableRef _ alias)) = alias ++ "." ++ (column f)
compile (FieldExpr f (JustTable t)) = (tableName t) ++ "." ++ (column f)
instance Compilable SelectExpr where
compile (SelectExpr expr) = compile expr
compile (AliasedExpr expr alias) = (compile expr) ++ " AS " ++ alias
instance Compilable TableExpr where
compile (TableRefExpr t) = compile t
compile (InnerJoin left right condition) =
(compile left) ++ " JOIN " ++ (compile right) ++ case condition of
Nothing -> ""
Just expr -> " ON " ++ (compile expr)
instance Compilable TableRef where
compile (JustTable t) = tableName t
compile (TableSubQuery s) = compile s
compile (AliasedTableRef tr alias) = (compile tr) ++ " AS " ++ alias
instance Compilable Select where
compile (Select exprs tables filter) =
"SELECT " ++ (intercalate ", " $ map compile exprs) ++ " FROM " ++
(compile tables) ++ (comp_where filter)
where comp_where Nothing = ""
          comp_where (Just expr) = " WHERE " ++ compile expr
data Query a = Query Select a deriving (Eq, Show)
instance Monad Query where
return a = Query (Select [] NoTable Nothing) a
(>>=) (Query (Select exprs tableExpr whereExpr) a) f = (Query newSelect b)
where newSelect = Select (exprs ++ moreExprs) newTableExpr whereExpr
newTableExpr = case otherTableExpr of
NoTable -> tableExpr
TableRefExpr t -> InnerJoin tableExpr t Nothing
InnerJoin NoTable t on -> InnerJoin tableExpr t on
(Query (Select moreExprs otherTableExpr otherWhereExpr) b) = f a
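-- Binding two queries concatenates their select expressions and inner-joins
-- their table expressions; the WHERE clause of the left-hand query is kept.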
class Selectable a where
mkSelectExpr :: a -> SelectExpr
instance Selectable String where
mkSelectExpr = SelectExpr . LiteralExpr . StringLit
select :: Selectable a => a -> Query SelectExpr
select a = Query (Select [expr] NoTable Nothing) expr
where expr = mkSelectExpr a
class Fromable a where
makeTableRef :: a -> TableRef
instance Fromable Table where
makeTableRef = JustTable
instance Fromable TableRef where
makeTableRef = id
from :: Fromable a => a -> Query TableRef
from a = Query (Select [] (TableRefExpr tableRef) Nothing) tableRef
where tableRef = makeTableRef a
join :: Fromable a => a -> (TableRef -> Expr) -> Query TableRef
join from on = Query (Select [] joinRef Nothing) tableRef
where tableRef = makeTableRef from
joinRef = InnerJoin NoTable tableRef $ Just $ on tableRef
instance Compilable (Query a) where
compile (Query select _) = compile select
class TableLike a where
getField :: a -> String -> Expr
instance TableLike TableRef where
getField tableRef@(JustTable t) fieldName =
FieldExpr (getFieldFromTable t fieldName) tableRef
getField tableRef@(AliasedTableRef (JustTable t) _) fieldName =
FieldExpr (getFieldFromTable t fieldName) tableRef
alias :: Fromable a => a -> String -> TableRef
alias entity alias = alias' (makeTableRef entity)
where alias' (AliasedTableRef tr _) = AliasedTableRef tr alias
alias' tr = AliasedTableRef tr alias
query dm = do
co <- from $ (alias (getTable dm "CollectionObject") "co")
agent <- join (alias (getTable dm "Agent") "a") (
\agent -> CompEq (getField agent "agentId") (getField co "collectionObjectId"))
select "foo"
| benanhalt/haskify | Sql.hs | gpl-2.0 | 6,251 | 0 | 13 | 1,796 | 1,844 | 951 | 893 | 141 | 2 |
{-# language FlexibleContexts, GeneralizedNewtypeDeriving, DeriveFunctor, DeriveGeneric #-}
{-# language OverloadedStrings #-}
{-# language MultiParamTypeClasses #-}
{-# language FlexibleInstances #-}
-----------------------------------------------------------------------------
-- |
-- Module : Control.Iterative
-- Copyright : (c) Marco Zocca 2017-2018
-- License : GPL-style (see the file LICENSE)
--
-- Maintainer : zocca marco gmail
-- Stability : experimental
-- Portability : portable
--
-- Combinators and helper functions for iterative algorithms, with support for monitoring and exceptions.
--
-----------------------------------------------------------------------------
module Control.Iterative where
import Control.Applicative
import Control.Monad (when, replicateM)
import Control.Monad.Reader (MonadReader(..), asks)
import Control.Monad.State.Strict (MonadState(..), get, put, gets)
import Control.Monad.Trans.Class (MonadTrans(..), lift)
import Control.Monad.Trans.State.Strict (StateT(..), runStateT, execStateT)
import Control.Monad.Trans.Reader (ReaderT(..), runReaderT)
import Control.Monad.Catch (Exception(..), MonadThrow(..), throwM)
import Control.Monad.Log (MonadLog(..), WithSeverity(..), Severity(..), renderWithSeverity, LoggingT(..), runLoggingT, Handler, logMessage, logError, logDebug, logInfo, logNotice)
-- import Data.Bool (bool)
import Data.Char (toUpper)
import Data.Semigroup
import Data.Monoid (Sum(..), Product(..))
import Data.Typeable
import qualified Control.Exception as E (Exception, Handler)
import Data.Foldable (foldrM)
import GHC.Generics
import Control.Exception.Common
import Control.Iterative.Internal
import Numeric.LinearAlgebra.Class
import Numeric.Eps
-- * ITERATION
-- | Iteration state machine
data ConvergenceStatus s a =
BufferNotReady
| Converging
| Converged s -- ^ Final state
| Diverging a a
| NotConverged s -- ^ Final state
deriving (Eq, Show)
-- | Configuration data for the iterative process
data IterConfig s t msg m = IterConfig {
icFunctionName :: String -- ^ Name of calling function, for logging purposes
, icNumIterationsMax :: Int -- ^ Max # of iterations
, icStateWindowLength :: Int -- ^ # of states used to assess convergence/divergence
, icStateProj :: s -> t -- ^ Project the state
, icLogHandler :: Handler m (WithSeverity msg) -- ^ Logging handler
-- , icLogWith :: s -> (Severity, msg) -- ^ Compute log severity and message
} deriving Generic
-- | Configuration for numerical convergence
--
-- This can be used to specify convenient defaults for convergence in e.g. L2
data ConvergConfig t a = ConvergConfig {
ccStateSummary :: [t] -> a -- ^ Produce a summary from a list of state projections
, ccStateConverging :: a -> Bool -- ^ Are we converging ?
, ccStateDiverging :: a -> a -> Bool -- ^ Are we diverging ?
, ccStateFinal :: t -> Bool -- ^ Has the state converged ?
}
convergenceL2 :: Normed v => (v -> Bool) -> ConvergConfig v (Magnitude v)
convergenceL2 = ConvergConfig norm2Diff nearZero (>)
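-- That is: a window of states is summarised by the summed L2 distances between
-- consecutive states; convergence is declared when that sum is near zero, and
-- divergence when the more recent window's summary exceeds the older one's.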
-- -- | Build an 'IterConfig'
-- mkIterConfig :: String
-- -> Int
-- -> Int
-- -> (s -> t)
-- -> ([t] -> a)
-- -> (a -> Bool)
-- -> (a -> a -> Bool)
-- -> (t -> Bool)
-- -> Handler m (WithSeverity msg)
-- -> (s -> (Severity, msg))
-- -> IterConfig s t msg m a
-- mkIterConfig = IterConfig
-- class MonadState s m => MonadStateBuffer s m where
-- -- getBuffer :: s -> m (Maybe [a])
-- -- getBuffer :: s -> m (LoopState a)
-- getBuffer :: m (LoopState s)
-- putBuffer :: LoopState s -> m a
-- baz n f = do
-- -- s <- get
-- -- sb <- getBuffer s
-- sb <- getBuffer
-- let sb' = f $ getBuffers n sb
-- putBuffer sb'
updBuffer n snew (LoopState i ls s)
| n <= 0 = Nothing
| length ls < n = Just $ LoopState (i+1) (s : ls) snew
| otherwise = Just $ LoopState (i+1) (s : take n ls) snew
data StateBuffer s = StateBuffer { sbPrevStates :: [s]
, sbCurrentState :: s } deriving (Eq, Show)
-- instance Semigroup (StateBuffer s) where
initStateBuffer :: s -> StateBuffer s
initStateBuffer = StateBuffer []
-- reconstructStateBuffer n (StateBuffer _ ls s)
-- | length ls < n || n <= 0 = Nothing
-- | otherwise = Just buffer where
-- buffer = s : take n ls
-- mkStateBuffer :: Int -> [s] -> s -> Maybe (StateBuffer s)
-- mkStateBuffer n ls s
-- | length ls < n = Nothing
-- | length ls == n = Just $ StateBuffer ls s
-- | otherwise = Just $ StateBuffer (take n ls) s
-- | A record to keep track of the current iteration, a list of the prior states and the current state.
data LoopState s = LoopState { lsCounter :: !Int
, lsPrevStates :: [s]
, lsCurrentState :: s } deriving (Eq, Show)
-- | Reconstruct state buffer for convergence/divergence estimation
getBuffers :: Int -> LoopState a -> Maybe [a]
getBuffers n (LoopState _ ls s)
| length ls < n || n <= 0 = Nothing
| otherwise = Just buffer where
buffer = s : take n ls
-- | Construct the initial LoopState with 'lsCounter' = 0, 'lsPrevStates' = []
mkLoopState :: s -> LoopState s
mkLoopState = LoopState 0 []
-- | Configurable iteration combinator, with convergence monitoring and logging
modifyInspectGuardedM :: (MonadThrow m, Show a, Typeable a, Show t, Typeable t) =>
ConvergConfig t a
-> IterConfig s t msg m
-> (s -> m s)
-> s
-> m s
modifyInspectGuardedM (ConvergConfig sf qconverg qdiverg qfinal) r f x0
| nitermax > 0 = run
| otherwise = throwM (NonNegError fname nitermax)
where
(IterConfig fname nitermax lwindow pf lh) = r
updState snew (LoopState i lss s) = LoopState (i + 1) lssUpd snew
where
lss' = s : lss
lssUpd | length lss < lwindow = lss'
| otherwise = take lwindow lss'
run = do
let s0 = mkLoopState x0
(aLast, sLast) <- runIterativeT lh r s0 loop
let i = lsCounter sLast
case aLast of
Left (NotConverged y) -> throwM $ NotConvergedE fname nitermax (pf y)
Left (Diverging qi qt) -> throwM $ DivergingE fname i qi qt
Right x -> pure x
-- _ -> throwM $ IterE fname "asdf"
loop = do
s@(LoopState i _ x) <- get
y <- lift $ f x
let
s' = updState y s
status = case getBuffers lwindow s' of
Nothing -> BufferNotReady
Just buffer -> stat
where
llf = pf `map` buffer
qi = sf $ init llf -- summary of [lwindow + 1 .. 0] states
qt = sf $ tail llf -- " " [lwindow .. 1] states
stat | qdiverg qi qt && not (qconverg qi) = Diverging qi qt
| qconverg qi || qfinal (pf y) = Converged y
| i == nitermax - 1 = NotConverged y
| otherwise = Converging
case status of
BufferNotReady -> do
put s'
loop
Converging -> do
-- logWith lwf y
put s'
loop
Diverging qi qt ->
pure $ Left (Diverging qi qt)
Converged qi ->
pure $ Right qi
NotConverged yy ->
pure $ Left (NotConverged yy)
-- -- baz :: StateBuffer s m => (s -> Maybe [a] -> s) -> m ()
-- baz f = do
-- s <- get
-- sb <- getBuffer s
-- let s' = f sb
-- put s'
-- * Control primitives for bounded iteration with convergence check
-- | Transform the state until a condition is met.
modifyUntil :: MonadState s m => (s -> Bool) -> (s -> s) -> m s
modifyUntil q f = modifyUntilM q (pure . f)
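-- For example, in the State monad (a minimal sketch): keep doubling the state
-- until it exceeds 100.
--
-- >>> runState (modifyUntil (> 100) (* 2)) 1
-- (128,128)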
modifyUntilM :: MonadState s m => (s -> Bool) -> (s -> m s) -> m s
modifyUntilM q f = do
x <- get
y <- f x
put y
if q y then return y
else modifyUntilM q f
modifyUntilM_ :: MonadState s m => (s -> Bool) -> (s -> m s) -> m s
modifyUntilM_ q f = do
x <- get
y <- f x
if q y
then pure y
else do
put y
modifyUntilM_ q f
-- -- | modifyUntil with optional iteration logging to stdout
-- -- modifyUntil' :: MonadLog String m =>
-- -- IterationConfig a b -> (a -> Bool) -> (a -> a) -> a -> m a
-- modifyUntil' config q f x0 = modifyUntilM' config q (pure . f) x0
-- modifyUntilM' :: MonadLog String m =>
-- IterationConfig a b -> (a -> Bool) -> (a -> m a) -> a -> m a
modifyUntilM' config q f x0 = execStateT (go 0) x0 where
-- logf ii = (Informational, unwords ["Iteration", show ii, "\n"])
go i = do
x <- get
y <- lift $ f x
-- logWith (icLogWith config) i
put y
if q y
then return y
else go (i + 1)
-- -- | `untilConvergedG0` is a special case of `untilConvergedG` that assesses convergence based on the L2 distance to a known solution `xKnown`
-- -- untilConvergedG0 :: (Show p, MonadThrow m, Typeable v, Typeable (Magnitude v), Normed v) =>
-- -- IterConfig s v msg m a
-- -- -> v -> (s -> s) -> s -> m s
-- untilConvergedG0 config xKnown f x0 =
-- modifyInspectGuarded config' f x0
-- where
-- config' = config {
-- icStateSummary = norm2Diff
-- , icStateConverging = nearZero
-- , icStateDiverging = (>)
-- , icStateFinal = \s -> nearZero $ norm2 (xKnown ^-^ s)
-- }
-- | This function makes some default choices on the `modifyInspectGuarded` machinery: convergence is assessed using the squared L2 distance between consecutive states, and divergence is detected when this function is increasing between pairs of measurements.
-- untilConvergedG :: (Show v, Epsilon v, MonadThrow m, Typeable v, Typeable (Magnitude v), Normed v) =>
-- Handler m (WithSeverity msg)
-- -> String
-- -> Int
-- -> Int
-- -> (s -> v)
-- -> (s -> (Severity, msg))
-- -> (s -> s)
-- -> s
-- -> m s
-- untilConvergedG fh fname nitermax lwindow fp flog =
-- modifyInspectGuarded config
-- where
-- config = mkL2ConvergenceIterConf fname nitermax lwindow fp qfinal fh flog
-- qfinal = nearZero
-- untilConvergedGM fname config =
-- modifyInspectGuardedM fname config norm2Diff nearZero (>)
-- -- | Create a configuration for L2 convergence:
-- --
-- -- state summary : squared distance of vector sequence
-- -- convergence criterion : " ~= zero
-- -- divergence criterion : current summary is > previous one
-- mkL2ConvergenceIterConf :: Normed v =>
-- String -- ^ Function name
-- -> Int -- ^ Max # iterations
-- -> Int -- ^ Buffer size
-- -> (s -> v) -- ^ State projection
-- -> (v -> Bool) -- ^ Termination criterion
-- -> Handler m (WithSeverity msg) -- ^ Logging handler
-- -> (s -> (Severity, msg)) -- ^ Log formatting
-- -> IterConfig s v msg m (Magnitude v)
-- mkL2ConvergenceIterConf fname nitermax lwindow fp =
-- mkIterConfig fname nitermax lwindow fp norm2Diff nearZero (>)
-- -- | Pure version of 'modifyInspectGuardedM'
-- modifyInspectGuarded :: (MonadThrow m, Show t, Show a, Typeable t, Typeable a) =>
-- Handler m (WithSeverity msg) -- ^ Logging handler
-- -> IterConfig s t msg a -- ^ Configuration
-- -> (s -> s) -- ^ State evolution
-- -> s -- ^ Initial state
-- -> m s -- ^ Final state
-- modifyInspectGuarded config f x0 = modifyInspectGuardedM config (pure . f) x0
-- * LOGGING
-- | Log with a function that computes a severity and a message from the input
logWith :: MonadLog (WithSeverity a) m => (p -> (Severity, a)) -> p -> m ()
logWith f x = logMessage (WithSeverity sev sevMsg) where
(sev, sevMsg) = f x
bracketsUpp :: Show a => a -> String
bracketsUpp p = unwords ["[", map toUpper (show p), "]"]
withSeverity :: (t -> String) -> WithSeverity t -> String
withSeverity k (WithSeverity u a ) = unwords [bracketsUpp u, k a]
-- -- >>> renderWithSeverity id (WithSeverity Informational "Flux capacitor is functional")
-- -- [Informational] Flux capacitor is functional
-- renderWithSeverity
-- :: (a -> PP.Doc) -> (WithSeverity a -> PP.Doc)
-- renderWithSeverity k (WithSeverity u a) =
-- PP.brackets (PP.pretty u) PP.<+> PP.align (k a)
-- | Some useful combinators
-- | Apply a function over a range of integer indices, zip the result with it and filter out the almost-zero entries
onRangeSparse :: Epsilon b => (Int -> b) -> [Int] -> [(Int, b)]
onRangeSparse f ixs = foldr ins [] ixs where
ins x xr | isNz (f x) = (x, f x) : xr
| otherwise = xr
-- | ", monadic version
onRangeSparseM :: (Epsilon b, Foldable t, Monad m) =>
(a -> m b) -> t a -> m [(a, b)]
onRangeSparseM f ixs = unfoldZipM mf f ixs where
mf x = isNz <$> f x
unfoldZipM0 :: (Foldable t, Monad m) =>
(a -> Bool) -> (a -> b) -> t a -> m [(a, b)]
unfoldZipM0 q f = unfoldZipM (pure . q) (pure . f)
unfoldZipM :: (Foldable t, Monad m) =>
(a -> m Bool) -> (a -> m b) -> t a -> m [(a, b)]
unfoldZipM q f ixs = foldrM insf [] ixs where
insf x xr = do
qx <- q x
if qx
then do
y <- f x
pure $ (x, y) : xr
else pure xr
-- | A combinator I don't know how to call
combx :: Functor f => (a -> b) -> (t -> f a) -> t -> f b
combx g f x = g <$> f x
-- | Helpers
sqDiffPairs :: Num a => (v -> v -> a) -> [v] -> a
sqDiffPairs f uu = sqDiff f (init uu) (tail uu)
sqDiff :: Num a => (u -> v -> a) -> [u] -> [v] -> a
sqDiff f uu vv = sum $ zipWith f uu vv
-- | Relative residual
relRes :: (Normed t, LinearVectorSpace t) =>
MatrixType t -> t -> t -> Magnitude t
relRes aa b x = n / d where
n = norm2 $ (aa #> x) ^-^ b
d = norm2 b
-- meanl :: (Foldable t, Fractional a) => t a -> a
-- meanl xx = 1/fromIntegral (length xx) * sum xx
-- norm2l :: (Foldable t, Functor t, Floating a) => t a -> a
-- norm2l xx = sqrt $ sum (fmap (**2) xx)
-- | Squared difference of a 2-element list.
-- | NB: unsafe !
diffSqL :: Floating a => [a] -> a
diffSqL xx = (x1 - x2)**2 where [x1, x2] = [head xx, xx!!1]
-- | Relative tolerance :
-- relTol a b := ||a - b|| / (1 + min (||a||, ||b||))
relTol :: Normed v => v -> v -> Magnitude v
relTol a b = norm2 (a ^-^ b) / m where
m = 1 + min (norm2 a) (norm2 b)
-- | NB: use it on a list of >= 2 elements !!
norm2Diff :: Normed v => [v] -> Magnitude v
norm2Diff v = sum $ zipWith f va vb where
f v1 v2 = norm2 $ v1 ^-^ v2
va = init v
vb = tail v
| ocramz/sparse-linear-algebra | src/Control/Iterative.hs | gpl-3.0 | 15,134 | 0 | 23 | 4,547 | 3,160 | 1,716 | 1,444 | 191 | 8 |
{-# LANGUAGE Arrows #-}
{-# LANGUAGE FlexibleContexts #-}
module SIR.Event where
import Data.Maybe
import Control.Monad.Random
import Control.Monad.Reader
import Control.Monad.Writer
import Control.Monad.Trans.MSF.Except
import Data.MonadicStreamFunction.InternalCore
import Data.MonadicStreamFunction
import qualified Data.IntMap.Strict as Map
import qualified Data.PQueue.Min as PQ
import SIR.Model
--import Debug.Trace
type Time = Double
type AgentId = Int
data QueueItem e = QueueItem e !AgentId !Time deriving Show
type EventQueue e = PQ.MinQueue (QueueItem e)
instance Eq (QueueItem e) where
(==) (QueueItem _ _ t1) (QueueItem _ _ t2) = t1 == t2
instance Ord (QueueItem e) where
compare (QueueItem _ _ t1) (QueueItem _ _ t2) = compare t1 t2
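-- Queue items are compared by their scheduled time only, so the 'EventQueue'
-- always yields the earliest pending event first.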
type ABSMonad m e = ReaderT Time (WriterT [QueueItem e] (ReaderT [AgentId] m))
type AgentMSF m e o = MSF (ABSMonad m e) e o
type Agent m e o = AgentId -> (ABSMonad m e) (AgentMSF m e o)
type AgentMap m e o = Map.IntMap (AgentMSF m e o, o)
data SIREvent
= MakeContact
| Contact AgentId SIRState
| Recover
deriving (Show, Eq)
type SIRMonad g = Rand g
type SIRMonadT g = ABSMonad (SIRMonad g) SIREvent
type SIRAgent g = Agent (SIRMonad g) SIREvent SIRState
type SIRAgentMSF g = AgentMSF (SIRMonad g) SIREvent SIRState
type SIRAgentMap g = AgentMap (SIRMonad g) SIREvent SIRState
makeContactInterval :: Double
makeContactInterval = 1.0
--------------------------------------------------------------------------------
-- AGENT CONSTRUCTOR
--------------------------------------------------------------------------------
-- | A sir agent which is in one of three states
sirAgent :: RandomGen g
=> Int -- ^ the contact rate
-> Double -- ^ the infectivity
-> Double -- ^ the illness duration
-> SIRState -- ^ the initial state of the agent
-> SIRAgent g -- ^ the continuation
sirAgent cor inf ild Susceptible aid = do
-- on start
scheduleMakeContact aid makeContactInterval
return $ susceptibleAgent aid cor inf ild
sirAgent _ _ ild Infected aid = do
-- on start
scheduleRecovery aid ild
return $ infectedAgent aid
sirAgent _ _ _ Recovered _ =
return recoveredAgent
--------------------------------------------------------------------------------
-- AGENTS
--------------------------------------------------------------------------------
susceptibleAgent :: RandomGen g
=> AgentId
-> Int
-> Double
-> Double
-> SIRAgentMSF g
susceptibleAgent aid cor inf ild =
switch
susceptibleAgentInfected
(const $ infectedAgent aid)
where
susceptibleAgentInfected :: RandomGen g
=> MSF
(SIRMonadT g)
SIREvent
(SIRState, Maybe ())
susceptibleAgentInfected = proc e -> do
ret <- arrM handleEvent -< e
case ret of
Nothing -> returnA -< (Susceptible, ret)
_ -> returnA -< (Infected, ret)
handleEvent :: RandomGen g => SIREvent -> (SIRMonadT g) (Maybe ())
handleEvent (Contact _ Infected) = do
r <- (lift . lift . lift) (randomBoolM inf)
if r
then do
scheduleRecovery aid ild
return $ Just ()
else return Nothing
handleEvent MakeContact = do
ais <- allAgentIds
--corExp <- lift $ lift $ lift $ randomExpM (1 / fromIntegral cor)
receivers <- lift $ lift $ lift $ forM [1..cor] (const $ randomElem ais)
mapM_ makeContactWith receivers
scheduleMakeContact aid makeContactInterval
return Nothing
handleEvent _ = return Nothing
makeContactWith :: AgentId -> (SIRMonadT g) ()
makeContactWith receiver =
scheduleEvent receiver (Contact aid Susceptible) 0.0
infectedAgent :: AgentId -> SIRAgentMSF g
infectedAgent aid =
switch
infectedAgentRecovered
(const recoveredAgent)
where
infectedAgentRecovered :: MSF
(SIRMonadT g)
SIREvent
(SIRState, Maybe ())
infectedAgentRecovered = proc e -> do
ret <- arrM handleEvent -< e
case ret of
Nothing -> returnA -< (Infected, ret)
_ -> returnA -< (Recovered, ret)
handleEvent :: SIREvent -> (SIRMonadT g) (Maybe ())
handleEvent (Contact sender Susceptible) = do
replyContact sender
return Nothing
handleEvent Recover = return $ Just ()
handleEvent _ = return Nothing
replyContact :: AgentId -> (SIRMonadT g) ()
replyContact receiver = scheduleEvent receiver (Contact aid Infected) 0.0
recoveredAgent :: SIRAgentMSF g
recoveredAgent = arr (const Recovered)
--------------------------------------------------------------------------------
-- AGENT UTILS
--------------------------------------------------------------------------------
scheduleMakeContact :: RandomGen g => AgentId -> Double -> (SIRMonadT g) ()
scheduleMakeContact aid = scheduleEvent aid MakeContact
scheduleRecovery :: RandomGen g => AgentId -> Double -> (SIRMonadT g) ()
scheduleRecovery aid ild = do
dt <- lift $ lift $ lift $ randomExpM (1 / ild)
scheduleEvent aid Recover dt
allAgentIds :: Monad m => (ABSMonad m e) [AgentId]
allAgentIds = lift $ lift ask
scheduleEvent :: Monad m
=> AgentId
-> e
-> Double
-> (ABSMonad m e) ()
scheduleEvent aid e dt = do
t <- ask
let qe = QueueItem e aid (t + dt)
lift $ tell [qe]
--------------------------------------------------------------------------------
-- SIMULATION KERNEL
--------------------------------------------------------------------------------
-- NOTE: this is implemented in a way that the output [s] can be treated as
-- an infinite list, which is the case whenever the simulation does not
-- terminate by itself by running out of events (and no time-/event-limit is
-- hit). This also requires that no function which needs to look at all
-- elements (like reverse) is used. It also means we cannot use an accumulator
-- and cannot use tail recursion.
processQueue :: Monad m
=> Integer
-> Double
-> AgentMap m e o
-> EventQueue e
-> (AgentMap m e o -> Double -> s)
-> ReaderT [AgentId] m [s]
processQueue 0 _ _ _ _ = return [] -- terminated by externals of simulation: hit event limit
processQueue n tLimit am q dsf
| isNothing mayHead = return [] -- terminated by internals of simulation model: no more events
| evtTime > tLimit = return [] -- terminated by externals of simulation: hit time limit
| otherwise = do
retMay <- processEvent am evt
      -- receiver not found, remove the event and carry on
case retMay of
-- event-receiver not found, next event
Nothing -> processQueue (n-1) tLimit am q' dsf
-- event receiver found
(Just (am', es)) -> do
-- insert new events into queue
let q'' = foldr PQ.insert q' es
-- sample domain-state for current event
let s = dsf am' evtTime
-- non tail-recursive call to support infinite [s]
ss <- processQueue (n-1) tLimit am' q'' dsf
return (s : ss)
where
mayHead = PQ.getMin q
evt = fromJust mayHead
evtTime = eventTime evt
q' = PQ.drop 1 q
eventTime :: QueueItem e -> Time
eventTime (QueueItem _ _ et) = et
processEvent :: Monad m
=> AgentMap m e o
-> QueueItem e
-> ReaderT [AgentId] m
-- no idea why have to use full expansion of AgentMap here...
(Maybe (Map.IntMap (AgentMSF m e o, o), [QueueItem e]))
processEvent as (QueueItem e receiver evtTime)
| isNothing aMay = return Nothing
| otherwise = do
let aReaderTime = unMSF a e
aWriterEvents = runReaderT aReaderTime evtTime
amsf = runWriterT aWriterEvents
((ao, a'), es) <- amsf
let as' = Map.insert receiver (a', ao) as
return $ Just (as', es)
where
aMay = Map.lookup receiver as
(a,_) = fromJust aMay
--------------------------------------------------------------------------------
-- SIR SPECIFIC SIMULATION KERNEL
--------------------------------------------------------------------------------
runEventSIR :: RandomGen g
=> [SIRState]
-> Int
-> Double
-> Double
-> Integer
-> Double
-> g
-> ([(Time, (Int, Int, Int))], Integer)
runEventSIR ss cor inf ild maxEvents tLimit g
= (ds, 0)
where
ds = evalRand executeAgents g
executeAgents = do
(asMap, eq) <- initSIR ss cor inf ild
let asIds = Map.keys asMap
let doms as t = (t, aggregateAgentMap as)
runReaderT (processQueue maxEvents tLimit asMap eq doms) asIds
-- let evtCnt = if maxEvents < 0
-- then -(relEvtCnt + 1)
-- else maxEvents - relEvtCnt
aggregateAgentMap :: SIRAgentMap g -> (Int, Int, Int)
aggregateAgentMap = Prelude.foldr aggregateAgentMapAux (0,0,0)
where
aggregateAgentMapAux :: (AgentMSF (SIRMonad g) SIREvent SIRState, SIRState)
-> (Int, Int, Int)
-> (Int, Int, Int)
aggregateAgentMapAux (_, Susceptible) (s,i,r) = (s+1,i,r)
aggregateAgentMapAux (_, Infected) (s,i,r) = (s,i+1,r)
aggregateAgentMapAux (_, Recovered) (s,i,r) = (s,i,r+1)
initSIR :: RandomGen g
=> [SIRState]
-> Int
-> Double
-> Double
-> SIRMonad g (SIRAgentMap g, EventQueue SIREvent)
initSIR ss cor inf ild = do
let asEvtWriter = runReaderT (sequence asWIds) 0
asAsIdsReader = runWriterT asEvtWriter
(as0', es) <- runReaderT asAsIdsReader asIds
let asMap = Prelude.foldr
(\(aid, a, s) acc -> Map.insert aid (a, s) acc)
Map.empty
(Prelude.zip3 asIds as0' ss)
eq = foldr PQ.insert PQ.empty es
return (asMap, eq)
where
as0 = map (sirAgent cor inf ild) ss
asIds = [0.. length ss - 1]
asWIds = Prelude.zipWith (\a aid -> a aid) as0 asIds
--------------------------------------------------------------------------------
-- RANDOM UTILS
--------------------------------------------------------------------------------
randomElem :: MonadRandom m => [e] -> m e
randomElem es = do
let len = length es
idx <- getRandomR (0, len - 1)
return $ es !! idx
randomBoolM :: MonadRandom m => Double -> m Bool
randomBoolM p = getRandomR (0, 1) >>= (\r -> return $ r <= p)
randomExpM :: MonadRandom m => Double -> m Double
randomExpM lambda = avoid 0 >>= (\r -> return ((-log r) / lambda))
where
avoid :: (Random a, Eq a, MonadRandom m) => a -> m a
avoid x = do
r <- getRandom
if r == x
then avoid x
else return r | thalerjonathan/phd | thesis/code/sir/src/SIR/Event.hs | gpl-3.0 | 11,119 | 3 | 17 | 3,154 | 3,087 | 1,595 | 1,492 | 235 | 5 |
-- | A simple logging service. Based on sub-section "MVar as a Simple Channel:
-- A Logging Service"
module Logger ( initLogger
, logMessage
, logStop
) where
import Control.Concurrent
-- The MVar here is used as a way to communicate with the logger service.
-- The logger service is spawned in the `initLogger` command.
data Logger = Logger (MVar LogCommand)
-- A command is either a message to print or a request to stop, carrying an
-- MVar on which completion is signalled back to the caller (following the
-- book's MVar-based design).
data LogCommand = Message String | Stop (MVar ())
-- | Handle to the logging service.
initLogger :: IO Logger
initLogger = do
  m <- newEmptyMVar
  let l = Logger m
  _ <- forkIO (logger l)
  return l
-- | The service loop: print messages until a 'Stop' command arrives, then
-- acknowledge it and terminate.
logger :: Logger -> IO ()
logger (Logger m) = loop
  where
    loop = do
      cmd <- takeMVar m
      case cmd of
        Message msg -> putStrLn msg >> loop
        Stop s      -> putMVar s ()
-- | Log a message to the standard output.
-- Logging a message causes a side effect, so it makes sense that it returns an
-- IO ().
logMessage :: Logger -> String -> IO ()
logMessage (Logger m) s = putMVar m (Message s)
-- | Stop the logging process.
-- logStop must not return until all the outstanding requests have been
-- processed.
logStop :: Logger -> IO ()
logStop (Logger m) = do
  s <- newEmptyMVar
  putMVar m (Stop s)
  takeMVar s
| capitanbatata/marlows-parconc-exercises | parconc-ch07/src/Logger.hs | gpl-3.0 | 835 | 0 | 8 | 197 | 89 | 55 | 34 | -1 | -1 |
{-|
Module : Css.Constants
Description : Defines the constants for the other CSS modules.
-}
module Css.Constants
(margin0,
padding0,
width100,
height100,
-- * Node and Rectangle Constants
fill,
stroke,
alignCenter,
wideStroke,
faded,
semiVisible,
fullyVisible,
strokeRed,
strokeDashed,
roundCorners,
-- * Colors
theoryDark,
coreDark,
seDark,
systemsDark,
graphicsDark,
dbwebDark,
numDark,
aiDark,
hciDark,
mathDark,
introDark,
titleColour,
lightGrey,
-- * Background Colors
purple1,
purple2,
purple3,
purple4,
purple5,
purple6,
purple7,
purple8,
purple9,
purple10,
pink1,
pink2,
borderNone,
borderPink,
-- * More Node Colors!
teal1,
orange1,
blue1,
blue2,
blue3,
blue4,
blue5,
blue6,
blueFb,
red1,
red2,
red3,
red4,
red5,
green1,
green2,
dRed,
dGreen,
dBlue,
dPurple,
grey1,
grey2,
grey3,
grey4,
grey5,
grey6,
beige1,
-- * Color Palette Colors
pastelRed,
pastelOrange,
pastelYellow,
pastelGreen,
pastelBlue,
pastelPink,
pastelPurple,
pastelBrown,
pastelGrey,
-- * FCE Count Color
fceCountColor,
-- * PostPage Color
darkRose,
softGreen,
mGreen,
mRed,
-- * Graph Styles
nodeFontSize,
hybridFontSize,
boolFontSize,
regionFontSize
) where
import Clay
import Prelude hiding ((**))
import Data.Text as T
-- |Defines CSS for empty margins.
margin0 :: Css
margin0 = margin nil nil nil nil
-- |Defines CSS for empty padding.
padding0 :: Css
padding0 = padding nil nil nil nil
-- |Defines default rectangle width, which is 100%.
width100 :: Css
width100 = width $ pct 100
-- |Defines default rectangle height, which is 100%.
height100 :: Css
height100 = height $ pct 100
{- Node and rectangle constants,
- including sizes, strokes, fills,
- opacities, colors and alignments. -}
-- |Defines "fill" as text for CSS.
fill :: Text -> Css
fill = (-:) "fill"
-- |Defines "stroke" as text for CSS.
stroke :: Text -> Css
stroke = (-:) "stroke"
-- |Defines the CSS for center alignment for a node or rectangle.
alignCenter :: Css
alignCenter = textAlign $ alignSide sideCenter
-- |Defines the CSS for a wide stroke.
wideStroke :: Css
wideStroke = "stroke-width" -: "3"
-- |Defines the CSS for a lower opacity, called faded.
faded :: Css
faded = opacity 0.4
-- |Defines the CSS for a mid-high opacity, but not quite opaque.
semiVisible :: Css
semiVisible = opacity 0.7
-- |Defines the CSS for something that is opaque.
fullyVisible :: Css
fullyVisible = opacity 1.0
-- |Defines the CSS for a strong red stroke.
strokeRed :: Css
strokeRed = do
"stroke" -: "#CC0011"
"stroke-width" -: "2px"
{-|
   Defines the CSS for a dashed stroke, with the gap between dashes being
   a bit smaller than the dash itself.
-}
strokeDashed :: Css
strokeDashed = do
"stroke-dasharray" -: "8,5"
"stroke-width" -: "2px"
-- |Defines the CSS for the rounded corners of a border.
roundCorners :: Css
roundCorners = "border-radius" -: "8px"
{- Colors -}
-- |Defines the color of a grayish blue.
theoryDark :: T.Text
theoryDark = "#B1C8D1"
-- |Defines the color of a light gray.
coreDark :: T.Text
coreDark = "#C9C9C9"
-- |Defines the color of a soft red.
seDark :: T.Text
seDark = "#E68080"
-- |Defines the color of a light violet.
systemsDark :: T.Text
systemsDark = "#C285FF"
-- |Defines the color of a mostly desaturated dark lime green.
graphicsDark :: T.Text
graphicsDark = "#66A366"
-- |Defines the color of a strong pink.
dbwebDark :: T.Text
dbwebDark = "#C42B97"
-- |Defines the color of a very light green.
numDark :: T.Text
numDark = "#B8FF70"
-- |Defines the color of a very light blue.
aiDark :: T.Text
aiDark = "#80B2FF"
-- |Defines the color of a soft lime green.
hciDark :: T.Text
hciDark = "#91F27A"
-- |Defines the color of a slightly desaturated violet.
mathDark :: T.Text
mathDark = "#8A67BE"
-- |Defines the color of a moderate cyan.
introDark :: T.Text
introDark = "#5DD5B8"
-- |Defines the color of a very dark blue.
titleColour :: T.Text
titleColour = "#072D68"
-- |Defines the color of a light gray.
lightGrey :: T.Text
lightGrey = "#CCCCCC"
{- Background colors. -}
-- |Defines the color of a dark grayish magenta, intended for the background.
purple1 :: Color
purple1 = parse "#46364A"
-- |Defines the color of a mostly desaturated dark pink, intended for the
-- background.
purple2 :: Color
purple2 = parse "#7E4D66"
-- |Defines the color of a slightly desaturated magenta, intended for the
-- background.
purple3 :: Color
purple3 = parse "#CD96CD"
-- |Defines the color of a mostly desaturated dark magenta, intended for the
-- background.
purple4 :: Color
purple4 = parse "#9C6B98"
-- |Defines the color of a dark magenta, intended for the background.
purple5 :: Color
purple5 = parse "#800080"
-- |Defines the color of a grayish violet, intended for the background.
purple6 :: Color
purple6 = parse "#CAC4D4"
-- |Defines the color of a dark grayish violet, intended for the background.
purple7 :: Color
purple7 = parse "#9C91B0"
-- |Defines the color of a mostly desaturated dark violet, intended for the
-- background.
purple8 :: Color
purple8 = parse "#7A6A96"
-- |Defines the color of a very dark desaturated violet, intended for the
-- background.
purple9 :: Color
purple9 = parse "#433063"
-- |Defines the color of a mostly desaturated dark violet, intended for the
-- background.
purple10 :: Color
purple10 = parse "#5C497E"
-- |Defines the color of a very soft pink, intended for the background.
pink1 :: Color
pink1 = parse "#DB94B8"
-- |Defines the color of a light grayish pink, intended for the background.
pink2 :: Color
pink2 = rgb 236 189 210
-- |Defines an empty border, making for a flat look.
borderNone :: Css
borderNone = border solid (px 0) white
-- |Defines a border with a color of pink1, intended for the timetable.
borderPink :: (Stroke -> Size LengthUnit -> Color -> Css) -> Css
borderPink borderStroke = borderStroke solid (px 2) pink1
{- More node colours! -}
-- |Defines the color of a dark grayish blue, intended for nodes.
teal1 :: Color
teal1 = parse "#737A99"
-- |Defines the color of a strong blue, intended for nodes.
orange1 :: Color
orange1 = parse "#1E7FCC"
-- |Defines the color of a very dark, mostly black, violet intended for nodes.
blue1 :: Color
blue1 = parse "#261B2A"
-- |Defines the color of a dark moderate blue, intended for nodes.
blue2 :: Color
blue2 = parse "#336685"
-- |Defines the color of a slightly lighter than blue2 dark moderate blue,
-- intended for nodes.
blue3 :: Color
blue3 = parse "#437699"
-- |Defines the color of a soft blue, intended for nodes.
blue4 :: Color
blue4 = parse "#5566F5"
-- |Defines the color of a very soft blue, intended for nodes.
blue5 :: Color
blue5 = parse "#A5A6F5"
-- |Defines the color of a slightly lighter than blue5 very soft blue,
-- intended for nodes.
blue6 :: Color
blue6 = rgb 184 231 249
-- |Defines the color of a slightly more vibrant than blue2 dark moderate
-- blue, intended for nodes.
blueFb :: Color
blueFb = rgb 59 89 152
-- |Defines the color of a strong red, intended for nodes.
red1 :: Color
red1 = parse "#C92343"
-- |Defines the color of a darker than red1 strong red, intended for nodes.
red2 :: Color
red2 = parse "#B91333"
-- |Defines the color of a moderate orange, intended for nodes.
red3 :: Color
red3 = rgb 215 117 70
-- |Defines the color of a slightly darker than red3 moderate orange, intended
-- for nodes.
red4 :: Color
red4 = rgb 195 97 50
-- |Defines the color of a light grayish red, intended for nodes.
red5 :: Color
red5 = rgb 221 189 189
-- |Defines the color of a very soft lime green, intended for nodes.
green1 :: Color
green1 = rgb 170 228 164
-- |Defines the color of a moderate cyan - lime green, intended for nodes.
green2 :: Color
green2 = parse "#3Cb371"
-- |Defines the color of a slightly darker than red4 moderate orange, intended
-- for nodes
dRed :: T.Text
dRed = "#D77546"
-- |Defines the color of a dark moderate cyan - lime green, intended for
-- nodes.
dGreen :: T.Text
dGreen = "#2E8B57"
-- |Defines the color of a dark moderate blue, intended for nodes.
dBlue :: T.Text
dBlue = "#437699"
-- |Defines the color of a very dark grayish magenta, intended for nodes.
dPurple :: T.Text
dPurple = "#46364A"
-- |Defines the color of a very dark gray, mostly black, intended for nodes.
grey1 :: Color
grey1 = parse "#222"
-- |Defines the color of a very light gray, intended for nodes.
grey2 :: Color
grey2 = parse "#dedede"
-- |Defines the color of a dark gray, intended for nodes.
grey3 :: Color
grey3 = parse "#949494"
-- |Defines the color of a gray, intended for nodes.
grey4 :: Color
grey4 = parse "#BABABA"
-- |Defines the color of a slightly darker than grey2 very light gray, intended for
-- nodes.
grey5 :: Color
grey5 = parse "#DCDCDC"
-- |Defines the color of a slightly lighter than grey3 dark gray, intended for
-- nodes.
grey6 :: Color
grey6 = parse "#9C9C9C"
-- |Defines the color of a light grayish orange, intended for nodes.
beige1 :: Color
beige1 = parse "#EBE8E4"
{-Color palette colors-}
-- |Defines the color of a very light red.
pastelRed :: Color
pastelRed = parse "#FF7878"
-- |Defines the color of a very light orange.
pastelOrange :: Color
pastelOrange = parse "#FFC48C"
-- |Defines the color of a very soft yellow.
pastelYellow :: Color
pastelYellow = parse "#EEDD99"
-- |Defines the color of a very soft lime green.
pastelGreen :: Color
pastelGreen = parse "#BDECB6"
-- |Defines the color of a very soft blue.
pastelBlue :: Color
pastelBlue = parse "#9BD1FA"
-- |Defines the color of a very pale red.
pastelPink :: Color
pastelPink = parse "#FFD1DC"
-- |Defines the color of a very soft magenta.
pastelPurple :: Color
pastelPurple = parse "#E3AAD6"
-- |Defines the color of a mostly desaturated dark orange.
pastelBrown :: Color
pastelBrown = parse "#AD876E"
-- |Defines the color of a dark grayish blue.
pastelGrey :: Color
pastelGrey = parse "#A2A9AF"
{- FCE count color. Currently unused. -}
-- |Defines the color of a light blue, intended for FCE count, and currently
-- unused.
fceCountColor :: Color
fceCountColor = parse "#66C2FF"
{- PostPage Color -}
-- |Defines the color of a soft dark green for post credits.
mGreen :: Color
mGreen = parse "#519A73"
-- |Defines the color of a soft dark red for post credits.
mRed :: Color
mRed = parse "#C91F37"
-- |Defines the color of a dark rose for the bottom bar.
darkRose :: Color
darkRose = parse "#815463"
-- |Defines the color of a soft green for selected courses.
softGreen :: Color
softGreen = parse "#669966"
{- Graph styles -}
-- |Defines node font size, 12 in pixels.
nodeFontSize :: Num a => a
nodeFontSize = 12
-- |Defines hybrid font size, 7 in pixels.
hybridFontSize :: Double
hybridFontSize = 7
-- |Defines bool font size, 6 in pixels.
boolFontSize :: Num a => a
boolFontSize = 6
-- |Defines region font size, 14 in pixels.
regionFontSize :: Num a => a
regionFontSize = 14
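{- A minimal usage sketch (illustrative only, not part of the original module).
   It assumes the surrounding Css modules render these constants with Clay;
   "nodeStyle" and the ".node" selector are hypothetical names.

nodeStyle :: Css
nodeStyle = ".node" ? do
    backgroundColor blue3
    fontColor grey1
    fontSize (px nodeFontSize)
-}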
| christinem/courseography | app/Css/Constants.hs | gpl-3.0 | 11,290 | 0 | 9 | 2,427 | 1,648 | 985 | 663 | 266 | 1 |
-- grid is a game written in Haskell
-- Copyright (C) 2018 [email protected]
--
-- This file is part of grid.
--
-- grid is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- grid is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with grid. If not, see <http://www.gnu.org/licenses/>.
--
module Game.Grid.GridWorld.Path
(
Path (..),
PathEvent (..),
makePath,
makePathNull,
destroyPath,
pathNode,
pathTurn,
pathPush,
#ifdef GRID_STYLE_FANCY
module Game.Grid.GridWorld.Path.Fancy,
#endif
#ifdef GRID_STYLE_PLAIN
module Game.Grid.GridWorld.Path.Plain,
#endif
) where
import MyPrelude
import Game
import Game.Grid.GridWorld.SegmentArray
import Game.Grid.GridWorld.Segment
import Game.Grid.GridWorld.Node
import Game.Grid.GridWorld.Turn
#ifdef GRID_STYLE_FANCY
import Game.Grid.GridWorld.Path.Fancy
#endif
#ifdef GRID_STYLE_PLAIN
import Game.Grid.GridWorld.Path.Plain
#endif
data Path =
Path
{
-- Path1
pathCurrent :: !Segment, -- ^ current
pathAlpha :: !Float, -- ^ current alpha, [0, 1) typically
-- Path0
pathArray :: SegmentArray, -- ^ segments before current
pathArraySize :: !UInt, -- ^ size of pathArray
pathArrayBegin :: !UInt, -- ^ begin ix
pathArrayEnd :: !UInt, -- ^ end ix
pathOverflowCount :: !UInt, -- ^ counting overwrites
pathSpeed :: !Float, -- ^ speed of alpha
pathWaiting :: !Bool, -- ^ pause
pathEvents :: [PathEvent], -- ^ events
-- output state
pathPathOutput :: !PathOutput, -- ^ data used by output
-- control state. such should ideally be extern to Path
pathTurnState :: ![Turn], -- ^ next turns relative to path
pathTurnStateX :: !Float, -- ^ X drag
pathTurnStateY :: !Float, -- ^ Y drag
pathTurnStateHandled :: !Bool -- ^ handled
}
data PathEvent =
EventNewSegment
-- | current node
pathNode :: Path -> Node
pathNode =
segmentNode . pathCurrent
-- | current turn relative to world
pathTurn :: Path -> Turn
pathTurn =
segmentTurn . pathCurrent
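-- | Create an empty path that can hold up to 'maxSize' segments before the
-- oldest ones start being overwritten (see 'pathPush').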
makePath :: UInt -> MEnv' Path
makePath maxSize = do
#ifdef DEBUG
when (valueGridMaxPathSize < maxSize) $
error ("makePath: " ++ show maxSize ++ " is too big size")
#endif
let size = maxSize + 1
po <- makePathOutput size
return Path
{
pathCurrent = mempty,
pathAlpha = 0.0,
pathArray = makeSegmentArray size,
pathArraySize = size,
pathArrayBegin = 0,
pathArrayEnd = 0,
pathPathOutput = po,
pathOverflowCount = 0,
pathSpeed = 1.0,
pathWaiting = False,
pathEvents = [],
pathTurnState = [],
pathTurnStateX = 0.0,
pathTurnStateY = 0.0,
pathTurnStateHandled = False
}
-- | null path
makePathNull :: MEnv' Path
makePathNull =
makePath 0
destroyPath :: Path -> MEnv' ()
destroyPath path = do
destroyPathOutput $ pathPathOutput path
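-- | Append a segment to the path's circular segment array and its output
-- state. When the array is full, the oldest slot is reclaimed and
-- 'pathOverflowCount' is incremented.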
pathPush :: Path -> Segment -> IO Path
pathPush path seg = do
let size = pathArraySize path
begin = pathArrayBegin path
end = pathArrayEnd path
end' = succMod size end
array' = segmentarrayWrite (pathArray path) end seg
-- PathOutput
po' <- writePathOutput (pathPathOutput path) size end seg
if end' == begin
then return path
{
pathArray = array',
pathArrayEnd = end',
pathPathOutput = po',
pathArrayBegin = succMod size begin,
pathOverflowCount = pathOverflowCount path + 1
}
else return path
{
pathArray = array',
pathArrayEnd = end',
pathPathOutput = po'
}
| karamellpelle/grid | source/Game/Grid/GridWorld/Path.hs | gpl-3.0 | 4,634 | 0 | 12 | 1,583 | 742 | 448 | 294 | 112 | 2 |
{-# LANGUAGE DeriveFunctor, DeriveDataTypeable, TemplateHaskell #-}
module Lamdu.Data.Expression.Infer.ImplicitVariables
( add, Payload(..)
) where
import Control.Applicative ((<$>))
import Control.Lens.Operators
import Control.Monad (foldM)
import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.State (StateT, State, evalStateT, mapStateT, state)
import Control.Monad.Trans.State.Utils (toStateT)
import Control.MonadA (MonadA)
import Data.Binary (Binary(..), getWord8, putWord8)
import Data.Derive.Binary (makeBinary)
import Data.DeriveTH (derive)
import Data.Monoid (mempty)
import Data.Store.Guid (Guid)
import Data.Typeable (Typeable)
import System.Random (RandomGen, random)
import qualified Control.Lens as Lens
import qualified Control.Monad.Trans.State as State
import qualified Data.Store.Guid as Guid
import qualified Lamdu.Data.Expression as Expr
import qualified Lamdu.Data.Expression.Infer as Infer
import qualified Lamdu.Data.Expression.Infer.UntilConflict as InferUntilConflict
import qualified Lamdu.Data.Expression.Lens as ExprLens
import qualified Lamdu.Data.Expression.Utils as ExprUtil
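-- | Payload attached to each subexpression after adding implicit variables:
-- either the original stored payload or a marker for an auto-generated node.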
data Payload a = Stored a | AutoGen Guid
deriving (Eq, Ord, Show, Functor, Typeable)
derive makeBinary ''Payload
isUnrestrictedHole :: Expr.Expression def Infer.IsRestrictedPoly -> Bool
isUnrestrictedHole
(Expr.Expression
(Expr.BodyLeaf Expr.Hole)
Infer.UnrestrictedPoly) = True
isUnrestrictedHole _ = False
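-- | Invent a fresh parameter for an unrestricted hole: a parameter reference
-- with a random 'Guid' is inferred at the hole's point, and the new
-- parameter's guid is returned together with an inference node for its type.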
addVariableForHole ::
(Show def, Ord def, RandomGen g) =>
Infer.InferNode def ->
StateT g (State (Infer.Context def)) (Guid, Infer.InferNode def)
addVariableForHole holePoint = do
paramGuid <- state random
let
getVar = ExprLens.pureExpr . ExprLens.bodyParameterRef # paramGuid
getVarLoaded = Infer.loadIndependent (("Loading a mere getVar: " ++) . show) Nothing getVar
lift $ do
inferredGetVar <-
InferUntilConflict.inferAssertNoConflict
"ImplicitVariables.addVariableForHole" getVarLoaded holePoint
let
paramTypeRef =
Infer.tvType . Infer.nRefs . Infer.iPoint . fst $
inferredGetVar ^. Expr.ePayload
paramTypeTypeRef <- Infer.createRefExpr
return
( paramGuid
, Infer.InferNode (Infer.TypedValue paramTypeRef paramTypeTypeRef) mempty
)
addVariablesForExpr ::
(MonadA m, Show def, Ord def, RandomGen g) =>
Infer.Loader def m ->
Expr.Expression def (Infer.Inferred def, a) ->
StateT g (StateT (Infer.Context def) m) [(Guid, Infer.InferNode def)]
addVariablesForExpr loader expr = do
reinferred <-
lift . State.gets . Infer.derefExpr $
expr & Lens.traversed . Lens._1 %~ Infer.iPoint
if isUnrestrictedHole $ inferredVal reinferred
then
fmap (:[]) . mapStateT toStateT . addVariableForHole $
Infer.iPoint . fst $ expr ^. Expr.ePayload
else do
reloaded <-
lift . lift . Infer.load loader Nothing $ -- <-- TODO: Nothing?
inferredVal reinferred
reinferredLoaded <-
lift . toStateT .
InferUntilConflict.inferAssertNoConflict
"ImplicitVariables.addVariableForExpr" reloaded .
Infer.iPoint $ reinferred ^. Expr.ePayload . Lens._1
fmap concat . mapM (addVariablesForExpr loader) .
filter (isUnrestrictedHole . inferredVal) $
ExprUtil.subExpressionsWithoutTags reinferredLoaded
where
inferredVal = Infer.iValue . fst . (^. Expr.ePayload)
addParam ::
Ord def =>
Expr.Expression def (Infer.InferNode def, Payload a) ->
(Guid, Infer.InferNode def) ->
State (Infer.Context def)
(Expr.Expression def (Infer.InferNode def, Payload a))
addParam body (paramGuid, paramTypeNode) = do
newRootNode <- Infer.newNodeWithScope mempty
let
newRootExpr =
Expr.Expression newRootLam (newRootNode, AutoGen (Guid.augment "root" paramGuid))
InferUntilConflict.assertNoConflict "Infer error when adding implicit vars" $
Infer.addRules InferUntilConflict.actions [fst <$> newRootExpr]
return newRootExpr
where
paramTypeExpr =
Expr.Expression
(Expr.BodyLeaf Expr.Hole)
(paramTypeNode, AutoGen (Guid.augment "paramType" paramGuid))
newRootLam =
ExprUtil.makeLambda paramGuid paramTypeExpr body
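-- | Add an implicit lambda parameter for every unrestricted hole found among
-- the expression's curried function arguments, then re-dereference the
-- expression against the updated inference context.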
add ::
(MonadA m, Ord def, Show def, RandomGen g) =>
g -> Infer.Loader def m ->
Expr.Expression def (Infer.Inferred def, a) ->
StateT (Infer.Context def) m
(Expr.Expression def (Infer.Inferred def, Payload a))
add gen loader expr = do
implicitParams <-
(`evalStateT` gen) . fmap concat .
mapM (addVariablesForExpr loader) $ ExprUtil.curriedFuncArguments expr
newRoot <- toStateT $ foldM addParam baseExpr implicitParams
State.gets $ Infer.derefExpr newRoot
where
baseExpr =
expr & Lens.traversed %~
(Lens._1 %~ Infer.iPoint) .
(Lens._2 %~ Stored)
| Mathnerd314/lamdu | src/Lamdu/Data/Expression/Infer/ImplicitVariables.hs | gpl-3.0 | 4,785 | 0 | 18 | 860 | 1,432 | 761 | 671 | -1 | -1 |
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE ViewPatterns #-}
module CSVReader
(
readData
) where
import Control.Lens
import Control.Monad
import Data.Maybe
import Data.List
import Text.CSV -- http://hackage.haskell.org/package/csv
import LectTypes
import LectData
import Debug.Trace
{- CAVEAT: You need the following definitions in 'Ranking.hs':
class Ranking m where
pointForRank :: m -> Int
instance Ranking Point where
pointForRank _ = 0 -- or whatever
instance Ranking SubjectWithPoint where
pointForRank x = 0 -- or whatever
-}
import Ranking
makeLenses ''YearProperty
makeLenses ''Student
data SheetID = A | B deriving (Enum, Eq, Ord, Show)
data FieldIndex = FieldIndex SheetID Int deriving (Eq, Ord, Show)
-- | Maps field indices within the CSV to subjects.
type Field2Subject = [(FieldIndex, Subject)]
-- | Maps field indices within the CSV to points (scores).
type Field2Point = [(FieldIndex, Point)]
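-- | Read one CSV file per sheet and merge the per-sheet student records by
-- student number, keeping only the given year's subjects ('filterLects').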
readData :: Bool -> YearProperty -> [FilePath] -> IO [Student]
readData debug prop csvs = map (calcFields prop . filterLects debug prop) . mergeStudents <$> zipWithM (readCSVFile debug prop) [A, B] csvs
-- | Be careful with this function: it ends up pretending that only that
-- year's subjects were ever read!
filterLects :: Bool -> YearProperty -> Student -> Student
filterLects debug (_subjectsOfYear -> subs) s
| null x = s { _studentArchives = filter ((`elem` subs) . fst) (_studentArchives s) }
| otherwise = error $ "invalid data: " ++ intercalate ", " (map subjectName x)
where
x = if debug then filter ((`notElem` subs) . fst) (_studentArchives s) else []
readCSVFile :: Bool -> YearProperty -> SheetID -> FilePath -> IO [Student]
readCSVFile debug cond p f = do
res <- parseCSVFromFile f
case res of
Left _ -> error $ "can't read: " ++ f
Right d -> do
let (subjects, d') = buildSubjectTable debug cond p d
return . catMaybes $ map (makeStudent p subjects) d'
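-- | Build the field-index-to-subject table from the CSV header row (columns
-- from index 3 onwards) and return it together with the remaining data rows.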
buildSubjectTable :: Bool -> YearProperty -> SheetID -> CSV -> (Field2Subject, CSV)
buildSubjectTable debug cond p c = (subjects, tail c)
where
subjects = mapMaybe (searchSubjectByName cond) . zip (map mkFieldIndex [3 ..]) . drop 3 . head $ c
-- subjects = if all (flip elem allCatgories) (nub (map category subjects'))
-- then subjects'
-- else error $ "Unhandled categories exist:" ++ intercalate "," (filter (not . (flip elem allCatgories)) (nub (map category subjects')))
allCatgories = (cond ^. categoriesOfYear) ^. each
mkFieldIndex i = FieldIndex p i
-- | name itself is not a primary key in the CSV files!
searchSubjectByName :: YearProperty -> (FieldIndex, String) -> Maybe (FieldIndex, Subject)
searchSubjectByName (_subjectsOfYear -> subs) (i, n)
| Just sub <- find checkCategory'Id'Name subs = Just (i, sub)
| Just sub <- find checkCategory'Id'Name allSubs =
if debug
then
let
req = if isInfixOf "必修" n then "Req" else "Opt"
unit = last n
name = reverse . drop 3 . reverse $ n
in
trace (" , Subject \"" ++ name ++ "\" " ++ req ++ " " ++ [unit]) Nothing
else Nothing
| otherwise = error $ n ++ "ABORT: the subject data is not found:\n , Subject \"" ++ n ++ "\" False 2"
where
-- | All subjects across every year, used as a fallback lookup.
allSubs = concatMap (_subjectsOfYear . snd) lectData
-- | In the new format (2016-01-22), subject names in the CSV end with a
-- required/optional marker and the number of units.
checkCategory'Id'Name Subject{..} =
(_subjectCategory `isPrefixOf` n)
&& (_subjectIDNumber `isInfixOf` n)
&& (_subjectName `isInfixOf` n)
-- && ((if _required then "必修" else "選択") `isInfixOf` n)
-- && (show _numUnits `isSuffixOf` n)
-- | Retaken subjects, whose subject numbers appear more than once, must be
-- merged here.
makeStudent :: SheetID -> Field2Subject -> Record -> Maybe Student
makeStudent _ _ [] = Nothing
makeStudent _ _ [""] = Nothing
makeStudent sid table l@(_ : edNum : name : _) =
Just $
Student
name -- _studentName :: String
edNum -- _studentNumber :: String
(makeTable l) -- _studentArchives :: PointTable
([], []) -- _studentState :: ([ConditionResult], [ConditionResult])
emptySummary -- _studentSummary :: UnitSummary
0 -- _studentGPA :: GPAPoint
0 -- _studentRankPoint :: rankPoint
where
nubSubjects :: PointTable -> PointTable
nubSubjects = foldr mergeSubjects []
mergeSubjects :: SubjectWithPoint -> PointTable -> PointTable
mergeSubjects s lst = map f $ s:lst
where
f s' = case merge s s' of
Just s_ -> s_
Nothing -> s'
makeTable row = nubSubjects . catMaybes $ zipWith try (map (FieldIndex sid) [0 ..]) row
try i (toPoint -> p) = (, p) <$> lookup i table
makeStudent _ _ x = error $ "`makeStudent` :" ++ show x
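-- | Merge the per-sheet student lists into a single list, joining records
-- that share a student number.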
mergeStudents :: [[Student]] -> [Student]
mergeStudents [] = []
mergeStudents (s:x) = map f allNumbers
where
merged = mergeStudents x
allNumbers = nub $ map _studentNumber s ++ map _studentNumber merged
f :: String -> Student
f name =
case (find ((name ==) . _studentNumber) s, find ((name ==) . _studentNumber) merged) of
(Just d1 , Just d2) -> let Just d = merge d1 d2 in d
(Just d1 , Nothing) -> d1
(Nothing , Just d2) -> d2
_ -> error "impossible situation in mergeStudents"
| shnarazk/gradcondtools | CSVReader.hs | gpl-3.0 | 5,678 | 0 | 17 | 1,440 | 1,470 | 776 | 694 | 94 | 4 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Tasks.Tasks.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Returns all tasks in the specified task list.
--
-- /See:/ <https://developers.google.com/tasks/ Tasks API Reference> for @tasks.tasks.list@.
module Network.Google.Resource.Tasks.Tasks.List
(
-- * REST Resource
TasksListResource
-- * Creating a Request
, tasksList
, TasksList
-- * Request Lenses
, tlXgafv
, tlUploadProtocol
, tlAccessToken
, tlDueMax
, tlUploadType
, tlShowDeleted
, tlShowCompleted
, tlDueMin
, tlShowHidden
, tlCompletedMax
, tlUpdatedMin
, tlTaskList
, tlCompletedMin
, tlPageToken
, tlMaxResults
, tlCallback
) where
import Network.Google.AppsTasks.Types
import Network.Google.Prelude
-- | A resource alias for @tasks.tasks.list@ method which the
-- 'TasksList' request conforms to.
type TasksListResource =
"tasks" :>
"v1" :>
"lists" :>
Capture "tasklist" Text :>
"tasks" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "dueMax" Text :>
QueryParam "uploadType" Text :>
QueryParam "showDeleted" Bool :>
QueryParam "showCompleted" Bool :>
QueryParam "dueMin" Text :>
QueryParam "showHidden" Bool :>
QueryParam "completedMax" Text :>
QueryParam "updatedMin" Text :>
QueryParam "completedMin" Text :>
QueryParam "pageToken" Text :>
QueryParam "maxResults" (Textual Int32)
:>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
Get '[JSON] Tasks
-- | Returns all tasks in the specified task list.
--
-- /See:/ 'tasksList' smart constructor.
data TasksList =
TasksList'
{ _tlXgafv :: !(Maybe Xgafv)
, _tlUploadProtocol :: !(Maybe Text)
, _tlAccessToken :: !(Maybe Text)
, _tlDueMax :: !(Maybe Text)
, _tlUploadType :: !(Maybe Text)
, _tlShowDeleted :: !(Maybe Bool)
, _tlShowCompleted :: !(Maybe Bool)
, _tlDueMin :: !(Maybe Text)
, _tlShowHidden :: !(Maybe Bool)
, _tlCompletedMax :: !(Maybe Text)
, _tlUpdatedMin :: !(Maybe Text)
, _tlTaskList :: !Text
, _tlCompletedMin :: !(Maybe Text)
, _tlPageToken :: !(Maybe Text)
, _tlMaxResults :: !(Maybe (Textual Int32))
, _tlCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'TasksList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tlXgafv'
--
-- * 'tlUploadProtocol'
--
-- * 'tlAccessToken'
--
-- * 'tlDueMax'
--
-- * 'tlUploadType'
--
-- * 'tlShowDeleted'
--
-- * 'tlShowCompleted'
--
-- * 'tlDueMin'
--
-- * 'tlShowHidden'
--
-- * 'tlCompletedMax'
--
-- * 'tlUpdatedMin'
--
-- * 'tlTaskList'
--
-- * 'tlCompletedMin'
--
-- * 'tlPageToken'
--
-- * 'tlMaxResults'
--
-- * 'tlCallback'
tasksList
:: Text -- ^ 'tlTaskList'
-> TasksList
tasksList pTlTaskList_ =
TasksList'
{ _tlXgafv = Nothing
, _tlUploadProtocol = Nothing
, _tlAccessToken = Nothing
, _tlDueMax = Nothing
, _tlUploadType = Nothing
, _tlShowDeleted = Nothing
, _tlShowCompleted = Nothing
, _tlDueMin = Nothing
, _tlShowHidden = Nothing
, _tlCompletedMax = Nothing
, _tlUpdatedMin = Nothing
, _tlTaskList = pTlTaskList_
, _tlCompletedMin = Nothing
, _tlPageToken = Nothing
, _tlMaxResults = Nothing
, _tlCallback = Nothing
}
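-- Example (illustrative only; the task-list identifier below is made up):
-- request up to 50 tasks from a list, including completed ones, by updating
-- the defaults with the lenses defined below:
--
-- > tasksList "MDExMjM0NTY3ODkw"
-- >   & tlShowCompleted ?~ True
-- >   & tlMaxResults ?~ 50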
-- | V1 error format.
tlXgafv :: Lens' TasksList (Maybe Xgafv)
tlXgafv = lens _tlXgafv (\ s a -> s{_tlXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
tlUploadProtocol :: Lens' TasksList (Maybe Text)
tlUploadProtocol
= lens _tlUploadProtocol
(\ s a -> s{_tlUploadProtocol = a})
-- | OAuth access token.
tlAccessToken :: Lens' TasksList (Maybe Text)
tlAccessToken
= lens _tlAccessToken
(\ s a -> s{_tlAccessToken = a})
-- | Upper bound for a task\'s due date (as a RFC 3339 timestamp) to filter
-- by. Optional. The default is not to filter by due date.
tlDueMax :: Lens' TasksList (Maybe Text)
tlDueMax = lens _tlDueMax (\ s a -> s{_tlDueMax = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
tlUploadType :: Lens' TasksList (Maybe Text)
tlUploadType
= lens _tlUploadType (\ s a -> s{_tlUploadType = a})
-- | Flag indicating whether deleted tasks are returned in the result.
-- Optional. The default is False.
tlShowDeleted :: Lens' TasksList (Maybe Bool)
tlShowDeleted
= lens _tlShowDeleted
(\ s a -> s{_tlShowDeleted = a})
-- | Flag indicating whether completed tasks are returned in the result.
-- Optional. The default is True. Note that showHidden must also be True to
-- show tasks completed in first party clients, such as the web UI and
-- Google\'s mobile apps.
tlShowCompleted :: Lens' TasksList (Maybe Bool)
tlShowCompleted
= lens _tlShowCompleted
(\ s a -> s{_tlShowCompleted = a})
-- | Lower bound for a task\'s due date (as a RFC 3339 timestamp) to filter
-- by. Optional. The default is not to filter by due date.
tlDueMin :: Lens' TasksList (Maybe Text)
tlDueMin = lens _tlDueMin (\ s a -> s{_tlDueMin = a})
-- | Flag indicating whether hidden tasks are returned in the result.
-- Optional. The default is False.
tlShowHidden :: Lens' TasksList (Maybe Bool)
tlShowHidden
= lens _tlShowHidden (\ s a -> s{_tlShowHidden = a})
-- | Upper bound for a task\'s completion date (as a RFC 3339 timestamp) to
-- filter by. Optional. The default is not to filter by completion date.
tlCompletedMax :: Lens' TasksList (Maybe Text)
tlCompletedMax
= lens _tlCompletedMax
(\ s a -> s{_tlCompletedMax = a})
-- | Lower bound for a task\'s last modification time (as a RFC 3339
-- timestamp) to filter by. Optional. The default is not to filter by last
-- modification time.
tlUpdatedMin :: Lens' TasksList (Maybe Text)
tlUpdatedMin
= lens _tlUpdatedMin (\ s a -> s{_tlUpdatedMin = a})
-- | Task list identifier.
tlTaskList :: Lens' TasksList Text
tlTaskList
= lens _tlTaskList (\ s a -> s{_tlTaskList = a})
-- | Lower bound for a task\'s completion date (as a RFC 3339 timestamp) to
-- filter by. Optional. The default is not to filter by completion date.
tlCompletedMin :: Lens' TasksList (Maybe Text)
tlCompletedMin
= lens _tlCompletedMin
(\ s a -> s{_tlCompletedMin = a})
-- | Token specifying the result page to return. Optional.
tlPageToken :: Lens' TasksList (Maybe Text)
tlPageToken
= lens _tlPageToken (\ s a -> s{_tlPageToken = a})
-- | Maximum number of task lists returned on one page. Optional. The default
-- is 20 (max allowed: 100).
tlMaxResults :: Lens' TasksList (Maybe Int32)
tlMaxResults
= lens _tlMaxResults (\ s a -> s{_tlMaxResults = a})
. mapping _Coerce
-- | JSONP
tlCallback :: Lens' TasksList (Maybe Text)
tlCallback
= lens _tlCallback (\ s a -> s{_tlCallback = a})
instance GoogleRequest TasksList where
type Rs TasksList = Tasks
type Scopes TasksList =
'["https://www.googleapis.com/auth/tasks",
"https://www.googleapis.com/auth/tasks.readonly"]
requestClient TasksList'{..}
= go _tlTaskList _tlXgafv _tlUploadProtocol
_tlAccessToken
_tlDueMax
_tlUploadType
_tlShowDeleted
_tlShowCompleted
_tlDueMin
_tlShowHidden
_tlCompletedMax
_tlUpdatedMin
_tlCompletedMin
_tlPageToken
_tlMaxResults
_tlCallback
(Just AltJSON)
appsTasksService
where go
= buildClient (Proxy :: Proxy TasksListResource)
mempty
| brendanhay/gogol | gogol-apps-tasks/gen/Network/Google/Resource/Tasks/Tasks/List.hs | mpl-2.0 | 8,980 | 0 | 28 | 2,483 | 1,540 | 884 | 656 | 208 | 1 |